2 * lttng-filter-interpreter.c
4 * LTTng modules filter interpreter.
6 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
27 #include <linux/uaccess.h>
28 #include <wrapper/frame.h>
29 #include <wrapper/types.h>
30 #include <linux/swab.h>
32 #include <lttng-filter.h>
33 #include <lttng-string-utils.h>
35 LTTNG_STACK_FRAME_NON_STANDARD(lttng_filter_interpret_bytecode
);
38 * get_char should be called with page fault handler disabled if it is expected
39 * to handle user-space read.
42 char get_char(struct estack_entry
*reg
, size_t offset
)
44 if (unlikely(offset
>= reg
->u
.s
.seq_len
))
49 /* Handle invalid access as end of string. */
50 if (unlikely(!access_ok(VERIFY_READ
,
51 reg
->u
.s
.user_str
+ offset
,
54 /* Handle fault (nonzero return value) as end of string. */
55 if (unlikely(__copy_from_user_inatomic(&c
,
56 reg
->u
.s
.user_str
+ offset
,
61 return reg
->u
.s
.str
[offset
];
67 * -2: unknown escape char.
71 int parse_char(struct estack_entry
*reg
, char *c
, size_t *offset
)
76 *c
= get_char(reg
, *offset
);
92 char get_char_at_cb(size_t at
, void *data
)
94 return get_char(data
, at
);
98 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
100 bool has_user
= false;
103 struct estack_entry
*pattern_reg
;
104 struct estack_entry
*candidate_reg
;
106 if (estack_bx(stack
, top
)->u
.s
.user
107 || estack_ax(stack
, top
)->u
.s
.user
) {
114 /* Find out which side is the pattern vs. the candidate. */
115 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
116 pattern_reg
= estack_ax(stack
, top
);
117 candidate_reg
= estack_bx(stack
, top
);
119 pattern_reg
= estack_bx(stack
, top
);
120 candidate_reg
= estack_ax(stack
, top
);
123 /* Perform the match operation. */
124 result
= !strutils_star_glob_match_char_cb(get_char_at_cb
,
125 pattern_reg
, get_char_at_cb
, candidate_reg
);
135 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
137 size_t offset_bx
= 0, offset_ax
= 0;
138 int diff
, has_user
= 0;
141 if (estack_bx(stack
, top
)->u
.s
.user
142 || estack_ax(stack
, top
)->u
.s
.user
) {
152 char char_bx
, char_ax
;
154 char_bx
= get_char(estack_bx(stack
, top
), offset_bx
);
155 char_ax
= get_char(estack_ax(stack
, top
), offset_ax
);
157 if (unlikely(char_bx
== '\0')) {
158 if (char_ax
== '\0') {
162 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
163 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
164 ret
= parse_char(estack_ax(stack
, top
),
165 &char_ax
, &offset_ax
);
175 if (unlikely(char_ax
== '\0')) {
176 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
177 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
178 ret
= parse_char(estack_bx(stack
, top
),
179 &char_bx
, &offset_bx
);
188 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
189 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
190 ret
= parse_char(estack_bx(stack
, top
),
191 &char_bx
, &offset_bx
);
195 } else if (ret
== -2) {
198 /* else compare both char */
200 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
201 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
202 ret
= parse_char(estack_ax(stack
, top
),
203 &char_ax
, &offset_ax
);
207 } else if (ret
== -2) {
224 diff
= char_bx
- char_ax
;
/*
 * Trivial filter that always discards the event: used as the interpreter
 * entry point while no bytecode (or invalid bytecode) is attached.
 * Returns LTTNG_FILTER_DISCARD (0).
 */
uint64_t lttng_filter_false(void *filter_data,
		struct lttng_probe_ctx *lttng_probe_ctx,
		const char *filter_stack_data)
{
	return 0;
}
244 #ifdef INTERPRETER_USE_SWITCH
247 * Fallback for compilers that do not support taking address of labels.
251 start_pc = &bytecode->data[0]; \
252 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
254 dbg_printk("Executing op %s (%u)\n", \
255 lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
256 (unsigned int) *(filter_opcode_t *) pc); \
257 switch (*(filter_opcode_t *) pc) {
259 #define OP(name) case name
269 * Dispatch-table based interpreter.
273 start_pc = &bytecode->code[0]; \
274 pc = next_pc = start_pc; \
275 if (unlikely(pc - start_pc >= bytecode->len)) \
277 goto *dispatch[*(filter_opcode_t *) pc];
284 goto *dispatch[*(filter_opcode_t *) pc];
290 static int context_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
291 struct load_ptr
*ptr
,
295 struct lttng_ctx_field
*ctx_field
;
296 struct lttng_event_field
*field
;
297 union lttng_ctx_value v
;
299 ctx_field
= <tng_static_ctx
->fields
[idx
];
300 field
= &ctx_field
->event_field
;
301 ptr
->type
= LOAD_OBJECT
;
302 /* field is only used for types nested within variants. */
305 switch (field
->type
.atype
) {
307 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
308 if (field
->type
.u
.basic
.integer
.signedness
) {
309 ptr
->object_type
= OBJECT_TYPE_S64
;
311 ptr
->ptr
= &ptr
->u
.s64
;
313 ptr
->object_type
= OBJECT_TYPE_U64
;
314 ptr
->u
.u64
= v
.s64
; /* Cast. */
315 ptr
->ptr
= &ptr
->u
.u64
;
320 const struct lttng_integer_type
*itype
=
321 &field
->type
.u
.basic
.enumeration
.container_type
;
323 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
324 if (itype
->signedness
) {
325 ptr
->object_type
= OBJECT_TYPE_S64
;
327 ptr
->ptr
= &ptr
->u
.s64
;
329 ptr
->object_type
= OBJECT_TYPE_U64
;
330 ptr
->u
.u64
= v
.s64
; /* Cast. */
331 ptr
->ptr
= &ptr
->u
.u64
;
336 if (field
->type
.u
.array
.elem_type
.atype
!= atype_integer
) {
337 printk(KERN_WARNING
"Array nesting only supports integer types.\n");
340 if (field
->type
.u
.array
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
341 printk(KERN_WARNING
"Only string arrays are supported for contexts.\n");
344 ptr
->object_type
= OBJECT_TYPE_STRING
;
345 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
349 if (field
->type
.u
.sequence
.elem_type
.atype
!= atype_integer
) {
350 printk(KERN_WARNING
"Sequence nesting only supports integer types.\n");
353 if (field
->type
.u
.sequence
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
354 printk(KERN_WARNING
"Only string sequences are supported for contexts.\n");
357 ptr
->object_type
= OBJECT_TYPE_STRING
;
358 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
361 case atype_array_bitfield
:
362 printk(KERN_WARNING
"Bitfield array type is not supported.\n");
364 case atype_sequence_bitfield
:
365 printk(KERN_WARNING
"Bitfield sequence type is not supported.\n");
368 ptr
->object_type
= OBJECT_TYPE_STRING
;
369 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
373 printk(KERN_WARNING
"Structure type cannot be loaded.\n");
376 printk(KERN_WARNING
"Unknown type: %d", (int) field
->type
.atype
);
382 static int dynamic_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
383 struct bytecode_runtime
*runtime
,
384 uint64_t index
, struct estack_entry
*stack_top
)
387 const struct filter_get_index_data
*gid
;
390 * Types nested within variants need to perform dynamic lookup
391 * based on the field descriptions. LTTng-UST does not implement
394 if (stack_top
->u
.ptr
.field
)
396 gid
= (const struct filter_get_index_data
*) &runtime
->data
[index
];
397 switch (stack_top
->u
.ptr
.type
) {
399 switch (stack_top
->u
.ptr
.object_type
) {
400 case OBJECT_TYPE_ARRAY
:
404 WARN_ON_ONCE(gid
->offset
>= gid
->array_len
);
405 /* Skip count (unsigned long) */
406 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
407 ptr
= ptr
+ gid
->offset
;
408 stack_top
->u
.ptr
.ptr
= ptr
;
409 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
410 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
411 /* field is only used for types nested within variants. */
412 stack_top
->u
.ptr
.field
= NULL
;
415 case OBJECT_TYPE_SEQUENCE
:
420 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
421 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
422 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
426 ptr
= ptr
+ gid
->offset
;
427 stack_top
->u
.ptr
.ptr
= ptr
;
428 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
429 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
430 /* field is only used for types nested within variants. */
431 stack_top
->u
.ptr
.field
= NULL
;
434 case OBJECT_TYPE_STRUCT
:
435 printk(KERN_WARNING
"Nested structures are not supported yet.\n");
438 case OBJECT_TYPE_VARIANT
:
440 printk(KERN_WARNING
"Unexpected get index type %d",
441 (int) stack_top
->u
.ptr
.object_type
);
446 case LOAD_ROOT_CONTEXT
:
447 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
449 ret
= context_get_index(lttng_probe_ctx
,
457 case LOAD_ROOT_PAYLOAD
:
458 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
459 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
460 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
461 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
462 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
463 /* field is only used for types nested within variants. */
464 stack_top
->u
.ptr
.field
= NULL
;
473 static int dynamic_load_field(struct estack_entry
*stack_top
)
477 switch (stack_top
->u
.ptr
.type
) {
480 case LOAD_ROOT_CONTEXT
:
481 case LOAD_ROOT_APP_CONTEXT
:
482 case LOAD_ROOT_PAYLOAD
:
484 dbg_printk("Filter warning: cannot load root, missing field name.\n");
488 switch (stack_top
->u
.ptr
.object_type
) {
490 dbg_printk("op load field s8\n");
491 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
493 case OBJECT_TYPE_S16
:
497 dbg_printk("op load field s16\n");
498 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
499 if (stack_top
->u
.ptr
.rev_bo
)
501 stack_top
->u
.v
= tmp
;
504 case OBJECT_TYPE_S32
:
508 dbg_printk("op load field s32\n");
509 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
510 if (stack_top
->u
.ptr
.rev_bo
)
512 stack_top
->u
.v
= tmp
;
515 case OBJECT_TYPE_S64
:
519 dbg_printk("op load field s64\n");
520 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
521 if (stack_top
->u
.ptr
.rev_bo
)
523 stack_top
->u
.v
= tmp
;
527 dbg_printk("op load field u8\n");
528 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
530 case OBJECT_TYPE_U16
:
534 dbg_printk("op load field s16\n");
535 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
536 if (stack_top
->u
.ptr
.rev_bo
)
538 stack_top
->u
.v
= tmp
;
541 case OBJECT_TYPE_U32
:
545 dbg_printk("op load field u32\n");
546 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
547 if (stack_top
->u
.ptr
.rev_bo
)
549 stack_top
->u
.v
= tmp
;
552 case OBJECT_TYPE_U64
:
556 dbg_printk("op load field u64\n");
557 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
558 if (stack_top
->u
.ptr
.rev_bo
)
560 stack_top
->u
.v
= tmp
;
563 case OBJECT_TYPE_STRING
:
567 dbg_printk("op load field string\n");
568 str
= (const char *) stack_top
->u
.ptr
.ptr
;
569 stack_top
->u
.s
.str
= str
;
570 if (unlikely(!stack_top
->u
.s
.str
)) {
571 dbg_printk("Filter warning: loading a NULL string.\n");
575 stack_top
->u
.s
.seq_len
= SIZE_MAX
;
576 stack_top
->u
.s
.literal_type
=
577 ESTACK_STRING_LITERAL_TYPE_NONE
;
580 case OBJECT_TYPE_STRING_SEQUENCE
:
584 dbg_printk("op load field string sequence\n");
585 ptr
= stack_top
->u
.ptr
.ptr
;
586 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
587 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
588 if (unlikely(!stack_top
->u
.s
.str
)) {
589 dbg_printk("Filter warning: loading a NULL sequence.\n");
593 stack_top
->u
.s
.literal_type
=
594 ESTACK_STRING_LITERAL_TYPE_NONE
;
597 case OBJECT_TYPE_DYNAMIC
:
599 * Dynamic types in context are looked up
600 * by context get index.
604 case OBJECT_TYPE_DOUBLE
:
607 case OBJECT_TYPE_SEQUENCE
:
608 case OBJECT_TYPE_ARRAY
:
609 case OBJECT_TYPE_STRUCT
:
610 case OBJECT_TYPE_VARIANT
:
611 printk(KERN_WARNING
"Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
622 * Return 0 (discard), or raise the 0x1 flag (log event).
623 * Currently, other flags are kept for future extensions and have no
626 uint64_t lttng_filter_interpret_bytecode(void *filter_data
,
627 struct lttng_probe_ctx
*lttng_probe_ctx
,
628 const char *filter_stack_data
)
630 struct bytecode_runtime
*bytecode
= filter_data
;
631 void *pc
, *next_pc
, *start_pc
;
634 struct estack _stack
;
635 struct estack
*stack
= &_stack
;
636 register int64_t ax
= 0, bx
= 0;
637 register int top
= FILTER_STACK_EMPTY
;
638 #ifndef INTERPRETER_USE_SWITCH
639 static void *dispatch
[NR_FILTER_OPS
] = {
640 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
642 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
645 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
646 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
647 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
648 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
649 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
650 [ FILTER_OP_BIT_RSHIFT
] = &&LABEL_FILTER_OP_BIT_RSHIFT
,
651 [ FILTER_OP_BIT_LSHIFT
] = &&LABEL_FILTER_OP_BIT_LSHIFT
,
652 [ FILTER_OP_BIT_AND
] = &&LABEL_FILTER_OP_BIT_AND
,
653 [ FILTER_OP_BIT_OR
] = &&LABEL_FILTER_OP_BIT_OR
,
654 [ FILTER_OP_BIT_XOR
] = &&LABEL_FILTER_OP_BIT_XOR
,
656 /* binary comparators */
657 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
658 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
659 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
660 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
661 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
662 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
664 /* string binary comparator */
665 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
666 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
667 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
668 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
669 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
670 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
672 /* globbing pattern binary comparator */
673 [ FILTER_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING
,
674 [ FILTER_OP_NE_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING
,
676 /* s64 binary comparator */
677 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
678 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
679 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
680 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
681 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
682 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
684 /* double binary comparator */
685 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
686 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
687 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
688 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
689 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
690 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
692 /* Mixed S64-double binary comparators */
693 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
694 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
695 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
696 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
697 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
698 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
700 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
701 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
702 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
703 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
704 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
705 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
708 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
709 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
710 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
711 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
712 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
713 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
714 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
715 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
716 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
719 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
720 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
723 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
724 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
725 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
726 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
727 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
729 /* load from immediate operand */
730 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
731 [ FILTER_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING
,
732 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
733 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
736 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
737 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
738 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
740 /* get context ref */
741 [ FILTER_OP_GET_CONTEXT_REF
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF
,
742 [ FILTER_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING
,
743 [ FILTER_OP_GET_CONTEXT_REF_S64
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64
,
744 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE
,
746 /* load userspace field ref */
747 [ FILTER_OP_LOAD_FIELD_REF_USER_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_STRING
,
748 [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
,
750 /* Instructions for recursive traversal through composed types. */
751 [ FILTER_OP_GET_CONTEXT_ROOT
] = &&LABEL_FILTER_OP_GET_CONTEXT_ROOT
,
752 [ FILTER_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_FILTER_OP_GET_APP_CONTEXT_ROOT
,
753 [ FILTER_OP_GET_PAYLOAD_ROOT
] = &&LABEL_FILTER_OP_GET_PAYLOAD_ROOT
,
755 [ FILTER_OP_GET_SYMBOL
] = &&LABEL_FILTER_OP_GET_SYMBOL
,
756 [ FILTER_OP_GET_SYMBOL_FIELD
] = &&LABEL_FILTER_OP_GET_SYMBOL_FIELD
,
757 [ FILTER_OP_GET_INDEX_U16
] = &&LABEL_FILTER_OP_GET_INDEX_U16
,
758 [ FILTER_OP_GET_INDEX_U64
] = &&LABEL_FILTER_OP_GET_INDEX_U64
,
760 [ FILTER_OP_LOAD_FIELD
] = &&LABEL_FILTER_OP_LOAD_FIELD
,
761 [ FILTER_OP_LOAD_FIELD_S8
] = &&LABEL_FILTER_OP_LOAD_FIELD_S8
,
762 [ FILTER_OP_LOAD_FIELD_S16
] = &&LABEL_FILTER_OP_LOAD_FIELD_S16
,
763 [ FILTER_OP_LOAD_FIELD_S32
] = &&LABEL_FILTER_OP_LOAD_FIELD_S32
,
764 [ FILTER_OP_LOAD_FIELD_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_S64
,
765 [ FILTER_OP_LOAD_FIELD_U8
] = &&LABEL_FILTER_OP_LOAD_FIELD_U8
,
766 [ FILTER_OP_LOAD_FIELD_U16
] = &&LABEL_FILTER_OP_LOAD_FIELD_U16
,
767 [ FILTER_OP_LOAD_FIELD_U32
] = &&LABEL_FILTER_OP_LOAD_FIELD_U32
,
768 [ FILTER_OP_LOAD_FIELD_U64
] = &&LABEL_FILTER_OP_LOAD_FIELD_U64
,
769 [ FILTER_OP_LOAD_FIELD_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_STRING
,
770 [ FILTER_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_SEQUENCE
,
771 [ FILTER_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_DOUBLE
,
773 [ FILTER_OP_UNARY_BIT_NOT
] = &&LABEL_FILTER_OP_UNARY_BIT_NOT
,
775 #endif /* #ifndef INTERPRETER_USE_SWITCH */
779 OP(FILTER_OP_UNKNOWN
):
780 OP(FILTER_OP_LOAD_FIELD_REF
):
781 OP(FILTER_OP_GET_CONTEXT_REF
):
782 #ifdef INTERPRETER_USE_SWITCH
784 #endif /* INTERPRETER_USE_SWITCH */
785 printk(KERN_WARNING
"unknown bytecode op %u\n",
786 (unsigned int) *(filter_opcode_t
*) pc
);
790 OP(FILTER_OP_RETURN
):
791 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
792 retval
= !!estack_ax_v
;
802 printk(KERN_WARNING
"unsupported bytecode op %u\n",
803 (unsigned int) *(filter_opcode_t
*) pc
);
813 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
814 (unsigned int) *(filter_opcode_t
*) pc
);
818 OP(FILTER_OP_EQ_STRING
):
822 res
= (stack_strcmp(stack
, top
, "==") == 0);
823 estack_pop(stack
, top
, ax
, bx
);
825 next_pc
+= sizeof(struct binary_op
);
828 OP(FILTER_OP_NE_STRING
):
832 res
= (stack_strcmp(stack
, top
, "!=") != 0);
833 estack_pop(stack
, top
, ax
, bx
);
835 next_pc
+= sizeof(struct binary_op
);
838 OP(FILTER_OP_GT_STRING
):
842 res
= (stack_strcmp(stack
, top
, ">") > 0);
843 estack_pop(stack
, top
, ax
, bx
);
845 next_pc
+= sizeof(struct binary_op
);
848 OP(FILTER_OP_LT_STRING
):
852 res
= (stack_strcmp(stack
, top
, "<") < 0);
853 estack_pop(stack
, top
, ax
, bx
);
855 next_pc
+= sizeof(struct binary_op
);
858 OP(FILTER_OP_GE_STRING
):
862 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
863 estack_pop(stack
, top
, ax
, bx
);
865 next_pc
+= sizeof(struct binary_op
);
868 OP(FILTER_OP_LE_STRING
):
872 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
873 estack_pop(stack
, top
, ax
, bx
);
875 next_pc
+= sizeof(struct binary_op
);
879 OP(FILTER_OP_EQ_STAR_GLOB_STRING
):
883 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
884 estack_pop(stack
, top
, ax
, bx
);
886 next_pc
+= sizeof(struct binary_op
);
889 OP(FILTER_OP_NE_STAR_GLOB_STRING
):
893 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
894 estack_pop(stack
, top
, ax
, bx
);
896 next_pc
+= sizeof(struct binary_op
);
900 OP(FILTER_OP_EQ_S64
):
904 res
= (estack_bx_v
== estack_ax_v
);
905 estack_pop(stack
, top
, ax
, bx
);
907 next_pc
+= sizeof(struct binary_op
);
910 OP(FILTER_OP_NE_S64
):
914 res
= (estack_bx_v
!= estack_ax_v
);
915 estack_pop(stack
, top
, ax
, bx
);
917 next_pc
+= sizeof(struct binary_op
);
920 OP(FILTER_OP_GT_S64
):
924 res
= (estack_bx_v
> estack_ax_v
);
925 estack_pop(stack
, top
, ax
, bx
);
927 next_pc
+= sizeof(struct binary_op
);
930 OP(FILTER_OP_LT_S64
):
934 res
= (estack_bx_v
< estack_ax_v
);
935 estack_pop(stack
, top
, ax
, bx
);
937 next_pc
+= sizeof(struct binary_op
);
940 OP(FILTER_OP_GE_S64
):
944 res
= (estack_bx_v
>= estack_ax_v
);
945 estack_pop(stack
, top
, ax
, bx
);
947 next_pc
+= sizeof(struct binary_op
);
950 OP(FILTER_OP_LE_S64
):
954 res
= (estack_bx_v
<= estack_ax_v
);
955 estack_pop(stack
, top
, ax
, bx
);
957 next_pc
+= sizeof(struct binary_op
);
961 OP(FILTER_OP_EQ_DOUBLE
):
962 OP(FILTER_OP_NE_DOUBLE
):
963 OP(FILTER_OP_GT_DOUBLE
):
964 OP(FILTER_OP_LT_DOUBLE
):
965 OP(FILTER_OP_GE_DOUBLE
):
966 OP(FILTER_OP_LE_DOUBLE
):
972 /* Mixed S64-double binary comparators */
973 OP(FILTER_OP_EQ_DOUBLE_S64
):
974 OP(FILTER_OP_NE_DOUBLE_S64
):
975 OP(FILTER_OP_GT_DOUBLE_S64
):
976 OP(FILTER_OP_LT_DOUBLE_S64
):
977 OP(FILTER_OP_GE_DOUBLE_S64
):
978 OP(FILTER_OP_LE_DOUBLE_S64
):
979 OP(FILTER_OP_EQ_S64_DOUBLE
):
980 OP(FILTER_OP_NE_S64_DOUBLE
):
981 OP(FILTER_OP_GT_S64_DOUBLE
):
982 OP(FILTER_OP_LT_S64_DOUBLE
):
983 OP(FILTER_OP_GE_S64_DOUBLE
):
984 OP(FILTER_OP_LE_S64_DOUBLE
):
989 OP(FILTER_OP_BIT_RSHIFT
):
993 res
= (estack_bx_v
>> estack_ax_v
);
994 estack_pop(stack
, top
, ax
, bx
);
996 next_pc
+= sizeof(struct binary_op
);
999 OP(FILTER_OP_BIT_LSHIFT
):
1003 res
= (estack_bx_v
<< estack_ax_v
);
1004 estack_pop(stack
, top
, ax
, bx
);
1006 next_pc
+= sizeof(struct binary_op
);
1009 OP(FILTER_OP_BIT_AND
):
1013 res
= (estack_bx_v
& estack_ax_v
);
1014 estack_pop(stack
, top
, ax
, bx
);
1016 next_pc
+= sizeof(struct binary_op
);
1019 OP(FILTER_OP_BIT_OR
):
1023 res
= (estack_bx_v
| estack_ax_v
);
1024 estack_pop(stack
, top
, ax
, bx
);
1026 next_pc
+= sizeof(struct binary_op
);
1029 OP(FILTER_OP_BIT_XOR
):
1033 res
= (estack_bx_v
^ estack_ax_v
);
1034 estack_pop(stack
, top
, ax
, bx
);
1036 next_pc
+= sizeof(struct binary_op
);
1041 OP(FILTER_OP_UNARY_PLUS
):
1042 OP(FILTER_OP_UNARY_MINUS
):
1043 OP(FILTER_OP_UNARY_NOT
):
1044 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
1045 (unsigned int) *(filter_opcode_t
*) pc
);
1050 OP(FILTER_OP_UNARY_BIT_NOT
):
1052 estack_ax_v
= ~estack_ax_v
;
1053 next_pc
+= sizeof(struct unary_op
);
1057 OP(FILTER_OP_UNARY_PLUS_S64
):
1059 next_pc
+= sizeof(struct unary_op
);
1062 OP(FILTER_OP_UNARY_MINUS_S64
):
1064 estack_ax_v
= -estack_ax_v
;
1065 next_pc
+= sizeof(struct unary_op
);
1068 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
1069 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
1074 OP(FILTER_OP_UNARY_NOT_S64
):
1076 estack_ax_v
= !estack_ax_v
;
1077 next_pc
+= sizeof(struct unary_op
);
1080 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
1089 struct logical_op
*insn
= (struct logical_op
*) pc
;
1091 /* If AX is 0, skip and evaluate to 0 */
1092 if (unlikely(estack_ax_v
== 0)) {
1093 dbg_printk("Jumping to bytecode offset %u\n",
1094 (unsigned int) insn
->skip_offset
);
1095 next_pc
= start_pc
+ insn
->skip_offset
;
1097 /* Pop 1 when jump not taken */
1098 estack_pop(stack
, top
, ax
, bx
);
1099 next_pc
+= sizeof(struct logical_op
);
1105 struct logical_op
*insn
= (struct logical_op
*) pc
;
1107 /* If AX is nonzero, skip and evaluate to 1 */
1109 if (unlikely(estack_ax_v
!= 0)) {
1111 dbg_printk("Jumping to bytecode offset %u\n",
1112 (unsigned int) insn
->skip_offset
);
1113 next_pc
= start_pc
+ insn
->skip_offset
;
1115 /* Pop 1 when jump not taken */
1116 estack_pop(stack
, top
, ax
, bx
);
1117 next_pc
+= sizeof(struct logical_op
);
1123 /* load field ref */
1124 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
1126 struct load_op
*insn
= (struct load_op
*) pc
;
1127 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1129 dbg_printk("load field ref offset %u type string\n",
1131 estack_push(stack
, top
, ax
, bx
);
1132 estack_ax(stack
, top
)->u
.s
.str
=
1133 *(const char * const *) &filter_stack_data
[ref
->offset
];
1134 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1135 dbg_printk("Filter warning: loading a NULL string.\n");
1139 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1140 estack_ax(stack
, top
)->u
.s
.literal_type
=
1141 ESTACK_STRING_LITERAL_TYPE_NONE
;
1142 estack_ax(stack
, top
)->u
.s
.user
= 0;
1143 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1144 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1148 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
1150 struct load_op
*insn
= (struct load_op
*) pc
;
1151 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1153 dbg_printk("load field ref offset %u type sequence\n",
1155 estack_push(stack
, top
, ax
, bx
);
1156 estack_ax(stack
, top
)->u
.s
.seq_len
=
1157 *(unsigned long *) &filter_stack_data
[ref
->offset
];
1158 estack_ax(stack
, top
)->u
.s
.str
=
1159 *(const char **) (&filter_stack_data
[ref
->offset
1160 + sizeof(unsigned long)]);
1161 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1162 dbg_printk("Filter warning: loading a NULL sequence.\n");
1166 estack_ax(stack
, top
)->u
.s
.literal_type
=
1167 ESTACK_STRING_LITERAL_TYPE_NONE
;
1168 estack_ax(stack
, top
)->u
.s
.user
= 0;
1169 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1173 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
1175 struct load_op
*insn
= (struct load_op
*) pc
;
1176 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1178 dbg_printk("load field ref offset %u type s64\n",
1180 estack_push(stack
, top
, ax
, bx
);
1182 ((struct literal_numeric
*) &filter_stack_data
[ref
->offset
])->v
;
1183 dbg_printk("ref load s64 %lld\n",
1184 (long long) estack_ax_v
);
1185 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1189 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
1195 /* load from immediate operand */
1196 OP(FILTER_OP_LOAD_STRING
):
1198 struct load_op
*insn
= (struct load_op
*) pc
;
1200 dbg_printk("load string %s\n", insn
->data
);
1201 estack_push(stack
, top
, ax
, bx
);
1202 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1203 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1204 estack_ax(stack
, top
)->u
.s
.literal_type
=
1205 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
1206 estack_ax(stack
, top
)->u
.s
.user
= 0;
1207 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1211 OP(FILTER_OP_LOAD_STAR_GLOB_STRING
):
1213 struct load_op
*insn
= (struct load_op
*) pc
;
1215 dbg_printk("load globbing pattern %s\n", insn
->data
);
1216 estack_push(stack
, top
, ax
, bx
);
1217 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1218 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1219 estack_ax(stack
, top
)->u
.s
.literal_type
=
1220 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
1221 estack_ax(stack
, top
)->u
.s
.user
= 0;
1222 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1226 OP(FILTER_OP_LOAD_S64
):
1228 struct load_op
*insn
= (struct load_op
*) pc
;
1230 estack_push(stack
, top
, ax
, bx
);
1231 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
1232 dbg_printk("load s64 %lld\n",
1233 (long long) estack_ax_v
);
1234 next_pc
+= sizeof(struct load_op
)
1235 + sizeof(struct literal_numeric
);
1239 OP(FILTER_OP_LOAD_DOUBLE
):
1246 OP(FILTER_OP_CAST_TO_S64
):
1247 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
1248 (unsigned int) *(filter_opcode_t
*) pc
);
1252 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
1258 OP(FILTER_OP_CAST_NOP
):
1260 next_pc
+= sizeof(struct cast_op
);
1264 /* get context ref */
1265 OP(FILTER_OP_GET_CONTEXT_REF_STRING
):
1267 struct load_op
*insn
= (struct load_op
*) pc
;
1268 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1269 struct lttng_ctx_field
*ctx_field
;
1270 union lttng_ctx_value v
;
1272 dbg_printk("get context ref offset %u type string\n",
1274 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1275 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1276 estack_push(stack
, top
, ax
, bx
);
1277 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
1278 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1279 dbg_printk("Filter warning: loading a NULL string.\n");
1283 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1284 estack_ax(stack
, top
)->u
.s
.literal_type
=
1285 ESTACK_STRING_LITERAL_TYPE_NONE
;
1286 estack_ax(stack
, top
)->u
.s
.user
= 0;
1287 dbg_printk("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1288 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1292 OP(FILTER_OP_GET_CONTEXT_REF_S64
):
1294 struct load_op
*insn
= (struct load_op
*) pc
;
1295 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1296 struct lttng_ctx_field
*ctx_field
;
1297 union lttng_ctx_value v
;
1299 dbg_printk("get context ref offset %u type s64\n",
1301 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1302 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1303 estack_push(stack
, top
, ax
, bx
);
1304 estack_ax_v
= v
.s64
;
1305 dbg_printk("ref get context s64 %lld\n",
1306 (long long) estack_ax_v
);
1307 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1311 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE
):
1317 /* load userspace field ref */
1318 OP(FILTER_OP_LOAD_FIELD_REF_USER_STRING
):
1320 struct load_op
*insn
= (struct load_op
*) pc
;
1321 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1323 dbg_printk("load field ref offset %u type user string\n",
1325 estack_push(stack
, top
, ax
, bx
);
1326 estack_ax(stack
, top
)->u
.s
.user_str
=
1327 *(const char * const *) &filter_stack_data
[ref
->offset
];
1328 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1329 dbg_printk("Filter warning: loading a NULL string.\n");
1333 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1334 estack_ax(stack
, top
)->u
.s
.literal_type
=
1335 ESTACK_STRING_LITERAL_TYPE_NONE
;
1336 estack_ax(stack
, top
)->u
.s
.user
= 1;
1337 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1338 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1342 OP(FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
):
1344 struct load_op
*insn
= (struct load_op
*) pc
;
1345 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1347 dbg_printk("load field ref offset %u type user sequence\n",
1349 estack_push(stack
, top
, ax
, bx
);
1350 estack_ax(stack
, top
)->u
.s
.seq_len
=
1351 *(unsigned long *) &filter_stack_data
[ref
->offset
];
1352 estack_ax(stack
, top
)->u
.s
.user_str
=
1353 *(const char **) (&filter_stack_data
[ref
->offset
1354 + sizeof(unsigned long)]);
1355 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1356 dbg_printk("Filter warning: loading a NULL sequence.\n");
1360 estack_ax(stack
, top
)->u
.s
.literal_type
=
1361 ESTACK_STRING_LITERAL_TYPE_NONE
;
1362 estack_ax(stack
, top
)->u
.s
.user
= 1;
1363 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1367 OP(FILTER_OP_GET_CONTEXT_ROOT
):
1369 dbg_printk("op get context root\n");
1370 estack_push(stack
, top
, ax
, bx
);
1371 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
1372 /* "field" only needed for variants. */
1373 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1374 next_pc
+= sizeof(struct load_op
);
1378 OP(FILTER_OP_GET_APP_CONTEXT_ROOT
):
1384 OP(FILTER_OP_GET_PAYLOAD_ROOT
):
1386 dbg_printk("op get app payload root\n");
1387 estack_push(stack
, top
, ax
, bx
);
1388 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
1389 estack_ax(stack
, top
)->u
.ptr
.ptr
= filter_stack_data
;
1390 /* "field" only needed for variants. */
1391 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1392 next_pc
+= sizeof(struct load_op
);
1396 OP(FILTER_OP_GET_SYMBOL
):
1398 dbg_printk("op get symbol\n");
1399 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
1401 printk(KERN_WARNING
"Nested fields not implemented yet.\n");
1404 case LOAD_ROOT_CONTEXT
:
1405 case LOAD_ROOT_APP_CONTEXT
:
1406 case LOAD_ROOT_PAYLOAD
:
1408 * symbol lookup is performed by
1414 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1418 OP(FILTER_OP_GET_SYMBOL_FIELD
):
1421 * Used for first variant encountered in a
1422 * traversal. Variants are not implemented yet.
1428 OP(FILTER_OP_GET_INDEX_U16
):
1430 struct load_op
*insn
= (struct load_op
*) pc
;
1431 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1433 dbg_printk("op get index u16\n");
1434 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1437 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1438 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1442 OP(FILTER_OP_GET_INDEX_U64
):
1444 struct load_op
*insn
= (struct load_op
*) pc
;
1445 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1447 dbg_printk("op get index u64\n");
1448 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1451 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1452 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1456 OP(FILTER_OP_LOAD_FIELD
):
1458 dbg_printk("op load field\n");
1459 ret
= dynamic_load_field(estack_ax(stack
, top
));
1462 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1463 next_pc
+= sizeof(struct load_op
);
1467 OP(FILTER_OP_LOAD_FIELD_S8
):
1469 dbg_printk("op load field s8\n");
1471 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1472 next_pc
+= sizeof(struct load_op
);
1475 OP(FILTER_OP_LOAD_FIELD_S16
):
1477 dbg_printk("op load field s16\n");
1479 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1480 next_pc
+= sizeof(struct load_op
);
1483 OP(FILTER_OP_LOAD_FIELD_S32
):
1485 dbg_printk("op load field s32\n");
1487 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1488 next_pc
+= sizeof(struct load_op
);
1491 OP(FILTER_OP_LOAD_FIELD_S64
):
1493 dbg_printk("op load field s64\n");
1495 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1496 next_pc
+= sizeof(struct load_op
);
1499 OP(FILTER_OP_LOAD_FIELD_U8
):
1501 dbg_printk("op load field u8\n");
1503 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1504 next_pc
+= sizeof(struct load_op
);
1507 OP(FILTER_OP_LOAD_FIELD_U16
):
1509 dbg_printk("op load field u16\n");
1511 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1512 next_pc
+= sizeof(struct load_op
);
1515 OP(FILTER_OP_LOAD_FIELD_U32
):
1517 dbg_printk("op load field u32\n");
1519 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1520 next_pc
+= sizeof(struct load_op
);
1523 OP(FILTER_OP_LOAD_FIELD_U64
):
1525 dbg_printk("op load field u64\n");
1527 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1528 next_pc
+= sizeof(struct load_op
);
1531 OP(FILTER_OP_LOAD_FIELD_DOUBLE
):
1537 OP(FILTER_OP_LOAD_FIELD_STRING
):
1541 dbg_printk("op load field string\n");
1542 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1543 estack_ax(stack
, top
)->u
.s
.str
= str
;
1544 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1545 dbg_printk("Filter warning: loading a NULL string.\n");
1549 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
1550 estack_ax(stack
, top
)->u
.s
.literal_type
=
1551 ESTACK_STRING_LITERAL_TYPE_NONE
;
1552 next_pc
+= sizeof(struct load_op
);
1556 OP(FILTER_OP_LOAD_FIELD_SEQUENCE
):
1560 dbg_printk("op load field string sequence\n");
1561 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
1562 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
1563 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
1564 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1565 dbg_printk("Filter warning: loading a NULL sequence.\n");
1569 estack_ax(stack
, top
)->u
.s
.literal_type
=
1570 ESTACK_STRING_LITERAL_TYPE_NONE
;
1571 next_pc
+= sizeof(struct load_op
);
1577 /* return 0 (discard) on error */