1 /* SPDX-License-Identifier: MIT
3 * lttng-filter-interpreter.c
5 * LTTng modules filter interpreter.
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
10 #include <wrapper/uaccess.h>
11 #include <wrapper/frame.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
15 #include <lttng/filter.h>
16 #include <lttng/string-utils.h>
/*
 * The interpreter uses computed gotos (address-of-label dispatch), which
 * objtool flags as a non-standard stack frame; annotate it as such.
 */
LTTNG_STACK_FRAME_NON_STANDARD(lttng_filter_interpret_bytecode);
21 * get_char should be called with page fault handler disabled if it is expected
22 * to handle user-space read.
25 char get_char(struct estack_entry
*reg
, size_t offset
)
27 if (unlikely(offset
>= reg
->u
.s
.seq_len
))
32 /* Handle invalid access as end of string. */
33 if (unlikely(!lttng_access_ok(VERIFY_READ
,
34 reg
->u
.s
.user_str
+ offset
,
37 /* Handle fault (nonzero return value) as end of string. */
38 if (unlikely(__copy_from_user_inatomic(&c
,
39 reg
->u
.s
.user_str
+ offset
,
44 return reg
->u
.s
.str
[offset
];
50 * -2: unknown escape char.
54 int parse_char(struct estack_entry
*reg
, char *c
, size_t *offset
)
59 *c
= get_char(reg
, *offset
);
75 char get_char_at_cb(size_t at
, void *data
)
77 return get_char(data
, at
);
81 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
83 bool has_user
= false;
85 struct estack_entry
*pattern_reg
;
86 struct estack_entry
*candidate_reg
;
88 /* Disable the page fault handler when reading from userspace. */
89 if (estack_bx(stack
, top
)->u
.s
.user
90 || estack_ax(stack
, top
)->u
.s
.user
) {
95 /* Find out which side is the pattern vs. the candidate. */
96 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
97 pattern_reg
= estack_ax(stack
, top
);
98 candidate_reg
= estack_bx(stack
, top
);
100 pattern_reg
= estack_bx(stack
, top
);
101 candidate_reg
= estack_ax(stack
, top
);
104 /* Perform the match operation. */
105 result
= !strutils_star_glob_match_char_cb(get_char_at_cb
,
106 pattern_reg
, get_char_at_cb
, candidate_reg
);
114 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
116 size_t offset_bx
= 0, offset_ax
= 0;
117 int diff
, has_user
= 0;
119 if (estack_bx(stack
, top
)->u
.s
.user
120 || estack_ax(stack
, top
)->u
.s
.user
) {
128 char char_bx
, char_ax
;
130 char_bx
= get_char(estack_bx(stack
, top
), offset_bx
);
131 char_ax
= get_char(estack_ax(stack
, top
), offset_ax
);
133 if (unlikely(char_bx
== '\0')) {
134 if (char_ax
== '\0') {
138 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
139 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
140 ret
= parse_char(estack_ax(stack
, top
),
141 &char_ax
, &offset_ax
);
151 if (unlikely(char_ax
== '\0')) {
152 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
153 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
154 ret
= parse_char(estack_bx(stack
, top
),
155 &char_bx
, &offset_bx
);
164 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
165 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
166 ret
= parse_char(estack_bx(stack
, top
),
167 &char_bx
, &offset_bx
);
171 } else if (ret
== -2) {
174 /* else compare both char */
176 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
177 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
178 ret
= parse_char(estack_ax(stack
, top
),
179 &char_ax
, &offset_ax
);
183 } else if (ret
== -2) {
200 diff
= char_bx
- char_ax
;
212 uint64_t lttng_filter_false(void *filter_data
,
213 struct lttng_probe_ctx
*lttng_probe_ctx
,
214 const char *filter_stack_data
)
216 return LTTNG_FILTER_DISCARD
;
#ifdef INTERPRETER_USE_SWITCH

/*
 * Fallback for compilers that do not support taking address of labels.
 */

#define START_OP							\
	start_pc = &bytecode->data[0];					\
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;	\
			pc = next_pc) {					\
		dbg_printk("LTTng: Executing op %s (%u)\n",		\
			lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
			(unsigned int) *(filter_opcode_t *) pc);	\
		switch (*(filter_opcode_t *) pc) {

#define OP(name)	case name

#define PO		break

#define END_OP		}						\
	}

#else

/*
 * Dispatch-table based interpreter.
 */

#define START_OP							\
	start_pc = &bytecode->code[0];					\
	pc = next_pc = start_pc;					\
	if (unlikely(pc - start_pc >= bytecode->len))			\
		goto end;						\
	goto *dispatch[*(filter_opcode_t *) pc];

#define OP(name)							\
LABEL_##name

#define PO								\
		goto *dispatch[*(filter_opcode_t *) pc];

#define END_OP

#endif
265 static int context_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
266 struct load_ptr
*ptr
,
270 struct lttng_ctx_field
*ctx_field
;
271 struct lttng_event_field
*field
;
272 union lttng_ctx_value v
;
274 ctx_field
= <tng_static_ctx
->fields
[idx
];
275 field
= &ctx_field
->event_field
;
276 ptr
->type
= LOAD_OBJECT
;
277 /* field is only used for types nested within variants. */
280 switch (field
->type
.atype
) {
282 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
283 if (field
->type
.u
.integer
.signedness
) {
284 ptr
->object_type
= OBJECT_TYPE_S64
;
286 ptr
->ptr
= &ptr
->u
.s64
;
288 ptr
->object_type
= OBJECT_TYPE_U64
;
289 ptr
->u
.u64
= v
.s64
; /* Cast. */
290 ptr
->ptr
= &ptr
->u
.u64
;
293 case atype_enum_nestable
:
295 const struct lttng_integer_type
*itype
=
296 &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
298 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
299 if (itype
->signedness
) {
300 ptr
->object_type
= OBJECT_TYPE_S64
;
302 ptr
->ptr
= &ptr
->u
.s64
;
304 ptr
->object_type
= OBJECT_TYPE_U64
;
305 ptr
->u
.u64
= v
.s64
; /* Cast. */
306 ptr
->ptr
= &ptr
->u
.u64
;
310 case atype_array_nestable
:
311 if (!lttng_is_bytewise_integer(field
->type
.u
.array_nestable
.elem_type
)) {
312 printk(KERN_WARNING
"LTTng: filter: Array nesting only supports integer types.\n");
315 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
316 printk(KERN_WARNING
"LTTng: filter: Only string arrays are supported for contexts.\n");
319 ptr
->object_type
= OBJECT_TYPE_STRING
;
320 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
323 case atype_sequence_nestable
:
324 if (!lttng_is_bytewise_integer(field
->type
.u
.sequence_nestable
.elem_type
)) {
325 printk(KERN_WARNING
"LTTng: filter: Sequence nesting only supports integer types.\n");
328 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
329 printk(KERN_WARNING
"LTTng: filter: Only string sequences are supported for contexts.\n");
332 ptr
->object_type
= OBJECT_TYPE_STRING
;
333 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
337 ptr
->object_type
= OBJECT_TYPE_STRING
;
338 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
341 case atype_struct_nestable
:
342 printk(KERN_WARNING
"LTTng: filter: Structure type cannot be loaded.\n");
344 case atype_variant_nestable
:
345 printk(KERN_WARNING
"LTTng: filter: Variant type cannot be loaded.\n");
348 printk(KERN_WARNING
"LTTng: filter: Unknown type: %d", (int) field
->type
.atype
);
354 static int dynamic_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
355 struct bytecode_runtime
*runtime
,
356 uint64_t index
, struct estack_entry
*stack_top
)
359 const struct filter_get_index_data
*gid
;
362 * Types nested within variants need to perform dynamic lookup
363 * based on the field descriptions. LTTng-UST does not implement
366 if (stack_top
->u
.ptr
.field
)
368 gid
= (const struct filter_get_index_data
*) &runtime
->data
[index
];
369 switch (stack_top
->u
.ptr
.type
) {
371 switch (stack_top
->u
.ptr
.object_type
) {
372 case OBJECT_TYPE_ARRAY
:
376 WARN_ON_ONCE(gid
->offset
>= gid
->array_len
);
377 /* Skip count (unsigned long) */
378 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
379 ptr
= ptr
+ gid
->offset
;
380 stack_top
->u
.ptr
.ptr
= ptr
;
381 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
382 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
383 /* field is only used for types nested within variants. */
384 stack_top
->u
.ptr
.field
= NULL
;
387 case OBJECT_TYPE_SEQUENCE
:
392 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
393 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
394 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
398 ptr
= ptr
+ gid
->offset
;
399 stack_top
->u
.ptr
.ptr
= ptr
;
400 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
401 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
402 /* field is only used for types nested within variants. */
403 stack_top
->u
.ptr
.field
= NULL
;
406 case OBJECT_TYPE_STRUCT
:
407 printk(KERN_WARNING
"LTTng: filter: Nested structures are not supported yet.\n");
410 case OBJECT_TYPE_VARIANT
:
412 printk(KERN_WARNING
"LTTng: filter: Unexpected get index type %d",
413 (int) stack_top
->u
.ptr
.object_type
);
418 case LOAD_ROOT_CONTEXT
:
419 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
421 ret
= context_get_index(lttng_probe_ctx
,
429 case LOAD_ROOT_PAYLOAD
:
430 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
431 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
432 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
433 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
434 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
435 /* field is only used for types nested within variants. */
436 stack_top
->u
.ptr
.field
= NULL
;
445 static int dynamic_load_field(struct estack_entry
*stack_top
)
449 switch (stack_top
->u
.ptr
.type
) {
452 case LOAD_ROOT_CONTEXT
:
453 case LOAD_ROOT_APP_CONTEXT
:
454 case LOAD_ROOT_PAYLOAD
:
456 dbg_printk("Filter warning: cannot load root, missing field name.\n");
460 switch (stack_top
->u
.ptr
.object_type
) {
462 dbg_printk("op load field s8\n");
463 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
465 case OBJECT_TYPE_S16
:
469 dbg_printk("op load field s16\n");
470 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
471 if (stack_top
->u
.ptr
.rev_bo
)
473 stack_top
->u
.v
= tmp
;
476 case OBJECT_TYPE_S32
:
480 dbg_printk("op load field s32\n");
481 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
482 if (stack_top
->u
.ptr
.rev_bo
)
484 stack_top
->u
.v
= tmp
;
487 case OBJECT_TYPE_S64
:
491 dbg_printk("op load field s64\n");
492 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
493 if (stack_top
->u
.ptr
.rev_bo
)
495 stack_top
->u
.v
= tmp
;
499 dbg_printk("op load field u8\n");
500 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
502 case OBJECT_TYPE_U16
:
506 dbg_printk("op load field u16\n");
507 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
508 if (stack_top
->u
.ptr
.rev_bo
)
510 stack_top
->u
.v
= tmp
;
513 case OBJECT_TYPE_U32
:
517 dbg_printk("op load field u32\n");
518 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
519 if (stack_top
->u
.ptr
.rev_bo
)
521 stack_top
->u
.v
= tmp
;
524 case OBJECT_TYPE_U64
:
528 dbg_printk("op load field u64\n");
529 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
530 if (stack_top
->u
.ptr
.rev_bo
)
532 stack_top
->u
.v
= tmp
;
535 case OBJECT_TYPE_STRING
:
539 dbg_printk("op load field string\n");
540 str
= (const char *) stack_top
->u
.ptr
.ptr
;
541 stack_top
->u
.s
.str
= str
;
542 if (unlikely(!stack_top
->u
.s
.str
)) {
543 dbg_printk("Filter warning: loading a NULL string.\n");
547 stack_top
->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
548 stack_top
->u
.s
.literal_type
=
549 ESTACK_STRING_LITERAL_TYPE_NONE
;
552 case OBJECT_TYPE_STRING_SEQUENCE
:
556 dbg_printk("op load field string sequence\n");
557 ptr
= stack_top
->u
.ptr
.ptr
;
558 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
559 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
560 if (unlikely(!stack_top
->u
.s
.str
)) {
561 dbg_printk("Filter warning: loading a NULL sequence.\n");
565 stack_top
->u
.s
.literal_type
=
566 ESTACK_STRING_LITERAL_TYPE_NONE
;
569 case OBJECT_TYPE_DYNAMIC
:
571 * Dynamic types in context are looked up
572 * by context get index.
576 case OBJECT_TYPE_DOUBLE
:
579 case OBJECT_TYPE_SEQUENCE
:
580 case OBJECT_TYPE_ARRAY
:
581 case OBJECT_TYPE_STRUCT
:
582 case OBJECT_TYPE_VARIANT
:
583 printk(KERN_WARNING
"LTTng: filter: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
594 * Return 0 (discard), or raise the 0x1 flag (log event).
595 * Currently, other flags are kept for future extensions and have no
598 uint64_t lttng_filter_interpret_bytecode(void *filter_data
,
599 struct lttng_probe_ctx
*lttng_probe_ctx
,
600 const char *filter_stack_data
)
602 struct bytecode_runtime
*bytecode
= filter_data
;
603 void *pc
, *next_pc
, *start_pc
;
606 struct estack _stack
;
607 struct estack
*stack
= &_stack
;
608 register int64_t ax
= 0, bx
= 0;
609 register int top
= FILTER_STACK_EMPTY
;
610 #ifndef INTERPRETER_USE_SWITCH
611 static void *dispatch
[NR_FILTER_OPS
] = {
612 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
614 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
617 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
618 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
619 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
620 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
621 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
622 [ FILTER_OP_BIT_RSHIFT
] = &&LABEL_FILTER_OP_BIT_RSHIFT
,
623 [ FILTER_OP_BIT_LSHIFT
] = &&LABEL_FILTER_OP_BIT_LSHIFT
,
624 [ FILTER_OP_BIT_AND
] = &&LABEL_FILTER_OP_BIT_AND
,
625 [ FILTER_OP_BIT_OR
] = &&LABEL_FILTER_OP_BIT_OR
,
626 [ FILTER_OP_BIT_XOR
] = &&LABEL_FILTER_OP_BIT_XOR
,
628 /* binary comparators */
629 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
630 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
631 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
632 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
633 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
634 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
636 /* string binary comparator */
637 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
638 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
639 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
640 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
641 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
642 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
644 /* globbing pattern binary comparator */
645 [ FILTER_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING
,
646 [ FILTER_OP_NE_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING
,
648 /* s64 binary comparator */
649 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
650 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
651 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
652 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
653 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
654 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
656 /* double binary comparator */
657 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
658 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
659 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
660 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
661 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
662 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
664 /* Mixed S64-double binary comparators */
665 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
666 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
667 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
668 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
669 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
670 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
672 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
673 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
674 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
675 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
676 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
677 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
680 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
681 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
682 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
683 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
684 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
685 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
686 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
687 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
688 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
691 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
692 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
695 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
696 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
697 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
698 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
699 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
701 /* load from immediate operand */
702 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
703 [ FILTER_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING
,
704 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
705 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
708 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
709 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
710 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
712 /* get context ref */
713 [ FILTER_OP_GET_CONTEXT_REF
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF
,
714 [ FILTER_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING
,
715 [ FILTER_OP_GET_CONTEXT_REF_S64
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64
,
716 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE
,
718 /* load userspace field ref */
719 [ FILTER_OP_LOAD_FIELD_REF_USER_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_STRING
,
720 [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
,
722 /* Instructions for recursive traversal through composed types. */
723 [ FILTER_OP_GET_CONTEXT_ROOT
] = &&LABEL_FILTER_OP_GET_CONTEXT_ROOT
,
724 [ FILTER_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_FILTER_OP_GET_APP_CONTEXT_ROOT
,
725 [ FILTER_OP_GET_PAYLOAD_ROOT
] = &&LABEL_FILTER_OP_GET_PAYLOAD_ROOT
,
727 [ FILTER_OP_GET_SYMBOL
] = &&LABEL_FILTER_OP_GET_SYMBOL
,
728 [ FILTER_OP_GET_SYMBOL_FIELD
] = &&LABEL_FILTER_OP_GET_SYMBOL_FIELD
,
729 [ FILTER_OP_GET_INDEX_U16
] = &&LABEL_FILTER_OP_GET_INDEX_U16
,
730 [ FILTER_OP_GET_INDEX_U64
] = &&LABEL_FILTER_OP_GET_INDEX_U64
,
732 [ FILTER_OP_LOAD_FIELD
] = &&LABEL_FILTER_OP_LOAD_FIELD
,
733 [ FILTER_OP_LOAD_FIELD_S8
] = &&LABEL_FILTER_OP_LOAD_FIELD_S8
,
734 [ FILTER_OP_LOAD_FIELD_S16
] = &&LABEL_FILTER_OP_LOAD_FIELD_S16
,
735 [ FILTER_OP_LOAD_FIELD_S32
] = &&LABEL_FILTER_OP_LOAD_FIELD_S32
,
736 [ FILTER_OP_LOAD_FIELD_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_S64
,
737 [ FILTER_OP_LOAD_FIELD_U8
] = &&LABEL_FILTER_OP_LOAD_FIELD_U8
,
738 [ FILTER_OP_LOAD_FIELD_U16
] = &&LABEL_FILTER_OP_LOAD_FIELD_U16
,
739 [ FILTER_OP_LOAD_FIELD_U32
] = &&LABEL_FILTER_OP_LOAD_FIELD_U32
,
740 [ FILTER_OP_LOAD_FIELD_U64
] = &&LABEL_FILTER_OP_LOAD_FIELD_U64
,
741 [ FILTER_OP_LOAD_FIELD_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_STRING
,
742 [ FILTER_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_SEQUENCE
,
743 [ FILTER_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_DOUBLE
,
745 [ FILTER_OP_UNARY_BIT_NOT
] = &&LABEL_FILTER_OP_UNARY_BIT_NOT
,
747 [ FILTER_OP_RETURN_S64
] = &&LABEL_FILTER_OP_RETURN_S64
,
749 #endif /* #ifndef INTERPRETER_USE_SWITCH */
753 OP(FILTER_OP_UNKNOWN
):
754 OP(FILTER_OP_LOAD_FIELD_REF
):
755 OP(FILTER_OP_GET_CONTEXT_REF
):
756 #ifdef INTERPRETER_USE_SWITCH
758 #endif /* INTERPRETER_USE_SWITCH */
759 printk(KERN_WARNING
"LTTng: filter: unknown bytecode op %u\n",
760 (unsigned int) *(filter_opcode_t
*) pc
);
764 OP(FILTER_OP_RETURN
):
765 OP(FILTER_OP_RETURN_S64
):
766 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
767 retval
= !!estack_ax_v
;
777 printk(KERN_WARNING
"LTTng: filter: unsupported bytecode op %u\n",
778 (unsigned int) *(filter_opcode_t
*) pc
);
788 printk(KERN_WARNING
"LTTng: filter: unsupported non-specialized bytecode op %u\n",
789 (unsigned int) *(filter_opcode_t
*) pc
);
793 OP(FILTER_OP_EQ_STRING
):
797 res
= (stack_strcmp(stack
, top
, "==") == 0);
798 estack_pop(stack
, top
, ax
, bx
);
800 next_pc
+= sizeof(struct binary_op
);
803 OP(FILTER_OP_NE_STRING
):
807 res
= (stack_strcmp(stack
, top
, "!=") != 0);
808 estack_pop(stack
, top
, ax
, bx
);
810 next_pc
+= sizeof(struct binary_op
);
813 OP(FILTER_OP_GT_STRING
):
817 res
= (stack_strcmp(stack
, top
, ">") > 0);
818 estack_pop(stack
, top
, ax
, bx
);
820 next_pc
+= sizeof(struct binary_op
);
823 OP(FILTER_OP_LT_STRING
):
827 res
= (stack_strcmp(stack
, top
, "<") < 0);
828 estack_pop(stack
, top
, ax
, bx
);
830 next_pc
+= sizeof(struct binary_op
);
833 OP(FILTER_OP_GE_STRING
):
837 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
838 estack_pop(stack
, top
, ax
, bx
);
840 next_pc
+= sizeof(struct binary_op
);
843 OP(FILTER_OP_LE_STRING
):
847 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
848 estack_pop(stack
, top
, ax
, bx
);
850 next_pc
+= sizeof(struct binary_op
);
854 OP(FILTER_OP_EQ_STAR_GLOB_STRING
):
858 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
859 estack_pop(stack
, top
, ax
, bx
);
861 next_pc
+= sizeof(struct binary_op
);
864 OP(FILTER_OP_NE_STAR_GLOB_STRING
):
868 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
869 estack_pop(stack
, top
, ax
, bx
);
871 next_pc
+= sizeof(struct binary_op
);
875 OP(FILTER_OP_EQ_S64
):
879 res
= (estack_bx_v
== estack_ax_v
);
880 estack_pop(stack
, top
, ax
, bx
);
882 next_pc
+= sizeof(struct binary_op
);
885 OP(FILTER_OP_NE_S64
):
889 res
= (estack_bx_v
!= estack_ax_v
);
890 estack_pop(stack
, top
, ax
, bx
);
892 next_pc
+= sizeof(struct binary_op
);
895 OP(FILTER_OP_GT_S64
):
899 res
= (estack_bx_v
> estack_ax_v
);
900 estack_pop(stack
, top
, ax
, bx
);
902 next_pc
+= sizeof(struct binary_op
);
905 OP(FILTER_OP_LT_S64
):
909 res
= (estack_bx_v
< estack_ax_v
);
910 estack_pop(stack
, top
, ax
, bx
);
912 next_pc
+= sizeof(struct binary_op
);
915 OP(FILTER_OP_GE_S64
):
919 res
= (estack_bx_v
>= estack_ax_v
);
920 estack_pop(stack
, top
, ax
, bx
);
922 next_pc
+= sizeof(struct binary_op
);
925 OP(FILTER_OP_LE_S64
):
929 res
= (estack_bx_v
<= estack_ax_v
);
930 estack_pop(stack
, top
, ax
, bx
);
932 next_pc
+= sizeof(struct binary_op
);
936 OP(FILTER_OP_EQ_DOUBLE
):
937 OP(FILTER_OP_NE_DOUBLE
):
938 OP(FILTER_OP_GT_DOUBLE
):
939 OP(FILTER_OP_LT_DOUBLE
):
940 OP(FILTER_OP_GE_DOUBLE
):
941 OP(FILTER_OP_LE_DOUBLE
):
947 /* Mixed S64-double binary comparators */
948 OP(FILTER_OP_EQ_DOUBLE_S64
):
949 OP(FILTER_OP_NE_DOUBLE_S64
):
950 OP(FILTER_OP_GT_DOUBLE_S64
):
951 OP(FILTER_OP_LT_DOUBLE_S64
):
952 OP(FILTER_OP_GE_DOUBLE_S64
):
953 OP(FILTER_OP_LE_DOUBLE_S64
):
954 OP(FILTER_OP_EQ_S64_DOUBLE
):
955 OP(FILTER_OP_NE_S64_DOUBLE
):
956 OP(FILTER_OP_GT_S64_DOUBLE
):
957 OP(FILTER_OP_LT_S64_DOUBLE
):
958 OP(FILTER_OP_GE_S64_DOUBLE
):
959 OP(FILTER_OP_LE_S64_DOUBLE
):
964 OP(FILTER_OP_BIT_RSHIFT
):
968 /* Catch undefined behavior. */
969 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
973 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
974 estack_pop(stack
, top
, ax
, bx
);
976 next_pc
+= sizeof(struct binary_op
);
979 OP(FILTER_OP_BIT_LSHIFT
):
983 /* Catch undefined behavior. */
984 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
988 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
989 estack_pop(stack
, top
, ax
, bx
);
991 next_pc
+= sizeof(struct binary_op
);
994 OP(FILTER_OP_BIT_AND
):
998 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
999 estack_pop(stack
, top
, ax
, bx
);
1001 next_pc
+= sizeof(struct binary_op
);
1004 OP(FILTER_OP_BIT_OR
):
1008 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1009 estack_pop(stack
, top
, ax
, bx
);
1011 next_pc
+= sizeof(struct binary_op
);
1014 OP(FILTER_OP_BIT_XOR
):
1018 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1019 estack_pop(stack
, top
, ax
, bx
);
1021 next_pc
+= sizeof(struct binary_op
);
1026 OP(FILTER_OP_UNARY_PLUS
):
1027 OP(FILTER_OP_UNARY_MINUS
):
1028 OP(FILTER_OP_UNARY_NOT
):
1029 printk(KERN_WARNING
"LTTng: filter: unsupported non-specialized bytecode op %u\n",
1030 (unsigned int) *(filter_opcode_t
*) pc
);
1035 OP(FILTER_OP_UNARY_BIT_NOT
):
1037 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1038 next_pc
+= sizeof(struct unary_op
);
1042 OP(FILTER_OP_UNARY_PLUS_S64
):
1044 next_pc
+= sizeof(struct unary_op
);
1047 OP(FILTER_OP_UNARY_MINUS_S64
):
1049 estack_ax_v
= -estack_ax_v
;
1050 next_pc
+= sizeof(struct unary_op
);
1053 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
1054 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
1059 OP(FILTER_OP_UNARY_NOT_S64
):
1061 estack_ax_v
= !estack_ax_v
;
1062 next_pc
+= sizeof(struct unary_op
);
1065 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
1074 struct logical_op
*insn
= (struct logical_op
*) pc
;
1076 /* If AX is 0, skip and evaluate to 0 */
1077 if (unlikely(estack_ax_v
== 0)) {
1078 dbg_printk("Jumping to bytecode offset %u\n",
1079 (unsigned int) insn
->skip_offset
);
1080 next_pc
= start_pc
+ insn
->skip_offset
;
1082 /* Pop 1 when jump not taken */
1083 estack_pop(stack
, top
, ax
, bx
);
1084 next_pc
+= sizeof(struct logical_op
);
1090 struct logical_op
*insn
= (struct logical_op
*) pc
;
1092 /* If AX is nonzero, skip and evaluate to 1 */
1094 if (unlikely(estack_ax_v
!= 0)) {
1096 dbg_printk("Jumping to bytecode offset %u\n",
1097 (unsigned int) insn
->skip_offset
);
1098 next_pc
= start_pc
+ insn
->skip_offset
;
1100 /* Pop 1 when jump not taken */
1101 estack_pop(stack
, top
, ax
, bx
);
1102 next_pc
+= sizeof(struct logical_op
);
1108 /* load field ref */
1109 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
1111 struct load_op
*insn
= (struct load_op
*) pc
;
1112 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1114 dbg_printk("load field ref offset %u type string\n",
1116 estack_push(stack
, top
, ax
, bx
);
1117 estack_ax(stack
, top
)->u
.s
.str
=
1118 *(const char * const *) &filter_stack_data
[ref
->offset
];
1119 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1120 dbg_printk("Filter warning: loading a NULL string.\n");
1124 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1125 estack_ax(stack
, top
)->u
.s
.literal_type
=
1126 ESTACK_STRING_LITERAL_TYPE_NONE
;
1127 estack_ax(stack
, top
)->u
.s
.user
= 0;
1128 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1129 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1133 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
1135 struct load_op
*insn
= (struct load_op
*) pc
;
1136 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1138 dbg_printk("load field ref offset %u type sequence\n",
1140 estack_push(stack
, top
, ax
, bx
);
1141 estack_ax(stack
, top
)->u
.s
.seq_len
=
1142 *(unsigned long *) &filter_stack_data
[ref
->offset
];
1143 estack_ax(stack
, top
)->u
.s
.str
=
1144 *(const char **) (&filter_stack_data
[ref
->offset
1145 + sizeof(unsigned long)]);
1146 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1147 dbg_printk("Filter warning: loading a NULL sequence.\n");
1151 estack_ax(stack
, top
)->u
.s
.literal_type
=
1152 ESTACK_STRING_LITERAL_TYPE_NONE
;
1153 estack_ax(stack
, top
)->u
.s
.user
= 0;
1154 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1158 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
1160 struct load_op
*insn
= (struct load_op
*) pc
;
1161 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1163 dbg_printk("load field ref offset %u type s64\n",
1165 estack_push(stack
, top
, ax
, bx
);
1167 ((struct literal_numeric
*) &filter_stack_data
[ref
->offset
])->v
;
1168 dbg_printk("ref load s64 %lld\n",
1169 (long long) estack_ax_v
);
1170 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1174 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
1180 /* load from immediate operand */
1181 OP(FILTER_OP_LOAD_STRING
):
1183 struct load_op
*insn
= (struct load_op
*) pc
;
1185 dbg_printk("load string %s\n", insn
->data
);
1186 estack_push(stack
, top
, ax
, bx
);
1187 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1188 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1189 estack_ax(stack
, top
)->u
.s
.literal_type
=
1190 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
1191 estack_ax(stack
, top
)->u
.s
.user
= 0;
1192 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1196 OP(FILTER_OP_LOAD_STAR_GLOB_STRING
):
1198 struct load_op
*insn
= (struct load_op
*) pc
;
1200 dbg_printk("load globbing pattern %s\n", insn
->data
);
1201 estack_push(stack
, top
, ax
, bx
);
1202 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1203 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1204 estack_ax(stack
, top
)->u
.s
.literal_type
=
1205 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
1206 estack_ax(stack
, top
)->u
.s
.user
= 0;
1207 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1211 OP(FILTER_OP_LOAD_S64
):
1213 struct load_op
*insn
= (struct load_op
*) pc
;
1215 estack_push(stack
, top
, ax
, bx
);
1216 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
1217 dbg_printk("load s64 %lld\n",
1218 (long long) estack_ax_v
);
1219 next_pc
+= sizeof(struct load_op
)
1220 + sizeof(struct literal_numeric
);
1224 OP(FILTER_OP_LOAD_DOUBLE
):
1231 OP(FILTER_OP_CAST_TO_S64
):
1232 printk(KERN_WARNING
"LTTng: filter: unsupported non-specialized bytecode op %u\n",
1233 (unsigned int) *(filter_opcode_t
*) pc
);
1237 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
1243 OP(FILTER_OP_CAST_NOP
):
1245 next_pc
+= sizeof(struct cast_op
);
1249 /* get context ref */
1250 OP(FILTER_OP_GET_CONTEXT_REF_STRING
):
1252 struct load_op
*insn
= (struct load_op
*) pc
;
1253 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1254 struct lttng_ctx_field
*ctx_field
;
1255 union lttng_ctx_value v
;
1257 dbg_printk("get context ref offset %u type string\n",
1259 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1260 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1261 estack_push(stack
, top
, ax
, bx
);
1262 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
1263 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1264 dbg_printk("Filter warning: loading a NULL string.\n");
1268 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1269 estack_ax(stack
, top
)->u
.s
.literal_type
=
1270 ESTACK_STRING_LITERAL_TYPE_NONE
;
1271 estack_ax(stack
, top
)->u
.s
.user
= 0;
1272 dbg_printk("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1273 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1277 OP(FILTER_OP_GET_CONTEXT_REF_S64
):
1279 struct load_op
*insn
= (struct load_op
*) pc
;
1280 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1281 struct lttng_ctx_field
*ctx_field
;
1282 union lttng_ctx_value v
;
1284 dbg_printk("get context ref offset %u type s64\n",
1286 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1287 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1288 estack_push(stack
, top
, ax
, bx
);
1289 estack_ax_v
= v
.s64
;
1290 dbg_printk("ref get context s64 %lld\n",
1291 (long long) estack_ax_v
);
1292 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1296 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE
):
1302 /* load userspace field ref */
1303 OP(FILTER_OP_LOAD_FIELD_REF_USER_STRING
):
1305 struct load_op
*insn
= (struct load_op
*) pc
;
1306 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1308 dbg_printk("load field ref offset %u type user string\n",
1310 estack_push(stack
, top
, ax
, bx
);
1311 estack_ax(stack
, top
)->u
.s
.user_str
=
1312 *(const char * const *) &filter_stack_data
[ref
->offset
];
1313 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1314 dbg_printk("Filter warning: loading a NULL string.\n");
1318 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1319 estack_ax(stack
, top
)->u
.s
.literal_type
=
1320 ESTACK_STRING_LITERAL_TYPE_NONE
;
1321 estack_ax(stack
, top
)->u
.s
.user
= 1;
1322 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1323 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1327 OP(FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
):
1329 struct load_op
*insn
= (struct load_op
*) pc
;
1330 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1332 dbg_printk("load field ref offset %u type user sequence\n",
1334 estack_push(stack
, top
, ax
, bx
);
1335 estack_ax(stack
, top
)->u
.s
.seq_len
=
1336 *(unsigned long *) &filter_stack_data
[ref
->offset
];
1337 estack_ax(stack
, top
)->u
.s
.user_str
=
1338 *(const char **) (&filter_stack_data
[ref
->offset
1339 + sizeof(unsigned long)]);
1340 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1341 dbg_printk("Filter warning: loading a NULL sequence.\n");
1345 estack_ax(stack
, top
)->u
.s
.literal_type
=
1346 ESTACK_STRING_LITERAL_TYPE_NONE
;
1347 estack_ax(stack
, top
)->u
.s
.user
= 1;
1348 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1352 OP(FILTER_OP_GET_CONTEXT_ROOT
):
1354 dbg_printk("op get context root\n");
1355 estack_push(stack
, top
, ax
, bx
);
1356 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
1357 /* "field" only needed for variants. */
1358 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1359 next_pc
+= sizeof(struct load_op
);
1363 OP(FILTER_OP_GET_APP_CONTEXT_ROOT
):
1369 OP(FILTER_OP_GET_PAYLOAD_ROOT
):
1371 dbg_printk("op get app payload root\n");
1372 estack_push(stack
, top
, ax
, bx
);
1373 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
1374 estack_ax(stack
, top
)->u
.ptr
.ptr
= filter_stack_data
;
1375 /* "field" only needed for variants. */
1376 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1377 next_pc
+= sizeof(struct load_op
);
1381 OP(FILTER_OP_GET_SYMBOL
):
1383 dbg_printk("op get symbol\n");
1384 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
1386 printk(KERN_WARNING
"LTTng: filter: Nested fields not implemented yet.\n");
1389 case LOAD_ROOT_CONTEXT
:
1390 case LOAD_ROOT_APP_CONTEXT
:
1391 case LOAD_ROOT_PAYLOAD
:
1393 * symbol lookup is performed by
1399 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1403 OP(FILTER_OP_GET_SYMBOL_FIELD
):
1406 * Used for first variant encountered in a
1407 * traversal. Variants are not implemented yet.
1413 OP(FILTER_OP_GET_INDEX_U16
):
1415 struct load_op
*insn
= (struct load_op
*) pc
;
1416 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1418 dbg_printk("op get index u16\n");
1419 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1422 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1423 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1427 OP(FILTER_OP_GET_INDEX_U64
):
1429 struct load_op
*insn
= (struct load_op
*) pc
;
1430 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1432 dbg_printk("op get index u64\n");
1433 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1436 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1437 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1441 OP(FILTER_OP_LOAD_FIELD
):
1443 dbg_printk("op load field\n");
1444 ret
= dynamic_load_field(estack_ax(stack
, top
));
1447 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1448 next_pc
+= sizeof(struct load_op
);
1452 OP(FILTER_OP_LOAD_FIELD_S8
):
1454 dbg_printk("op load field s8\n");
1456 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1457 next_pc
+= sizeof(struct load_op
);
1460 OP(FILTER_OP_LOAD_FIELD_S16
):
1462 dbg_printk("op load field s16\n");
1464 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1465 next_pc
+= sizeof(struct load_op
);
1468 OP(FILTER_OP_LOAD_FIELD_S32
):
1470 dbg_printk("op load field s32\n");
1472 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1473 next_pc
+= sizeof(struct load_op
);
1476 OP(FILTER_OP_LOAD_FIELD_S64
):
1478 dbg_printk("op load field s64\n");
1480 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1481 next_pc
+= sizeof(struct load_op
);
1484 OP(FILTER_OP_LOAD_FIELD_U8
):
1486 dbg_printk("op load field u8\n");
1488 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1489 next_pc
+= sizeof(struct load_op
);
1492 OP(FILTER_OP_LOAD_FIELD_U16
):
1494 dbg_printk("op load field u16\n");
1496 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1497 next_pc
+= sizeof(struct load_op
);
1500 OP(FILTER_OP_LOAD_FIELD_U32
):
1502 dbg_printk("op load field u32\n");
1504 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1505 next_pc
+= sizeof(struct load_op
);
1508 OP(FILTER_OP_LOAD_FIELD_U64
):
1510 dbg_printk("op load field u64\n");
1512 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1513 next_pc
+= sizeof(struct load_op
);
1516 OP(FILTER_OP_LOAD_FIELD_DOUBLE
):
1522 OP(FILTER_OP_LOAD_FIELD_STRING
):
1526 dbg_printk("op load field string\n");
1527 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1528 estack_ax(stack
, top
)->u
.s
.str
= str
;
1529 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1530 dbg_printk("Filter warning: loading a NULL string.\n");
1534 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1535 estack_ax(stack
, top
)->u
.s
.literal_type
=
1536 ESTACK_STRING_LITERAL_TYPE_NONE
;
1537 next_pc
+= sizeof(struct load_op
);
1541 OP(FILTER_OP_LOAD_FIELD_SEQUENCE
):
1545 dbg_printk("op load field string sequence\n");
1546 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
1547 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
1548 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
1549 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1550 dbg_printk("Filter warning: loading a NULL sequence.\n");
1554 estack_ax(stack
, top
)->u
.s
.literal_type
=
1555 ESTACK_STRING_LITERAL_TYPE_NONE
;
1556 next_pc
+= sizeof(struct load_op
);
1562 /* Return _DISCARD on error. */
1564 return LTTNG_FILTER_DISCARD
;