/*
 * lttng-filter-interpreter.c
 *
 * LTTng modules filter interpreter.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
27 #include <linux/uaccess.h>
28 #include <wrapper/frame.h>
29 #include <wrapper/types.h>
30 #include <linux/swab.h>
32 #include <lttng-filter.h>
33 #include <lttng-string-utils.h>
35 LTTNG_STACK_FRAME_NON_STANDARD(lttng_filter_interpret_bytecode
);
38 * get_char should be called with page fault handler disabled if it is expected
39 * to handle user-space read.
42 char get_char(struct estack_entry
*reg
, size_t offset
)
44 if (unlikely(offset
>= reg
->u
.s
.seq_len
))
49 /* Handle invalid access as end of string. */
50 if (unlikely(!access_ok(VERIFY_READ
,
51 reg
->u
.s
.user_str
+ offset
,
54 /* Handle fault (nonzero return value) as end of string. */
55 if (unlikely(__copy_from_user_inatomic(&c
,
56 reg
->u
.s
.user_str
+ offset
,
61 return reg
->u
.s
.str
[offset
];
67 * -2: unknown escape char.
71 int parse_char(struct estack_entry
*reg
, char *c
, size_t *offset
)
76 *c
= get_char(reg
, *offset
);
92 char get_char_at_cb(size_t at
, void *data
)
94 return get_char(data
, at
);
98 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
100 bool has_user
= false;
103 struct estack_entry
*pattern_reg
;
104 struct estack_entry
*candidate_reg
;
106 if (estack_bx(stack
, top
)->u
.s
.user
107 || estack_ax(stack
, top
)->u
.s
.user
) {
114 /* Find out which side is the pattern vs. the candidate. */
115 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
116 pattern_reg
= estack_ax(stack
, top
);
117 candidate_reg
= estack_bx(stack
, top
);
119 pattern_reg
= estack_bx(stack
, top
);
120 candidate_reg
= estack_ax(stack
, top
);
123 /* Perform the match operation. */
124 result
= !strutils_star_glob_match_char_cb(get_char_at_cb
,
125 pattern_reg
, get_char_at_cb
, candidate_reg
);
135 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
137 size_t offset_bx
= 0, offset_ax
= 0;
138 int diff
, has_user
= 0;
141 if (estack_bx(stack
, top
)->u
.s
.user
142 || estack_ax(stack
, top
)->u
.s
.user
) {
152 char char_bx
, char_ax
;
154 char_bx
= get_char(estack_bx(stack
, top
), offset_bx
);
155 char_ax
= get_char(estack_ax(stack
, top
), offset_ax
);
157 if (unlikely(char_bx
== '\0')) {
158 if (char_ax
== '\0') {
162 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
163 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
164 ret
= parse_char(estack_ax(stack
, top
),
165 &char_ax
, &offset_ax
);
175 if (unlikely(char_ax
== '\0')) {
176 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
177 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
178 ret
= parse_char(estack_bx(stack
, top
),
179 &char_bx
, &offset_bx
);
188 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
189 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
190 ret
= parse_char(estack_bx(stack
, top
),
191 &char_bx
, &offset_bx
);
195 } else if (ret
== -2) {
198 /* else compare both char */
200 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
201 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
202 ret
= parse_char(estack_ax(stack
, top
),
203 &char_ax
, &offset_ax
);
207 } else if (ret
== -2) {
224 diff
= char_bx
- char_ax
;
237 uint64_t lttng_filter_false(void *filter_data
,
238 struct lttng_probe_ctx
*lttng_probe_ctx
,
239 const char *filter_stack_data
)
244 #ifdef INTERPRETER_USE_SWITCH
247 * Fallback for compilers that do not support taking address of labels.
251 start_pc = &bytecode->data[0]; \
252 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
254 dbg_printk("Executing op %s (%u)\n", \
255 lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
256 (unsigned int) *(filter_opcode_t *) pc); \
257 switch (*(filter_opcode_t *) pc) {
259 #define OP(name) case name
269 * Dispatch-table based interpreter.
273 start_pc = &bytecode->code[0]; \
274 pc = next_pc = start_pc; \
275 if (unlikely(pc - start_pc >= bytecode->len)) \
277 goto *dispatch[*(filter_opcode_t *) pc];
284 goto *dispatch[*(filter_opcode_t *) pc];
290 static int context_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
291 struct load_ptr
*ptr
,
295 struct lttng_ctx_field
*ctx_field
;
296 struct lttng_event_field
*field
;
297 union lttng_ctx_value v
;
299 ctx_field
= <tng_static_ctx
->fields
[idx
];
300 field
= &ctx_field
->event_field
;
301 ptr
->type
= LOAD_OBJECT
;
302 /* field is only used for types nested within variants. */
305 switch (field
->type
.atype
) {
307 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
308 if (field
->type
.u
.basic
.integer
.signedness
) {
309 ptr
->object_type
= OBJECT_TYPE_S64
;
311 ptr
->ptr
= &ptr
->u
.s64
;
313 ptr
->object_type
= OBJECT_TYPE_U64
;
314 ptr
->u
.u64
= v
.s64
; /* Cast. */
315 ptr
->ptr
= &ptr
->u
.u64
;
320 const struct lttng_integer_type
*itype
=
321 &field
->type
.u
.basic
.enumeration
.container_type
;
323 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
324 if (itype
->signedness
) {
325 ptr
->object_type
= OBJECT_TYPE_S64
;
327 ptr
->ptr
= &ptr
->u
.s64
;
329 ptr
->object_type
= OBJECT_TYPE_U64
;
330 ptr
->u
.u64
= v
.s64
; /* Cast. */
331 ptr
->ptr
= &ptr
->u
.u64
;
336 if (field
->type
.u
.array
.elem_type
.atype
!= atype_integer
) {
337 printk(KERN_WARNING
"Array nesting only supports integer types.\n");
340 if (field
->type
.u
.array
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
341 printk(KERN_WARNING
"Only string arrays are supported for contexts.\n");
344 ptr
->object_type
= OBJECT_TYPE_STRING
;
345 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
349 if (field
->type
.u
.sequence
.elem_type
.atype
!= atype_integer
) {
350 printk(KERN_WARNING
"Sequence nesting only supports integer types.\n");
353 if (field
->type
.u
.sequence
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
354 printk(KERN_WARNING
"Only string sequences are supported for contexts.\n");
357 ptr
->object_type
= OBJECT_TYPE_STRING
;
358 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
361 case atype_array_bitfield
:
362 printk(KERN_WARNING
"Bitfield array type is not supported.\n");
364 case atype_sequence_bitfield
:
365 printk(KERN_WARNING
"Bitfield sequence type is not supported.\n");
368 ptr
->object_type
= OBJECT_TYPE_STRING
;
369 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
373 printk(KERN_WARNING
"Structure type cannot be loaded.\n");
376 printk(KERN_WARNING
"Unknown type: %d", (int) field
->type
.atype
);
382 static int dynamic_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
383 struct bytecode_runtime
*runtime
,
384 uint64_t index
, struct estack_entry
*stack_top
)
387 const struct filter_get_index_data
*gid
;
390 * Types nested within variants need to perform dynamic lookup
391 * based on the field descriptions. LTTng-UST does not implement
394 if (stack_top
->u
.ptr
.field
)
396 gid
= (const struct filter_get_index_data
*) &runtime
->data
[index
];
397 switch (stack_top
->u
.ptr
.type
) {
399 switch (stack_top
->u
.ptr
.object_type
) {
400 case OBJECT_TYPE_ARRAY
:
404 WARN_ON_ONCE(gid
->offset
>= gid
->array_len
);
405 /* Skip count (unsigned long) */
406 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
407 ptr
= ptr
+ gid
->offset
;
408 stack_top
->u
.ptr
.ptr
= ptr
;
409 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
410 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
411 /* field is only used for types nested within variants. */
412 stack_top
->u
.ptr
.field
= NULL
;
415 case OBJECT_TYPE_SEQUENCE
:
420 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
421 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
422 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
426 ptr
= ptr
+ gid
->offset
;
427 stack_top
->u
.ptr
.ptr
= ptr
;
428 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
429 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
430 /* field is only used for types nested within variants. */
431 stack_top
->u
.ptr
.field
= NULL
;
434 case OBJECT_TYPE_STRUCT
:
435 printk(KERN_WARNING
"Nested structures are not supported yet.\n");
438 case OBJECT_TYPE_VARIANT
:
440 printk(KERN_WARNING
"Unexpected get index type %d",
441 (int) stack_top
->u
.ptr
.object_type
);
446 case LOAD_ROOT_CONTEXT
:
447 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
449 ret
= context_get_index(lttng_probe_ctx
,
457 case LOAD_ROOT_PAYLOAD
:
458 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
459 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
460 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
461 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
462 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
463 /* field is only used for types nested within variants. */
464 stack_top
->u
.ptr
.field
= NULL
;
473 static int dynamic_load_field(struct estack_entry
*stack_top
)
477 switch (stack_top
->u
.ptr
.type
) {
480 case LOAD_ROOT_CONTEXT
:
481 case LOAD_ROOT_APP_CONTEXT
:
482 case LOAD_ROOT_PAYLOAD
:
484 dbg_printk("Filter warning: cannot load root, missing field name.\n");
488 switch (stack_top
->u
.ptr
.object_type
) {
490 dbg_printk("op load field s8\n");
491 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
493 case OBJECT_TYPE_S16
:
497 dbg_printk("op load field s16\n");
498 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
499 if (stack_top
->u
.ptr
.rev_bo
)
501 stack_top
->u
.v
= tmp
;
504 case OBJECT_TYPE_S32
:
508 dbg_printk("op load field s32\n");
509 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
510 if (stack_top
->u
.ptr
.rev_bo
)
512 stack_top
->u
.v
= tmp
;
515 case OBJECT_TYPE_S64
:
519 dbg_printk("op load field s64\n");
520 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
521 if (stack_top
->u
.ptr
.rev_bo
)
523 stack_top
->u
.v
= tmp
;
527 dbg_printk("op load field u8\n");
528 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
530 case OBJECT_TYPE_U16
:
534 dbg_printk("op load field s16\n");
535 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
536 if (stack_top
->u
.ptr
.rev_bo
)
538 stack_top
->u
.v
= tmp
;
541 case OBJECT_TYPE_U32
:
545 dbg_printk("op load field u32\n");
546 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
547 if (stack_top
->u
.ptr
.rev_bo
)
549 stack_top
->u
.v
= tmp
;
552 case OBJECT_TYPE_U64
:
556 dbg_printk("op load field u64\n");
557 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
558 if (stack_top
->u
.ptr
.rev_bo
)
560 stack_top
->u
.v
= tmp
;
563 case OBJECT_TYPE_STRING
:
567 dbg_printk("op load field string\n");
568 str
= (const char *) stack_top
->u
.ptr
.ptr
;
569 stack_top
->u
.s
.str
= str
;
570 if (unlikely(!stack_top
->u
.s
.str
)) {
571 dbg_printk("Filter warning: loading a NULL string.\n");
575 stack_top
->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
576 stack_top
->u
.s
.literal_type
=
577 ESTACK_STRING_LITERAL_TYPE_NONE
;
580 case OBJECT_TYPE_STRING_SEQUENCE
:
584 dbg_printk("op load field string sequence\n");
585 ptr
= stack_top
->u
.ptr
.ptr
;
586 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
587 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
588 if (unlikely(!stack_top
->u
.s
.str
)) {
589 dbg_printk("Filter warning: loading a NULL sequence.\n");
593 stack_top
->u
.s
.literal_type
=
594 ESTACK_STRING_LITERAL_TYPE_NONE
;
597 case OBJECT_TYPE_DYNAMIC
:
599 * Dynamic types in context are looked up
600 * by context get index.
604 case OBJECT_TYPE_DOUBLE
:
607 case OBJECT_TYPE_SEQUENCE
:
608 case OBJECT_TYPE_ARRAY
:
609 case OBJECT_TYPE_STRUCT
:
610 case OBJECT_TYPE_VARIANT
:
611 printk(KERN_WARNING
"Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
622 * Return 0 (discard), or raise the 0x1 flag (log event).
623 * Currently, other flags are kept for future extensions and have no
626 uint64_t lttng_filter_interpret_bytecode(void *filter_data
,
627 struct lttng_probe_ctx
*lttng_probe_ctx
,
628 const char *filter_stack_data
)
630 struct bytecode_runtime
*bytecode
= filter_data
;
631 void *pc
, *next_pc
, *start_pc
;
634 struct estack _stack
;
635 struct estack
*stack
= &_stack
;
636 register int64_t ax
= 0, bx
= 0;
637 register int top
= FILTER_STACK_EMPTY
;
638 #ifndef INTERPRETER_USE_SWITCH
639 static void *dispatch
[NR_FILTER_OPS
] = {
640 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
642 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
645 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
646 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
647 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
648 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
649 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
650 [ FILTER_OP_BIT_RSHIFT
] = &&LABEL_FILTER_OP_BIT_RSHIFT
,
651 [ FILTER_OP_BIT_LSHIFT
] = &&LABEL_FILTER_OP_BIT_LSHIFT
,
652 [ FILTER_OP_BIT_AND
] = &&LABEL_FILTER_OP_BIT_AND
,
653 [ FILTER_OP_BIT_OR
] = &&LABEL_FILTER_OP_BIT_OR
,
654 [ FILTER_OP_BIT_XOR
] = &&LABEL_FILTER_OP_BIT_XOR
,
656 /* binary comparators */
657 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
658 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
659 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
660 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
661 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
662 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
664 /* string binary comparator */
665 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
666 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
667 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
668 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
669 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
670 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
672 /* globbing pattern binary comparator */
673 [ FILTER_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING
,
674 [ FILTER_OP_NE_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING
,
676 /* s64 binary comparator */
677 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
678 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
679 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
680 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
681 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
682 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
684 /* double binary comparator */
685 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
686 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
687 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
688 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
689 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
690 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
692 /* Mixed S64-double binary comparators */
693 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
694 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
695 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
696 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
697 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
698 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
700 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
701 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
702 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
703 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
704 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
705 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
708 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
709 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
710 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
711 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
712 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
713 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
714 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
715 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
716 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
719 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
720 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
723 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
724 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
725 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
726 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
727 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
729 /* load from immediate operand */
730 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
731 [ FILTER_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING
,
732 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
733 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
736 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
737 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
738 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
740 /* get context ref */
741 [ FILTER_OP_GET_CONTEXT_REF
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF
,
742 [ FILTER_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING
,
743 [ FILTER_OP_GET_CONTEXT_REF_S64
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64
,
744 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE
,
746 /* load userspace field ref */
747 [ FILTER_OP_LOAD_FIELD_REF_USER_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_STRING
,
748 [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
,
750 /* Instructions for recursive traversal through composed types. */
751 [ FILTER_OP_GET_CONTEXT_ROOT
] = &&LABEL_FILTER_OP_GET_CONTEXT_ROOT
,
752 [ FILTER_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_FILTER_OP_GET_APP_CONTEXT_ROOT
,
753 [ FILTER_OP_GET_PAYLOAD_ROOT
] = &&LABEL_FILTER_OP_GET_PAYLOAD_ROOT
,
755 [ FILTER_OP_GET_SYMBOL
] = &&LABEL_FILTER_OP_GET_SYMBOL
,
756 [ FILTER_OP_GET_SYMBOL_FIELD
] = &&LABEL_FILTER_OP_GET_SYMBOL_FIELD
,
757 [ FILTER_OP_GET_INDEX_U16
] = &&LABEL_FILTER_OP_GET_INDEX_U16
,
758 [ FILTER_OP_GET_INDEX_U64
] = &&LABEL_FILTER_OP_GET_INDEX_U64
,
760 [ FILTER_OP_LOAD_FIELD
] = &&LABEL_FILTER_OP_LOAD_FIELD
,
761 [ FILTER_OP_LOAD_FIELD_S8
] = &&LABEL_FILTER_OP_LOAD_FIELD_S8
,
762 [ FILTER_OP_LOAD_FIELD_S16
] = &&LABEL_FILTER_OP_LOAD_FIELD_S16
,
763 [ FILTER_OP_LOAD_FIELD_S32
] = &&LABEL_FILTER_OP_LOAD_FIELD_S32
,
764 [ FILTER_OP_LOAD_FIELD_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_S64
,
765 [ FILTER_OP_LOAD_FIELD_U8
] = &&LABEL_FILTER_OP_LOAD_FIELD_U8
,
766 [ FILTER_OP_LOAD_FIELD_U16
] = &&LABEL_FILTER_OP_LOAD_FIELD_U16
,
767 [ FILTER_OP_LOAD_FIELD_U32
] = &&LABEL_FILTER_OP_LOAD_FIELD_U32
,
768 [ FILTER_OP_LOAD_FIELD_U64
] = &&LABEL_FILTER_OP_LOAD_FIELD_U64
,
769 [ FILTER_OP_LOAD_FIELD_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_STRING
,
770 [ FILTER_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_SEQUENCE
,
771 [ FILTER_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_DOUBLE
,
773 [ FILTER_OP_UNARY_BIT_NOT
] = &&LABEL_FILTER_OP_UNARY_BIT_NOT
,
775 [ FILTER_OP_RETURN_S64
] = &&LABEL_FILTER_OP_RETURN_S64
,
777 #endif /* #ifndef INTERPRETER_USE_SWITCH */
781 OP(FILTER_OP_UNKNOWN
):
782 OP(FILTER_OP_LOAD_FIELD_REF
):
783 OP(FILTER_OP_GET_CONTEXT_REF
):
784 #ifdef INTERPRETER_USE_SWITCH
786 #endif /* INTERPRETER_USE_SWITCH */
787 printk(KERN_WARNING
"unknown bytecode op %u\n",
788 (unsigned int) *(filter_opcode_t
*) pc
);
792 OP(FILTER_OP_RETURN
):
793 OP(FILTER_OP_RETURN_S64
):
794 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
795 retval
= !!estack_ax_v
;
805 printk(KERN_WARNING
"unsupported bytecode op %u\n",
806 (unsigned int) *(filter_opcode_t
*) pc
);
816 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
817 (unsigned int) *(filter_opcode_t
*) pc
);
821 OP(FILTER_OP_EQ_STRING
):
825 res
= (stack_strcmp(stack
, top
, "==") == 0);
826 estack_pop(stack
, top
, ax
, bx
);
828 next_pc
+= sizeof(struct binary_op
);
831 OP(FILTER_OP_NE_STRING
):
835 res
= (stack_strcmp(stack
, top
, "!=") != 0);
836 estack_pop(stack
, top
, ax
, bx
);
838 next_pc
+= sizeof(struct binary_op
);
841 OP(FILTER_OP_GT_STRING
):
845 res
= (stack_strcmp(stack
, top
, ">") > 0);
846 estack_pop(stack
, top
, ax
, bx
);
848 next_pc
+= sizeof(struct binary_op
);
851 OP(FILTER_OP_LT_STRING
):
855 res
= (stack_strcmp(stack
, top
, "<") < 0);
856 estack_pop(stack
, top
, ax
, bx
);
858 next_pc
+= sizeof(struct binary_op
);
861 OP(FILTER_OP_GE_STRING
):
865 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
866 estack_pop(stack
, top
, ax
, bx
);
868 next_pc
+= sizeof(struct binary_op
);
871 OP(FILTER_OP_LE_STRING
):
875 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
876 estack_pop(stack
, top
, ax
, bx
);
878 next_pc
+= sizeof(struct binary_op
);
882 OP(FILTER_OP_EQ_STAR_GLOB_STRING
):
886 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
887 estack_pop(stack
, top
, ax
, bx
);
889 next_pc
+= sizeof(struct binary_op
);
892 OP(FILTER_OP_NE_STAR_GLOB_STRING
):
896 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
897 estack_pop(stack
, top
, ax
, bx
);
899 next_pc
+= sizeof(struct binary_op
);
903 OP(FILTER_OP_EQ_S64
):
907 res
= (estack_bx_v
== estack_ax_v
);
908 estack_pop(stack
, top
, ax
, bx
);
910 next_pc
+= sizeof(struct binary_op
);
913 OP(FILTER_OP_NE_S64
):
917 res
= (estack_bx_v
!= estack_ax_v
);
918 estack_pop(stack
, top
, ax
, bx
);
920 next_pc
+= sizeof(struct binary_op
);
923 OP(FILTER_OP_GT_S64
):
927 res
= (estack_bx_v
> estack_ax_v
);
928 estack_pop(stack
, top
, ax
, bx
);
930 next_pc
+= sizeof(struct binary_op
);
933 OP(FILTER_OP_LT_S64
):
937 res
= (estack_bx_v
< estack_ax_v
);
938 estack_pop(stack
, top
, ax
, bx
);
940 next_pc
+= sizeof(struct binary_op
);
943 OP(FILTER_OP_GE_S64
):
947 res
= (estack_bx_v
>= estack_ax_v
);
948 estack_pop(stack
, top
, ax
, bx
);
950 next_pc
+= sizeof(struct binary_op
);
953 OP(FILTER_OP_LE_S64
):
957 res
= (estack_bx_v
<= estack_ax_v
);
958 estack_pop(stack
, top
, ax
, bx
);
960 next_pc
+= sizeof(struct binary_op
);
964 OP(FILTER_OP_EQ_DOUBLE
):
965 OP(FILTER_OP_NE_DOUBLE
):
966 OP(FILTER_OP_GT_DOUBLE
):
967 OP(FILTER_OP_LT_DOUBLE
):
968 OP(FILTER_OP_GE_DOUBLE
):
969 OP(FILTER_OP_LE_DOUBLE
):
975 /* Mixed S64-double binary comparators */
976 OP(FILTER_OP_EQ_DOUBLE_S64
):
977 OP(FILTER_OP_NE_DOUBLE_S64
):
978 OP(FILTER_OP_GT_DOUBLE_S64
):
979 OP(FILTER_OP_LT_DOUBLE_S64
):
980 OP(FILTER_OP_GE_DOUBLE_S64
):
981 OP(FILTER_OP_LE_DOUBLE_S64
):
982 OP(FILTER_OP_EQ_S64_DOUBLE
):
983 OP(FILTER_OP_NE_S64_DOUBLE
):
984 OP(FILTER_OP_GT_S64_DOUBLE
):
985 OP(FILTER_OP_LT_S64_DOUBLE
):
986 OP(FILTER_OP_GE_S64_DOUBLE
):
987 OP(FILTER_OP_LE_S64_DOUBLE
):
992 OP(FILTER_OP_BIT_RSHIFT
):
996 /* Catch undefined behavior. */
997 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1001 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1002 estack_pop(stack
, top
, ax
, bx
);
1004 next_pc
+= sizeof(struct binary_op
);
1007 OP(FILTER_OP_BIT_LSHIFT
):
1011 /* Catch undefined behavior. */
1012 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1016 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1017 estack_pop(stack
, top
, ax
, bx
);
1019 next_pc
+= sizeof(struct binary_op
);
1022 OP(FILTER_OP_BIT_AND
):
1026 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1027 estack_pop(stack
, top
, ax
, bx
);
1029 next_pc
+= sizeof(struct binary_op
);
1032 OP(FILTER_OP_BIT_OR
):
1036 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1037 estack_pop(stack
, top
, ax
, bx
);
1039 next_pc
+= sizeof(struct binary_op
);
1042 OP(FILTER_OP_BIT_XOR
):
1046 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1047 estack_pop(stack
, top
, ax
, bx
);
1049 next_pc
+= sizeof(struct binary_op
);
1054 OP(FILTER_OP_UNARY_PLUS
):
1055 OP(FILTER_OP_UNARY_MINUS
):
1056 OP(FILTER_OP_UNARY_NOT
):
1057 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
1058 (unsigned int) *(filter_opcode_t
*) pc
);
1063 OP(FILTER_OP_UNARY_BIT_NOT
):
1065 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1066 next_pc
+= sizeof(struct unary_op
);
1070 OP(FILTER_OP_UNARY_PLUS_S64
):
1072 next_pc
+= sizeof(struct unary_op
);
1075 OP(FILTER_OP_UNARY_MINUS_S64
):
1077 estack_ax_v
= -estack_ax_v
;
1078 next_pc
+= sizeof(struct unary_op
);
1081 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
1082 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
1087 OP(FILTER_OP_UNARY_NOT_S64
):
1089 estack_ax_v
= !estack_ax_v
;
1090 next_pc
+= sizeof(struct unary_op
);
1093 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
1102 struct logical_op
*insn
= (struct logical_op
*) pc
;
1104 /* If AX is 0, skip and evaluate to 0 */
1105 if (unlikely(estack_ax_v
== 0)) {
1106 dbg_printk("Jumping to bytecode offset %u\n",
1107 (unsigned int) insn
->skip_offset
);
1108 next_pc
= start_pc
+ insn
->skip_offset
;
1110 /* Pop 1 when jump not taken */
1111 estack_pop(stack
, top
, ax
, bx
);
1112 next_pc
+= sizeof(struct logical_op
);
1118 struct logical_op
*insn
= (struct logical_op
*) pc
;
1120 /* If AX is nonzero, skip and evaluate to 1 */
1122 if (unlikely(estack_ax_v
!= 0)) {
1124 dbg_printk("Jumping to bytecode offset %u\n",
1125 (unsigned int) insn
->skip_offset
);
1126 next_pc
= start_pc
+ insn
->skip_offset
;
1128 /* Pop 1 when jump not taken */
1129 estack_pop(stack
, top
, ax
, bx
);
1130 next_pc
+= sizeof(struct logical_op
);
1136 /* load field ref */
1137 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
1139 struct load_op
*insn
= (struct load_op
*) pc
;
1140 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1142 dbg_printk("load field ref offset %u type string\n",
1144 estack_push(stack
, top
, ax
, bx
);
1145 estack_ax(stack
, top
)->u
.s
.str
=
1146 *(const char * const *) &filter_stack_data
[ref
->offset
];
1147 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1148 dbg_printk("Filter warning: loading a NULL string.\n");
1152 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1153 estack_ax(stack
, top
)->u
.s
.literal_type
=
1154 ESTACK_STRING_LITERAL_TYPE_NONE
;
1155 estack_ax(stack
, top
)->u
.s
.user
= 0;
1156 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1157 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1161 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
1163 struct load_op
*insn
= (struct load_op
*) pc
;
1164 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1166 dbg_printk("load field ref offset %u type sequence\n",
1168 estack_push(stack
, top
, ax
, bx
);
1169 estack_ax(stack
, top
)->u
.s
.seq_len
=
1170 *(unsigned long *) &filter_stack_data
[ref
->offset
];
1171 estack_ax(stack
, top
)->u
.s
.str
=
1172 *(const char **) (&filter_stack_data
[ref
->offset
1173 + sizeof(unsigned long)]);
1174 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1175 dbg_printk("Filter warning: loading a NULL sequence.\n");
1179 estack_ax(stack
, top
)->u
.s
.literal_type
=
1180 ESTACK_STRING_LITERAL_TYPE_NONE
;
1181 estack_ax(stack
, top
)->u
.s
.user
= 0;
1182 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1186 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
1188 struct load_op
*insn
= (struct load_op
*) pc
;
1189 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1191 dbg_printk("load field ref offset %u type s64\n",
1193 estack_push(stack
, top
, ax
, bx
);
1195 ((struct literal_numeric
*) &filter_stack_data
[ref
->offset
])->v
;
1196 dbg_printk("ref load s64 %lld\n",
1197 (long long) estack_ax_v
);
1198 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1202 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
1208 /* load from immediate operand */
1209 OP(FILTER_OP_LOAD_STRING
):
1211 struct load_op
*insn
= (struct load_op
*) pc
;
1213 dbg_printk("load string %s\n", insn
->data
);
1214 estack_push(stack
, top
, ax
, bx
);
1215 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1216 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1217 estack_ax(stack
, top
)->u
.s
.literal_type
=
1218 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
1219 estack_ax(stack
, top
)->u
.s
.user
= 0;
1220 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1224 OP(FILTER_OP_LOAD_STAR_GLOB_STRING
):
1226 struct load_op
*insn
= (struct load_op
*) pc
;
1228 dbg_printk("load globbing pattern %s\n", insn
->data
);
1229 estack_push(stack
, top
, ax
, bx
);
1230 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1231 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1232 estack_ax(stack
, top
)->u
.s
.literal_type
=
1233 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
1234 estack_ax(stack
, top
)->u
.s
.user
= 0;
1235 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1239 OP(FILTER_OP_LOAD_S64
):
1241 struct load_op
*insn
= (struct load_op
*) pc
;
1243 estack_push(stack
, top
, ax
, bx
);
1244 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
1245 dbg_printk("load s64 %lld\n",
1246 (long long) estack_ax_v
);
1247 next_pc
+= sizeof(struct load_op
)
1248 + sizeof(struct literal_numeric
);
1252 OP(FILTER_OP_LOAD_DOUBLE
):
1259 OP(FILTER_OP_CAST_TO_S64
):
1260 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
1261 (unsigned int) *(filter_opcode_t
*) pc
);
1265 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
1271 OP(FILTER_OP_CAST_NOP
):
1273 next_pc
+= sizeof(struct cast_op
);
1277 /* get context ref */
1278 OP(FILTER_OP_GET_CONTEXT_REF_STRING
):
1280 struct load_op
*insn
= (struct load_op
*) pc
;
1281 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1282 struct lttng_ctx_field
*ctx_field
;
1283 union lttng_ctx_value v
;
1285 dbg_printk("get context ref offset %u type string\n",
1287 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1288 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1289 estack_push(stack
, top
, ax
, bx
);
1290 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
1291 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1292 dbg_printk("Filter warning: loading a NULL string.\n");
1296 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1297 estack_ax(stack
, top
)->u
.s
.literal_type
=
1298 ESTACK_STRING_LITERAL_TYPE_NONE
;
1299 estack_ax(stack
, top
)->u
.s
.user
= 0;
1300 dbg_printk("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1301 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1305 OP(FILTER_OP_GET_CONTEXT_REF_S64
):
1307 struct load_op
*insn
= (struct load_op
*) pc
;
1308 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1309 struct lttng_ctx_field
*ctx_field
;
1310 union lttng_ctx_value v
;
1312 dbg_printk("get context ref offset %u type s64\n",
1314 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1315 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1316 estack_push(stack
, top
, ax
, bx
);
1317 estack_ax_v
= v
.s64
;
1318 dbg_printk("ref get context s64 %lld\n",
1319 (long long) estack_ax_v
);
1320 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1324 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE
):
1330 /* load userspace field ref */
1331 OP(FILTER_OP_LOAD_FIELD_REF_USER_STRING
):
1333 struct load_op
*insn
= (struct load_op
*) pc
;
1334 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1336 dbg_printk("load field ref offset %u type user string\n",
1338 estack_push(stack
, top
, ax
, bx
);
1339 estack_ax(stack
, top
)->u
.s
.user_str
=
1340 *(const char * const *) &filter_stack_data
[ref
->offset
];
1341 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1342 dbg_printk("Filter warning: loading a NULL string.\n");
1346 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1347 estack_ax(stack
, top
)->u
.s
.literal_type
=
1348 ESTACK_STRING_LITERAL_TYPE_NONE
;
1349 estack_ax(stack
, top
)->u
.s
.user
= 1;
1350 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1351 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1355 OP(FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
):
1357 struct load_op
*insn
= (struct load_op
*) pc
;
1358 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1360 dbg_printk("load field ref offset %u type user sequence\n",
1362 estack_push(stack
, top
, ax
, bx
);
1363 estack_ax(stack
, top
)->u
.s
.seq_len
=
1364 *(unsigned long *) &filter_stack_data
[ref
->offset
];
1365 estack_ax(stack
, top
)->u
.s
.user_str
=
1366 *(const char **) (&filter_stack_data
[ref
->offset
1367 + sizeof(unsigned long)]);
1368 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1369 dbg_printk("Filter warning: loading a NULL sequence.\n");
1373 estack_ax(stack
, top
)->u
.s
.literal_type
=
1374 ESTACK_STRING_LITERAL_TYPE_NONE
;
1375 estack_ax(stack
, top
)->u
.s
.user
= 1;
1376 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1380 OP(FILTER_OP_GET_CONTEXT_ROOT
):
1382 dbg_printk("op get context root\n");
1383 estack_push(stack
, top
, ax
, bx
);
1384 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
1385 /* "field" only needed for variants. */
1386 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1387 next_pc
+= sizeof(struct load_op
);
1391 OP(FILTER_OP_GET_APP_CONTEXT_ROOT
):
1397 OP(FILTER_OP_GET_PAYLOAD_ROOT
):
1399 dbg_printk("op get app payload root\n");
1400 estack_push(stack
, top
, ax
, bx
);
1401 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
1402 estack_ax(stack
, top
)->u
.ptr
.ptr
= filter_stack_data
;
1403 /* "field" only needed for variants. */
1404 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1405 next_pc
+= sizeof(struct load_op
);
1409 OP(FILTER_OP_GET_SYMBOL
):
1411 dbg_printk("op get symbol\n");
1412 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
1414 printk(KERN_WARNING
"Nested fields not implemented yet.\n");
1417 case LOAD_ROOT_CONTEXT
:
1418 case LOAD_ROOT_APP_CONTEXT
:
1419 case LOAD_ROOT_PAYLOAD
:
1421 * symbol lookup is performed by
1427 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1431 OP(FILTER_OP_GET_SYMBOL_FIELD
):
1434 * Used for first variant encountered in a
1435 * traversal. Variants are not implemented yet.
1441 OP(FILTER_OP_GET_INDEX_U16
):
1443 struct load_op
*insn
= (struct load_op
*) pc
;
1444 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1446 dbg_printk("op get index u16\n");
1447 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1450 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1451 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1455 OP(FILTER_OP_GET_INDEX_U64
):
1457 struct load_op
*insn
= (struct load_op
*) pc
;
1458 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1460 dbg_printk("op get index u64\n");
1461 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1464 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1465 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1469 OP(FILTER_OP_LOAD_FIELD
):
1471 dbg_printk("op load field\n");
1472 ret
= dynamic_load_field(estack_ax(stack
, top
));
1475 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1476 next_pc
+= sizeof(struct load_op
);
1480 OP(FILTER_OP_LOAD_FIELD_S8
):
1482 dbg_printk("op load field s8\n");
1484 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1485 next_pc
+= sizeof(struct load_op
);
1488 OP(FILTER_OP_LOAD_FIELD_S16
):
1490 dbg_printk("op load field s16\n");
1492 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1493 next_pc
+= sizeof(struct load_op
);
1496 OP(FILTER_OP_LOAD_FIELD_S32
):
1498 dbg_printk("op load field s32\n");
1500 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1501 next_pc
+= sizeof(struct load_op
);
1504 OP(FILTER_OP_LOAD_FIELD_S64
):
1506 dbg_printk("op load field s64\n");
1508 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1509 next_pc
+= sizeof(struct load_op
);
1512 OP(FILTER_OP_LOAD_FIELD_U8
):
1514 dbg_printk("op load field u8\n");
1516 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1517 next_pc
+= sizeof(struct load_op
);
1520 OP(FILTER_OP_LOAD_FIELD_U16
):
1522 dbg_printk("op load field u16\n");
1524 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1525 next_pc
+= sizeof(struct load_op
);
1528 OP(FILTER_OP_LOAD_FIELD_U32
):
1530 dbg_printk("op load field u32\n");
1532 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1533 next_pc
+= sizeof(struct load_op
);
1536 OP(FILTER_OP_LOAD_FIELD_U64
):
1538 dbg_printk("op load field u64\n");
1540 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1541 next_pc
+= sizeof(struct load_op
);
1544 OP(FILTER_OP_LOAD_FIELD_DOUBLE
):
1550 OP(FILTER_OP_LOAD_FIELD_STRING
):
1554 dbg_printk("op load field string\n");
1555 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1556 estack_ax(stack
, top
)->u
.s
.str
= str
;
1557 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1558 dbg_printk("Filter warning: loading a NULL string.\n");
1562 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1563 estack_ax(stack
, top
)->u
.s
.literal_type
=
1564 ESTACK_STRING_LITERAL_TYPE_NONE
;
1565 next_pc
+= sizeof(struct load_op
);
1569 OP(FILTER_OP_LOAD_FIELD_SEQUENCE
):
1573 dbg_printk("op load field string sequence\n");
1574 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
1575 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
1576 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
1577 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1578 dbg_printk("Filter warning: loading a NULL sequence.\n");
1582 estack_ax(stack
, top
)->u
.s
.literal_type
=
1583 ESTACK_STRING_LITERAL_TYPE_NONE
;
1584 next_pc
+= sizeof(struct load_op
);
1590 /* return 0 (discard) on error */