1 /* SPDX-License-Identifier: MIT
3 * lttng-bytecode-interpreter.c
5 * LTTng modules bytecode interpreter.
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
15 #include <lttng/lttng-bytecode.h>
16 #include <lttng/string-utils.h>
17 #include <lttng/events-internal.h>
20 * get_char should be called with page fault handler disabled if it is expected
21 * to handle user-space read.
24 char get_char(const struct estack_entry
*reg
, size_t offset
)
26 if (unlikely(offset
>= reg
->u
.s
.seq_len
))
31 /* Handle invalid access as end of string. */
32 if (unlikely(!lttng_access_ok(VERIFY_READ
,
33 reg
->u
.s
.user_str
+ offset
,
36 /* Handle fault (nonzero return value) as end of string. */
37 if (unlikely(__copy_from_user_inatomic(&c
,
38 reg
->u
.s
.user_str
+ offset
,
43 return reg
->u
.s
.str
[offset
];
49 * -2: unknown escape char.
53 int parse_char(struct estack_entry
*reg
, char *c
, size_t *offset
)
58 *c
= get_char(reg
, *offset
);
74 char get_char_at_cb(size_t at
, void *data
)
76 return get_char(data
, at
);
80 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
82 bool has_user
= false;
84 struct estack_entry
*pattern_reg
;
85 struct estack_entry
*candidate_reg
;
87 /* Disable the page fault handler when reading from userspace. */
88 if (estack_bx(stack
, top
)->u
.s
.user
89 || estack_ax(stack
, top
)->u
.s
.user
) {
94 /* Find out which side is the pattern vs. the candidate. */
95 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
96 pattern_reg
= estack_ax(stack
, top
);
97 candidate_reg
= estack_bx(stack
, top
);
99 pattern_reg
= estack_bx(stack
, top
);
100 candidate_reg
= estack_ax(stack
, top
);
103 /* Perform the match operation. */
104 result
= !strutils_star_glob_match_char_cb(get_char_at_cb
,
105 pattern_reg
, get_char_at_cb
, candidate_reg
);
113 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
115 size_t offset_bx
= 0, offset_ax
= 0;
116 int diff
, has_user
= 0;
118 if (estack_bx(stack
, top
)->u
.s
.user
119 || estack_ax(stack
, top
)->u
.s
.user
) {
127 char char_bx
, char_ax
;
129 char_bx
= get_char(estack_bx(stack
, top
), offset_bx
);
130 char_ax
= get_char(estack_ax(stack
, top
), offset_ax
);
132 if (unlikely(char_bx
== '\0')) {
133 if (char_ax
== '\0') {
137 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
138 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
139 ret
= parse_char(estack_ax(stack
, top
),
140 &char_ax
, &offset_ax
);
150 if (unlikely(char_ax
== '\0')) {
151 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
152 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
153 ret
= parse_char(estack_bx(stack
, top
),
154 &char_bx
, &offset_bx
);
163 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
164 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
165 ret
= parse_char(estack_bx(stack
, top
),
166 &char_bx
, &offset_bx
);
170 } else if (ret
== -2) {
173 /* else compare both char */
175 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
176 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
177 ret
= parse_char(estack_ax(stack
, top
),
178 &char_ax
, &offset_ax
);
182 } else if (ret
== -2) {
199 diff
= char_bx
- char_ax
;
211 int lttng_bytecode_interpret_error(
212 struct lttng_kernel_bytecode_runtime
*bytecode_runtime
__attribute__((unused
)),
213 const char *stack_data
__attribute__((unused
)),
214 struct lttng_kernel_probe_ctx
*probe_ctx
__attribute__((unused
)),
215 void *ctx
__attribute__((unused
)))
217 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR
;
220 #ifdef INTERPRETER_USE_SWITCH
223 * Fallback for compilers that do not support taking address of labels.
227 start_pc = &bytecode->data[0]; \
228 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
230 dbg_printk("LTTng: Executing op %s (%u)\n", \
231 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
232 (unsigned int) *(bytecode_opcode_t *) pc); \
233 switch (*(bytecode_opcode_t *) pc) {
235 #define OP(name) case name
245 * Dispatch-table based interpreter.
249 start_pc = &bytecode->code[0]; \
250 pc = next_pc = start_pc; \
251 if (unlikely(pc - start_pc >= bytecode->len)) \
253 goto *dispatch[*(bytecode_opcode_t *) pc];
260 goto *dispatch[*(bytecode_opcode_t *) pc];
266 #define IS_INTEGER_REGISTER(reg_type) \
267 (reg_type == REG_S64 || reg_type == REG_U64)
269 static int context_get_index(struct lttng_kernel_probe_ctx
*lttng_probe_ctx
,
270 struct load_ptr
*ptr
,
274 struct lttng_kernel_ctx_field
*ctx_field
;
275 const struct lttng_kernel_event_field
*field
;
276 struct lttng_ctx_value v
;
278 ctx_field
= <tng_static_ctx
->fields
[idx
];
279 field
= ctx_field
->event_field
;
280 ptr
->type
= LOAD_OBJECT
;
281 /* field is only used for types nested within variants. */
284 switch (field
->type
->type
) {
285 case lttng_kernel_type_integer
:
286 ctx_field
->get_value(ctx_field
->priv
, lttng_probe_ctx
, &v
);
287 if (lttng_kernel_get_type_integer(field
->type
)->signedness
) {
288 ptr
->object_type
= OBJECT_TYPE_S64
;
289 ptr
->u
.s64
= v
.u
.s64
;
290 ptr
->ptr
= &ptr
->u
.s64
;
292 ptr
->object_type
= OBJECT_TYPE_U64
;
293 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
294 ptr
->ptr
= &ptr
->u
.u64
;
297 case lttng_kernel_type_enum
:
299 const struct lttng_kernel_type_enum
*enum_type
= lttng_kernel_get_type_enum(field
->type
);
300 const struct lttng_kernel_type_integer
*integer_type
= lttng_kernel_get_type_integer(enum_type
->container_type
);
302 ctx_field
->get_value(ctx_field
->priv
, lttng_probe_ctx
, &v
);
303 if (integer_type
->signedness
) {
304 ptr
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
305 ptr
->u
.s64
= v
.u
.s64
;
306 ptr
->ptr
= &ptr
->u
.s64
;
308 ptr
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
309 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
310 ptr
->ptr
= &ptr
->u
.u64
;
314 case lttng_kernel_type_array
:
316 const struct lttng_kernel_type_array
*array_type
= lttng_kernel_get_type_array(field
->type
);
318 if (!lttng_kernel_type_is_bytewise_integer(array_type
->elem_type
)) {
319 printk(KERN_WARNING
"LTTng: bytecode: Array nesting only supports integer types.\n");
322 if (array_type
->encoding
== lttng_kernel_string_encoding_none
) {
323 printk(KERN_WARNING
"LTTng: bytecode: Only string arrays are supported for contexts.\n");
326 ptr
->object_type
= OBJECT_TYPE_STRING
;
327 ctx_field
->get_value(ctx_field
->priv
, lttng_probe_ctx
, &v
);
331 case lttng_kernel_type_sequence
:
333 const struct lttng_kernel_type_sequence
*sequence_type
= lttng_kernel_get_type_sequence(field
->type
);
335 if (!lttng_kernel_type_is_bytewise_integer(sequence_type
->elem_type
)) {
336 printk(KERN_WARNING
"LTTng: bytecode: Sequence nesting only supports integer types.\n");
339 if (sequence_type
->encoding
== lttng_kernel_string_encoding_none
) {
340 printk(KERN_WARNING
"LTTng: bytecode: Only string sequences are supported for contexts.\n");
343 ptr
->object_type
= OBJECT_TYPE_STRING
;
344 ctx_field
->get_value(ctx_field
->priv
, lttng_probe_ctx
, &v
);
348 case lttng_kernel_type_string
:
349 ptr
->object_type
= OBJECT_TYPE_STRING
;
350 ctx_field
->get_value(ctx_field
->priv
, lttng_probe_ctx
, &v
);
353 case lttng_kernel_type_struct
:
354 printk(KERN_WARNING
"LTTng: bytecode: Structure type cannot be loaded.\n");
356 case lttng_kernel_type_variant
:
357 printk(KERN_WARNING
"LTTng: bytecode: Variant type cannot be loaded.\n");
360 printk(KERN_WARNING
"LTTng: bytecode: Unknown type: %d", (int) field
->type
->type
);
366 static int dynamic_get_index(struct lttng_kernel_probe_ctx
*lttng_probe_ctx
,
367 struct bytecode_runtime
*runtime
,
368 uint64_t index
, struct estack_entry
*stack_top
)
371 const struct bytecode_get_index_data
*gid
;
373 gid
= (const struct bytecode_get_index_data
*) &runtime
->data
[index
];
374 switch (stack_top
->u
.ptr
.type
) {
376 switch (stack_top
->u
.ptr
.object_type
) {
377 case OBJECT_TYPE_ARRAY
:
381 WARN_ON_ONCE(gid
->offset
>= gid
->array_len
);
382 /* Skip count (unsigned long) */
383 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
384 ptr
= ptr
+ gid
->offset
;
385 stack_top
->u
.ptr
.ptr
= ptr
;
386 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
387 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
388 BUG_ON(stack_top
->u
.ptr
.field
->type
->type
!= lttng_kernel_type_array
);
389 stack_top
->u
.ptr
.field
= NULL
;
392 case OBJECT_TYPE_SEQUENCE
:
397 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
398 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
399 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
403 ptr
= ptr
+ gid
->offset
;
404 stack_top
->u
.ptr
.ptr
= ptr
;
405 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
406 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
407 BUG_ON(stack_top
->u
.ptr
.field
->type
->type
!= lttng_kernel_type_sequence
);
408 stack_top
->u
.ptr
.field
= NULL
;
411 case OBJECT_TYPE_STRUCT
:
412 printk(KERN_WARNING
"LTTng: bytecode: Nested structures are not supported yet.\n");
415 case OBJECT_TYPE_VARIANT
:
417 printk(KERN_WARNING
"LTTng: bytecode: Unexpected get index type %d",
418 (int) stack_top
->u
.ptr
.object_type
);
423 case LOAD_ROOT_CONTEXT
:
424 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
426 ret
= context_get_index(lttng_probe_ctx
,
434 case LOAD_ROOT_PAYLOAD
:
435 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
436 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
437 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
438 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
439 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
440 stack_top
->u
.ptr
.field
= gid
->field
;
441 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
445 stack_top
->type
= REG_PTR
;
453 static int dynamic_load_field(struct estack_entry
*stack_top
)
457 switch (stack_top
->u
.ptr
.type
) {
460 case LOAD_ROOT_CONTEXT
:
461 case LOAD_ROOT_APP_CONTEXT
:
462 case LOAD_ROOT_PAYLOAD
:
464 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
468 switch (stack_top
->u
.ptr
.object_type
) {
470 dbg_printk("op load field s8\n");
471 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
472 stack_top
->type
= REG_S64
;
474 case OBJECT_TYPE_S16
:
478 dbg_printk("op load field s16\n");
479 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
480 if (stack_top
->u
.ptr
.rev_bo
)
482 stack_top
->u
.v
= tmp
;
483 stack_top
->type
= REG_S64
;
486 case OBJECT_TYPE_S32
:
490 dbg_printk("op load field s32\n");
491 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
492 if (stack_top
->u
.ptr
.rev_bo
)
494 stack_top
->u
.v
= tmp
;
495 stack_top
->type
= REG_S64
;
498 case OBJECT_TYPE_S64
:
502 dbg_printk("op load field s64\n");
503 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
504 if (stack_top
->u
.ptr
.rev_bo
)
506 stack_top
->u
.v
= tmp
;
507 stack_top
->type
= REG_S64
;
510 case OBJECT_TYPE_SIGNED_ENUM
:
514 dbg_printk("op load field signed enumeration\n");
515 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
516 if (stack_top
->u
.ptr
.rev_bo
)
518 stack_top
->u
.v
= tmp
;
519 stack_top
->type
= REG_S64
;
523 dbg_printk("op load field u8\n");
524 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
525 stack_top
->type
= REG_U64
;
527 case OBJECT_TYPE_U16
:
531 dbg_printk("op load field u16\n");
532 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
533 if (stack_top
->u
.ptr
.rev_bo
)
535 stack_top
->u
.v
= tmp
;
536 stack_top
->type
= REG_U64
;
539 case OBJECT_TYPE_U32
:
543 dbg_printk("op load field u32\n");
544 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
545 if (stack_top
->u
.ptr
.rev_bo
)
547 stack_top
->u
.v
= tmp
;
548 stack_top
->type
= REG_U64
;
551 case OBJECT_TYPE_U64
:
555 dbg_printk("op load field u64\n");
556 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
557 if (stack_top
->u
.ptr
.rev_bo
)
559 stack_top
->u
.v
= tmp
;
560 stack_top
->type
= REG_U64
;
563 case OBJECT_TYPE_UNSIGNED_ENUM
:
567 dbg_printk("op load field unsigned enumeration\n");
568 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
569 if (stack_top
->u
.ptr
.rev_bo
)
571 stack_top
->u
.v
= tmp
;
572 stack_top
->type
= REG_U64
;
575 case OBJECT_TYPE_STRING
:
579 dbg_printk("op load field string\n");
580 str
= (const char *) stack_top
->u
.ptr
.ptr
;
581 stack_top
->u
.s
.str
= str
;
582 if (unlikely(!stack_top
->u
.s
.str
)) {
583 dbg_printk("Bytecode warning: loading a NULL string.\n");
587 stack_top
->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
588 stack_top
->u
.s
.literal_type
=
589 ESTACK_STRING_LITERAL_TYPE_NONE
;
590 stack_top
->type
= REG_STRING
;
593 case OBJECT_TYPE_STRING_SEQUENCE
:
597 dbg_printk("op load field string sequence\n");
598 ptr
= stack_top
->u
.ptr
.ptr
;
599 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
600 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
601 if (unlikely(!stack_top
->u
.s
.str
)) {
602 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
606 stack_top
->u
.s
.literal_type
=
607 ESTACK_STRING_LITERAL_TYPE_NONE
;
608 stack_top
->type
= REG_STRING
;
611 case OBJECT_TYPE_DYNAMIC
:
613 * Dynamic types in context are looked up
614 * by context get index.
618 case OBJECT_TYPE_DOUBLE
:
621 case OBJECT_TYPE_SEQUENCE
:
622 case OBJECT_TYPE_ARRAY
:
623 case OBJECT_TYPE_STRUCT
:
624 case OBJECT_TYPE_VARIANT
:
625 printk(KERN_WARNING
"LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
636 int lttng_bytecode_interpret_format_output(struct estack_entry
*ax
,
637 struct lttng_interpreter_output
*output
)
644 output
->type
= LTTNG_INTERPRETER_TYPE_S64
;
645 output
->u
.s
= ax
->u
.v
;
648 output
->type
= LTTNG_INTERPRETER_TYPE_U64
;
649 output
->u
.u
= (uint64_t) ax
->u
.v
;
652 output
->type
= LTTNG_INTERPRETER_TYPE_STRING
;
653 output
->u
.str
.str
= ax
->u
.s
.str
;
654 output
->u
.str
.len
= ax
->u
.s
.seq_len
;
657 switch (ax
->u
.ptr
.object_type
) {
659 case OBJECT_TYPE_S16
:
660 case OBJECT_TYPE_S32
:
661 case OBJECT_TYPE_S64
:
663 case OBJECT_TYPE_U16
:
664 case OBJECT_TYPE_U32
:
665 case OBJECT_TYPE_U64
:
666 case OBJECT_TYPE_DOUBLE
:
667 case OBJECT_TYPE_STRING
:
668 case OBJECT_TYPE_STRING_SEQUENCE
:
669 ret
= dynamic_load_field(ax
);
672 /* Retry after loading ptr into stack top. */
674 case OBJECT_TYPE_SEQUENCE
:
675 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
676 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
677 output
->u
.sequence
.nr_elem
= *(unsigned long *) ax
->u
.ptr
.ptr
;
678 output
->u
.sequence
.nested_type
= lttng_kernel_get_type_sequence(ax
->u
.ptr
.field
->type
)->elem_type
;
680 case OBJECT_TYPE_ARRAY
:
681 /* Skip count (unsigned long) */
682 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
683 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
684 output
->u
.sequence
.nr_elem
= lttng_kernel_get_type_array(ax
->u
.ptr
.field
->type
)->length
;
685 output
->u
.sequence
.nested_type
= lttng_kernel_get_type_array(ax
->u
.ptr
.field
->type
)->elem_type
;
687 case OBJECT_TYPE_SIGNED_ENUM
:
688 ret
= dynamic_load_field(ax
);
691 output
->type
= LTTNG_INTERPRETER_TYPE_SIGNED_ENUM
;
692 output
->u
.s
= ax
->u
.v
;
694 case OBJECT_TYPE_UNSIGNED_ENUM
:
695 ret
= dynamic_load_field(ax
);
698 output
->type
= LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM
;
699 output
->u
.u
= ax
->u
.v
;
701 case OBJECT_TYPE_STRUCT
:
702 case OBJECT_TYPE_VARIANT
:
708 case REG_STAR_GLOB_STRING
:
709 case REG_TYPE_UNKNOWN
:
719 #define DBG_USER_STR_CUTOFF 32
722 * In debug mode, print user string (truncated, if necessary).
725 void dbg_load_ref_user_str_printk(const struct estack_entry
*user_str_reg
)
729 char user_str
[DBG_USER_STR_CUTOFF
];
733 last_char
= get_char(user_str_reg
, pos
);
734 user_str
[pos
] = last_char
;
736 } while (last_char
!= '\0' && pos
< sizeof(user_str
));
739 user_str
[sizeof(user_str
) - 1] = '\0';
740 dbg_printk("load field ref user string: '%s%s'\n", user_str
,
741 last_char
!= '\0' ? "[...]" : "");
745 void dbg_load_ref_user_str_printk(const struct estack_entry
*user_str_reg
)
751 * Return LTTNG_KERNEL_BYTECODE_INTERPRETER_OK on success.
752 * Return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR on error.
754 * For FILTER bytecode: expect a struct lttng_kernel_bytecode_filter_ctx *
756 * For CAPTURE bytecode: expect a struct lttng_interpreter_output *
759 int lttng_bytecode_interpret(struct lttng_kernel_bytecode_runtime
*kernel_bytecode
,
760 const char *interpreter_stack_data
,
761 struct lttng_kernel_probe_ctx
*lttng_probe_ctx
,
764 struct bytecode_runtime
*bytecode
= container_of(kernel_bytecode
, struct bytecode_runtime
, p
);
765 void *pc
, *next_pc
, *start_pc
;
768 struct estack _stack
;
769 struct estack
*stack
= &_stack
;
770 register int64_t ax
= 0, bx
= 0;
771 register enum entry_type ax_t
= REG_TYPE_UNKNOWN
, bx_t
= REG_TYPE_UNKNOWN
;
772 register int top
= INTERPRETER_STACK_EMPTY
;
773 #ifndef INTERPRETER_USE_SWITCH
774 static void *dispatch
[NR_BYTECODE_OPS
] = {
775 [ BYTECODE_OP_UNKNOWN
] = &&LABEL_BYTECODE_OP_UNKNOWN
,
777 [ BYTECODE_OP_RETURN
] = &&LABEL_BYTECODE_OP_RETURN
,
780 [ BYTECODE_OP_MUL
] = &&LABEL_BYTECODE_OP_MUL
,
781 [ BYTECODE_OP_DIV
] = &&LABEL_BYTECODE_OP_DIV
,
782 [ BYTECODE_OP_MOD
] = &&LABEL_BYTECODE_OP_MOD
,
783 [ BYTECODE_OP_PLUS
] = &&LABEL_BYTECODE_OP_PLUS
,
784 [ BYTECODE_OP_MINUS
] = &&LABEL_BYTECODE_OP_MINUS
,
785 [ BYTECODE_OP_BIT_RSHIFT
] = &&LABEL_BYTECODE_OP_BIT_RSHIFT
,
786 [ BYTECODE_OP_BIT_LSHIFT
] = &&LABEL_BYTECODE_OP_BIT_LSHIFT
,
787 [ BYTECODE_OP_BIT_AND
] = &&LABEL_BYTECODE_OP_BIT_AND
,
788 [ BYTECODE_OP_BIT_OR
] = &&LABEL_BYTECODE_OP_BIT_OR
,
789 [ BYTECODE_OP_BIT_XOR
] = &&LABEL_BYTECODE_OP_BIT_XOR
,
791 /* binary comparators */
792 [ BYTECODE_OP_EQ
] = &&LABEL_BYTECODE_OP_EQ
,
793 [ BYTECODE_OP_NE
] = &&LABEL_BYTECODE_OP_NE
,
794 [ BYTECODE_OP_GT
] = &&LABEL_BYTECODE_OP_GT
,
795 [ BYTECODE_OP_LT
] = &&LABEL_BYTECODE_OP_LT
,
796 [ BYTECODE_OP_GE
] = &&LABEL_BYTECODE_OP_GE
,
797 [ BYTECODE_OP_LE
] = &&LABEL_BYTECODE_OP_LE
,
799 /* string binary comparator */
800 [ BYTECODE_OP_EQ_STRING
] = &&LABEL_BYTECODE_OP_EQ_STRING
,
801 [ BYTECODE_OP_NE_STRING
] = &&LABEL_BYTECODE_OP_NE_STRING
,
802 [ BYTECODE_OP_GT_STRING
] = &&LABEL_BYTECODE_OP_GT_STRING
,
803 [ BYTECODE_OP_LT_STRING
] = &&LABEL_BYTECODE_OP_LT_STRING
,
804 [ BYTECODE_OP_GE_STRING
] = &&LABEL_BYTECODE_OP_GE_STRING
,
805 [ BYTECODE_OP_LE_STRING
] = &&LABEL_BYTECODE_OP_LE_STRING
,
807 /* globbing pattern binary comparator */
808 [ BYTECODE_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING
,
809 [ BYTECODE_OP_NE_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING
,
811 /* s64 binary comparator */
812 [ BYTECODE_OP_EQ_S64
] = &&LABEL_BYTECODE_OP_EQ_S64
,
813 [ BYTECODE_OP_NE_S64
] = &&LABEL_BYTECODE_OP_NE_S64
,
814 [ BYTECODE_OP_GT_S64
] = &&LABEL_BYTECODE_OP_GT_S64
,
815 [ BYTECODE_OP_LT_S64
] = &&LABEL_BYTECODE_OP_LT_S64
,
816 [ BYTECODE_OP_GE_S64
] = &&LABEL_BYTECODE_OP_GE_S64
,
817 [ BYTECODE_OP_LE_S64
] = &&LABEL_BYTECODE_OP_LE_S64
,
819 /* double binary comparator */
820 [ BYTECODE_OP_EQ_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE
,
821 [ BYTECODE_OP_NE_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_DOUBLE
,
822 [ BYTECODE_OP_GT_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_DOUBLE
,
823 [ BYTECODE_OP_LT_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_DOUBLE
,
824 [ BYTECODE_OP_GE_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_DOUBLE
,
825 [ BYTECODE_OP_LE_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_DOUBLE
,
827 /* Mixed S64-double binary comparators */
828 [ BYTECODE_OP_EQ_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64
,
829 [ BYTECODE_OP_NE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64
,
830 [ BYTECODE_OP_GT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64
,
831 [ BYTECODE_OP_LT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64
,
832 [ BYTECODE_OP_GE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64
,
833 [ BYTECODE_OP_LE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64
,
835 [ BYTECODE_OP_EQ_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE
,
836 [ BYTECODE_OP_NE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE
,
837 [ BYTECODE_OP_GT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE
,
838 [ BYTECODE_OP_LT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE
,
839 [ BYTECODE_OP_GE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE
,
840 [ BYTECODE_OP_LE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE
,
843 [ BYTECODE_OP_UNARY_PLUS
] = &&LABEL_BYTECODE_OP_UNARY_PLUS
,
844 [ BYTECODE_OP_UNARY_MINUS
] = &&LABEL_BYTECODE_OP_UNARY_MINUS
,
845 [ BYTECODE_OP_UNARY_NOT
] = &&LABEL_BYTECODE_OP_UNARY_NOT
,
846 [ BYTECODE_OP_UNARY_PLUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64
,
847 [ BYTECODE_OP_UNARY_MINUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64
,
848 [ BYTECODE_OP_UNARY_NOT_S64
] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64
,
849 [ BYTECODE_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE
,
850 [ BYTECODE_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE
,
851 [ BYTECODE_OP_UNARY_NOT_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE
,
854 [ BYTECODE_OP_AND
] = &&LABEL_BYTECODE_OP_AND
,
855 [ BYTECODE_OP_OR
] = &&LABEL_BYTECODE_OP_OR
,
858 [ BYTECODE_OP_LOAD_FIELD_REF
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF
,
859 [ BYTECODE_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING
,
860 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
,
861 [ BYTECODE_OP_LOAD_FIELD_REF_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64
,
862 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
,
864 /* load from immediate operand */
865 [ BYTECODE_OP_LOAD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STRING
,
866 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING
,
867 [ BYTECODE_OP_LOAD_S64
] = &&LABEL_BYTECODE_OP_LOAD_S64
,
868 [ BYTECODE_OP_LOAD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE
,
871 [ BYTECODE_OP_CAST_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_TO_S64
,
872 [ BYTECODE_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64
,
873 [ BYTECODE_OP_CAST_NOP
] = &&LABEL_BYTECODE_OP_CAST_NOP
,
875 /* get context ref */
876 [ BYTECODE_OP_GET_CONTEXT_REF
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF
,
877 [ BYTECODE_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING
,
878 [ BYTECODE_OP_GET_CONTEXT_REF_S64
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64
,
879 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
,
881 /* load userspace field ref */
882 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
,
883 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
,
885 /* Instructions for recursive traversal through composed types. */
886 [ BYTECODE_OP_GET_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT
,
887 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT
,
888 [ BYTECODE_OP_GET_PAYLOAD_ROOT
] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT
,
890 [ BYTECODE_OP_GET_SYMBOL
] = &&LABEL_BYTECODE_OP_GET_SYMBOL
,
891 [ BYTECODE_OP_GET_SYMBOL_FIELD
] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD
,
892 [ BYTECODE_OP_GET_INDEX_U16
] = &&LABEL_BYTECODE_OP_GET_INDEX_U16
,
893 [ BYTECODE_OP_GET_INDEX_U64
] = &&LABEL_BYTECODE_OP_GET_INDEX_U64
,
895 [ BYTECODE_OP_LOAD_FIELD
] = &&LABEL_BYTECODE_OP_LOAD_FIELD
,
896 [ BYTECODE_OP_LOAD_FIELD_S8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8
,
897 [ BYTECODE_OP_LOAD_FIELD_S16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16
,
898 [ BYTECODE_OP_LOAD_FIELD_S32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32
,
899 [ BYTECODE_OP_LOAD_FIELD_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64
,
900 [ BYTECODE_OP_LOAD_FIELD_U8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8
,
901 [ BYTECODE_OP_LOAD_FIELD_U16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16
,
902 [ BYTECODE_OP_LOAD_FIELD_U32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32
,
903 [ BYTECODE_OP_LOAD_FIELD_U64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64
,
904 [ BYTECODE_OP_LOAD_FIELD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING
,
905 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE
,
906 [ BYTECODE_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE
,
908 [ BYTECODE_OP_UNARY_BIT_NOT
] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT
,
910 [ BYTECODE_OP_RETURN_S64
] = &&LABEL_BYTECODE_OP_RETURN_S64
,
912 #endif /* #ifndef INTERPRETER_USE_SWITCH */
916 OP(BYTECODE_OP_UNKNOWN
):
917 OP(BYTECODE_OP_LOAD_FIELD_REF
):
918 OP(BYTECODE_OP_GET_CONTEXT_REF
):
919 #ifdef INTERPRETER_USE_SWITCH
921 #endif /* INTERPRETER_USE_SWITCH */
922 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
923 (unsigned int) *(bytecode_opcode_t
*) pc
);
927 OP(BYTECODE_OP_RETURN
):
928 /* LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR or LTTNG_KERNEL_BYTECODE_INTERPRETER_OK */
929 switch (estack_ax_t
) {
932 retval
= !!estack_ax_v
;
937 if (kernel_bytecode
->type
!= LTTNG_KERNEL_BYTECODE_TYPE_CAPTURE
) {
943 case REG_STAR_GLOB_STRING
:
944 case REG_TYPE_UNKNOWN
:
951 OP(BYTECODE_OP_RETURN_S64
):
952 /* LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR or LTTNG_KERNEL_BYTECODE_INTERPRETER_OK */
953 retval
= !!estack_ax_v
;
961 OP(BYTECODE_OP_PLUS
):
962 OP(BYTECODE_OP_MINUS
):
963 printk(KERN_WARNING
"LTTng: bytecode: unsupported bytecode op %u\n",
964 (unsigned int) *(bytecode_opcode_t
*) pc
);
974 printk(KERN_WARNING
"LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
975 (unsigned int) *(bytecode_opcode_t
*) pc
);
979 OP(BYTECODE_OP_EQ_STRING
):
983 res
= (stack_strcmp(stack
, top
, "==") == 0);
984 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
986 estack_ax_t
= REG_S64
;
987 next_pc
+= sizeof(struct binary_op
);
990 OP(BYTECODE_OP_NE_STRING
):
994 res
= (stack_strcmp(stack
, top
, "!=") != 0);
995 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
997 estack_ax_t
= REG_S64
;
998 next_pc
+= sizeof(struct binary_op
);
1001 OP(BYTECODE_OP_GT_STRING
):
1005 res
= (stack_strcmp(stack
, top
, ">") > 0);
1006 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1008 estack_ax_t
= REG_S64
;
1009 next_pc
+= sizeof(struct binary_op
);
1012 OP(BYTECODE_OP_LT_STRING
):
1016 res
= (stack_strcmp(stack
, top
, "<") < 0);
1017 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1019 estack_ax_t
= REG_S64
;
1020 next_pc
+= sizeof(struct binary_op
);
1023 OP(BYTECODE_OP_GE_STRING
):
1027 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
1028 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1030 estack_ax_t
= REG_S64
;
1031 next_pc
+= sizeof(struct binary_op
);
1034 OP(BYTECODE_OP_LE_STRING
):
1038 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
1039 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1041 estack_ax_t
= REG_S64
;
1042 next_pc
+= sizeof(struct binary_op
);
1046 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING
):
1050 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
1051 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1053 estack_ax_t
= REG_S64
;
1054 next_pc
+= sizeof(struct binary_op
);
1057 OP(BYTECODE_OP_NE_STAR_GLOB_STRING
):
1061 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
1062 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1064 estack_ax_t
= REG_S64
;
1065 next_pc
+= sizeof(struct binary_op
);
1069 OP(BYTECODE_OP_EQ_S64
):
1073 res
= (estack_bx_v
== estack_ax_v
);
1074 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1076 estack_ax_t
= REG_S64
;
1077 next_pc
+= sizeof(struct binary_op
);
1080 OP(BYTECODE_OP_NE_S64
):
1084 res
= (estack_bx_v
!= estack_ax_v
);
1085 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1087 estack_ax_t
= REG_S64
;
1088 next_pc
+= sizeof(struct binary_op
);
1091 OP(BYTECODE_OP_GT_S64
):
1095 res
= (estack_bx_v
> estack_ax_v
);
1096 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1098 estack_ax_t
= REG_S64
;
1099 next_pc
+= sizeof(struct binary_op
);
1102 OP(BYTECODE_OP_LT_S64
):
1106 res
= (estack_bx_v
< estack_ax_v
);
1107 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1109 estack_ax_t
= REG_S64
;
1110 next_pc
+= sizeof(struct binary_op
);
1113 OP(BYTECODE_OP_GE_S64
):
1117 res
= (estack_bx_v
>= estack_ax_v
);
1118 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1120 estack_ax_t
= REG_S64
;
1121 next_pc
+= sizeof(struct binary_op
);
1124 OP(BYTECODE_OP_LE_S64
):
1128 res
= (estack_bx_v
<= estack_ax_v
);
1129 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1131 estack_ax_t
= REG_S64
;
1132 next_pc
+= sizeof(struct binary_op
);
1136 OP(BYTECODE_OP_EQ_DOUBLE
):
1137 OP(BYTECODE_OP_NE_DOUBLE
):
1138 OP(BYTECODE_OP_GT_DOUBLE
):
1139 OP(BYTECODE_OP_LT_DOUBLE
):
1140 OP(BYTECODE_OP_GE_DOUBLE
):
1141 OP(BYTECODE_OP_LE_DOUBLE
):
1147 /* Mixed S64-double binary comparators */
1148 OP(BYTECODE_OP_EQ_DOUBLE_S64
):
1149 OP(BYTECODE_OP_NE_DOUBLE_S64
):
1150 OP(BYTECODE_OP_GT_DOUBLE_S64
):
1151 OP(BYTECODE_OP_LT_DOUBLE_S64
):
1152 OP(BYTECODE_OP_GE_DOUBLE_S64
):
1153 OP(BYTECODE_OP_LE_DOUBLE_S64
):
1154 OP(BYTECODE_OP_EQ_S64_DOUBLE
):
1155 OP(BYTECODE_OP_NE_S64_DOUBLE
):
1156 OP(BYTECODE_OP_GT_S64_DOUBLE
):
1157 OP(BYTECODE_OP_LT_S64_DOUBLE
):
1158 OP(BYTECODE_OP_GE_S64_DOUBLE
):
1159 OP(BYTECODE_OP_LE_S64_DOUBLE
):
1164 OP(BYTECODE_OP_BIT_RSHIFT
):
1168 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1173 /* Catch undefined behavior. */
1174 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1178 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1179 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1181 estack_ax_t
= REG_U64
;
1182 next_pc
+= sizeof(struct binary_op
);
1185 OP(BYTECODE_OP_BIT_LSHIFT
):
1189 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1194 /* Catch undefined behavior. */
1195 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1199 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1200 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1202 estack_ax_t
= REG_U64
;
1203 next_pc
+= sizeof(struct binary_op
);
1206 OP(BYTECODE_OP_BIT_AND
):
1210 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1215 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1216 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1218 estack_ax_t
= REG_U64
;
1219 next_pc
+= sizeof(struct binary_op
);
1222 OP(BYTECODE_OP_BIT_OR
):
1226 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1231 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1232 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1234 estack_ax_t
= REG_U64
;
1235 next_pc
+= sizeof(struct binary_op
);
1238 OP(BYTECODE_OP_BIT_XOR
):
1242 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1247 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1248 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1250 estack_ax_t
= REG_U64
;
1251 next_pc
+= sizeof(struct binary_op
);
1256 OP(BYTECODE_OP_UNARY_PLUS
):
1257 OP(BYTECODE_OP_UNARY_MINUS
):
1258 OP(BYTECODE_OP_UNARY_NOT
):
1259 printk(KERN_WARNING
"LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1260 (unsigned int) *(bytecode_opcode_t
*) pc
);
1265 OP(BYTECODE_OP_UNARY_BIT_NOT
):
1267 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1268 estack_ax_t
= REG_S64
;
1269 next_pc
+= sizeof(struct unary_op
);
1273 OP(BYTECODE_OP_UNARY_PLUS_S64
):
1275 next_pc
+= sizeof(struct unary_op
);
1278 OP(BYTECODE_OP_UNARY_MINUS_S64
):
1280 estack_ax_v
= -estack_ax_v
;
1281 estack_ax_t
= REG_S64
;
1282 next_pc
+= sizeof(struct unary_op
);
1285 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE
):
1286 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE
):
1291 OP(BYTECODE_OP_UNARY_NOT_S64
):
1293 estack_ax_v
= !estack_ax_v
;
1294 estack_ax_t
= REG_S64
;
1295 next_pc
+= sizeof(struct unary_op
);
1298 OP(BYTECODE_OP_UNARY_NOT_DOUBLE
):
1305 OP(BYTECODE_OP_AND
):
1307 struct logical_op
*insn
= (struct logical_op
*) pc
;
1309 /* If AX is 0, skip and evaluate to 0 */
1310 if (unlikely(estack_ax_v
== 0)) {
1311 dbg_printk("Jumping to bytecode offset %u\n",
1312 (unsigned int) insn
->skip_offset
);
1313 next_pc
= start_pc
+ insn
->skip_offset
;
1315 /* Pop 1 when jump not taken */
1316 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1317 next_pc
+= sizeof(struct logical_op
);
1323 struct logical_op
*insn
= (struct logical_op
*) pc
;
1325 /* If AX is nonzero, skip and evaluate to 1 */
1327 if (unlikely(estack_ax_v
!= 0)) {
1329 dbg_printk("Jumping to bytecode offset %u\n",
1330 (unsigned int) insn
->skip_offset
);
1331 next_pc
= start_pc
+ insn
->skip_offset
;
1333 /* Pop 1 when jump not taken */
1334 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1335 next_pc
+= sizeof(struct logical_op
);
1341 /* load field ref */
1342 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING
):
1344 struct load_op
*insn
= (struct load_op
*) pc
;
1345 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1347 dbg_printk("load field ref offset %u type string\n",
1349 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1350 estack_ax(stack
, top
)->u
.s
.str
=
1351 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1352 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1353 dbg_printk("Bytecode warning: loading a NULL string.\n");
1357 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1358 estack_ax(stack
, top
)->u
.s
.literal_type
=
1359 ESTACK_STRING_LITERAL_TYPE_NONE
;
1360 estack_ax(stack
, top
)->u
.s
.user
= 0;
1361 estack_ax(stack
, top
)->type
= REG_STRING
;
1362 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1363 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1367 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
):
1369 struct load_op
*insn
= (struct load_op
*) pc
;
1370 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1372 dbg_printk("load field ref offset %u type sequence\n",
1374 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1375 estack_ax(stack
, top
)->u
.s
.seq_len
=
1376 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
1377 estack_ax(stack
, top
)->u
.s
.str
=
1378 *(const char **) (&interpreter_stack_data
[ref
->offset
1379 + sizeof(unsigned long)]);
1380 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1381 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1385 estack_ax(stack
, top
)->u
.s
.literal_type
=
1386 ESTACK_STRING_LITERAL_TYPE_NONE
;
1387 estack_ax(stack
, top
)->u
.s
.user
= 0;
1388 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1392 OP(BYTECODE_OP_LOAD_FIELD_REF_S64
):
1394 struct load_op
*insn
= (struct load_op
*) pc
;
1395 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1397 dbg_printk("load field ref offset %u type s64\n",
1399 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1401 ((struct literal_numeric
*) &interpreter_stack_data
[ref
->offset
])->v
;
1402 estack_ax_t
= REG_S64
;
1403 dbg_printk("ref load s64 %lld\n",
1404 (long long) estack_ax_v
);
1405 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1409 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
):
1415 /* load from immediate operand */
1416 OP(BYTECODE_OP_LOAD_STRING
):
1418 struct load_op
*insn
= (struct load_op
*) pc
;
1420 dbg_printk("load string %s\n", insn
->data
);
1421 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1422 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1423 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1424 estack_ax(stack
, top
)->u
.s
.literal_type
=
1425 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
1426 estack_ax(stack
, top
)->u
.s
.user
= 0;
1427 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1431 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING
):
1433 struct load_op
*insn
= (struct load_op
*) pc
;
1435 dbg_printk("load globbing pattern %s\n", insn
->data
);
1436 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1437 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1438 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1439 estack_ax(stack
, top
)->u
.s
.literal_type
=
1440 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
1441 estack_ax(stack
, top
)->u
.s
.user
= 0;
1442 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1446 OP(BYTECODE_OP_LOAD_S64
):
1448 struct load_op
*insn
= (struct load_op
*) pc
;
1450 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1451 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
1452 estack_ax_t
= REG_S64
;
1453 dbg_printk("load s64 %lld\n",
1454 (long long) estack_ax_v
);
1455 next_pc
+= sizeof(struct load_op
)
1456 + sizeof(struct literal_numeric
);
1460 OP(BYTECODE_OP_LOAD_DOUBLE
):
1467 OP(BYTECODE_OP_CAST_TO_S64
):
1468 printk(KERN_WARNING
"LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1469 (unsigned int) *(bytecode_opcode_t
*) pc
);
1473 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64
):
1479 OP(BYTECODE_OP_CAST_NOP
):
1481 next_pc
+= sizeof(struct cast_op
);
1485 /* get context ref */
1486 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING
):
1488 struct load_op
*insn
= (struct load_op
*) pc
;
1489 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1490 struct lttng_kernel_ctx_field
*ctx_field
;
1491 struct lttng_ctx_value v
;
1493 dbg_printk("get context ref offset %u type string\n",
1495 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1496 ctx_field
->get_value(ctx_field
->priv
, lttng_probe_ctx
, &v
);
1497 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1498 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
1499 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1500 dbg_printk("Bytecode warning: loading a NULL string.\n");
1504 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1505 estack_ax(stack
, top
)->u
.s
.literal_type
=
1506 ESTACK_STRING_LITERAL_TYPE_NONE
;
1507 estack_ax(stack
, top
)->u
.s
.user
= 0;
1508 estack_ax(stack
, top
)->type
= REG_STRING
;
1509 dbg_printk("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1510 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1514 OP(BYTECODE_OP_GET_CONTEXT_REF_S64
):
1516 struct load_op
*insn
= (struct load_op
*) pc
;
1517 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1518 struct lttng_kernel_ctx_field
*ctx_field
;
1519 struct lttng_ctx_value v
;
1521 dbg_printk("get context ref offset %u type s64\n",
1523 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1524 ctx_field
->get_value(ctx_field
->priv
, lttng_probe_ctx
, &v
);
1525 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1526 estack_ax_v
= v
.u
.s64
;
1527 estack_ax_t
= REG_S64
;
1528 dbg_printk("ref get context s64 %lld\n",
1529 (long long) estack_ax_v
);
1530 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1534 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
):
1540 /* load userspace field ref */
1541 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
):
1543 struct load_op
*insn
= (struct load_op
*) pc
;
1544 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1546 dbg_printk("load field ref offset %u type user string\n",
1548 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1549 estack_ax(stack
, top
)->u
.s
.user_str
=
1550 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1551 if (unlikely(!estack_ax(stack
, top
)->u
.s
.user_str
)) {
1552 dbg_printk("Bytecode warning: loading a NULL string.\n");
1556 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1557 estack_ax(stack
, top
)->u
.s
.literal_type
=
1558 ESTACK_STRING_LITERAL_TYPE_NONE
;
1559 estack_ax(stack
, top
)->u
.s
.user
= 1;
1560 estack_ax(stack
, top
)->type
= REG_STRING
;
1561 dbg_load_ref_user_str_printk(estack_ax(stack
, top
));
1562 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1566 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
):
1568 struct load_op
*insn
= (struct load_op
*) pc
;
1569 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1571 dbg_printk("load field ref offset %u type user sequence\n",
1573 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1574 estack_ax(stack
, top
)->u
.s
.seq_len
=
1575 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
1576 estack_ax(stack
, top
)->u
.s
.user_str
=
1577 *(const char **) (&interpreter_stack_data
[ref
->offset
1578 + sizeof(unsigned long)]);
1579 if (unlikely(!estack_ax(stack
, top
)->u
.s
.user_str
)) {
1580 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1584 estack_ax(stack
, top
)->u
.s
.literal_type
=
1585 ESTACK_STRING_LITERAL_TYPE_NONE
;
1586 estack_ax(stack
, top
)->u
.s
.user
= 1;
1587 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1591 OP(BYTECODE_OP_GET_CONTEXT_ROOT
):
1593 dbg_printk("op get context root\n");
1594 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1595 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
1596 /* "field" only needed for variants. */
1597 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1598 estack_ax(stack
, top
)->type
= REG_PTR
;
1599 next_pc
+= sizeof(struct load_op
);
1603 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT
):
1609 OP(BYTECODE_OP_GET_PAYLOAD_ROOT
):
1611 dbg_printk("op get app payload root\n");
1612 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1613 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
1614 estack_ax(stack
, top
)->u
.ptr
.ptr
= interpreter_stack_data
;
1615 /* "field" only needed for variants. */
1616 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1617 estack_ax(stack
, top
)->type
= REG_PTR
;
1618 next_pc
+= sizeof(struct load_op
);
1622 OP(BYTECODE_OP_GET_SYMBOL
):
1624 dbg_printk("op get symbol\n");
1625 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
1627 printk(KERN_WARNING
"LTTng: bytecode: Nested fields not implemented yet.\n");
1630 case LOAD_ROOT_CONTEXT
:
1631 case LOAD_ROOT_APP_CONTEXT
:
1632 case LOAD_ROOT_PAYLOAD
:
1634 * symbol lookup is performed by
1640 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1644 OP(BYTECODE_OP_GET_SYMBOL_FIELD
):
1647 * Used for first variant encountered in a
1648 * traversal. Variants are not implemented yet.
1654 OP(BYTECODE_OP_GET_INDEX_U16
):
1656 struct load_op
*insn
= (struct load_op
*) pc
;
1657 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1659 dbg_printk("op get index u16\n");
1660 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1663 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1664 estack_ax_t
= estack_ax(stack
, top
)->type
;
1665 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1669 OP(BYTECODE_OP_GET_INDEX_U64
):
1671 struct load_op
*insn
= (struct load_op
*) pc
;
1672 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1674 dbg_printk("op get index u64\n");
1675 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1678 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1679 estack_ax_t
= estack_ax(stack
, top
)->type
;
1680 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1684 OP(BYTECODE_OP_LOAD_FIELD
):
1686 dbg_printk("op load field\n");
1687 ret
= dynamic_load_field(estack_ax(stack
, top
));
1690 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1691 estack_ax_t
= estack_ax(stack
, top
)->type
;
1692 next_pc
+= sizeof(struct load_op
);
1696 OP(BYTECODE_OP_LOAD_FIELD_S8
):
1698 dbg_printk("op load field s8\n");
1700 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1701 estack_ax_t
= REG_S64
;
1702 next_pc
+= sizeof(struct load_op
);
1705 OP(BYTECODE_OP_LOAD_FIELD_S16
):
1707 dbg_printk("op load field s16\n");
1709 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1710 estack_ax_t
= REG_S64
;
1711 next_pc
+= sizeof(struct load_op
);
1714 OP(BYTECODE_OP_LOAD_FIELD_S32
):
1716 dbg_printk("op load field s32\n");
1718 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1719 estack_ax_t
= REG_S64
;
1720 next_pc
+= sizeof(struct load_op
);
1723 OP(BYTECODE_OP_LOAD_FIELD_S64
):
1725 dbg_printk("op load field s64\n");
1727 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1728 estack_ax_t
= REG_S64
;
1729 next_pc
+= sizeof(struct load_op
);
1732 OP(BYTECODE_OP_LOAD_FIELD_U8
):
1734 dbg_printk("op load field u8\n");
1736 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1737 estack_ax_t
= REG_S64
;
1738 next_pc
+= sizeof(struct load_op
);
1741 OP(BYTECODE_OP_LOAD_FIELD_U16
):
1743 dbg_printk("op load field u16\n");
1745 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1746 estack_ax_t
= REG_S64
;
1747 next_pc
+= sizeof(struct load_op
);
1750 OP(BYTECODE_OP_LOAD_FIELD_U32
):
1752 dbg_printk("op load field u32\n");
1754 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1755 estack_ax_t
= REG_S64
;
1756 next_pc
+= sizeof(struct load_op
);
1759 OP(BYTECODE_OP_LOAD_FIELD_U64
):
1761 dbg_printk("op load field u64\n");
1763 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1764 estack_ax_t
= REG_S64
;
1765 next_pc
+= sizeof(struct load_op
);
1768 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE
):
1774 OP(BYTECODE_OP_LOAD_FIELD_STRING
):
1778 dbg_printk("op load field string\n");
1779 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1780 estack_ax(stack
, top
)->u
.s
.str
= str
;
1781 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1782 dbg_printk("Bytecode warning: loading a NULL string.\n");
1786 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1787 estack_ax(stack
, top
)->u
.s
.literal_type
=
1788 ESTACK_STRING_LITERAL_TYPE_NONE
;
1789 estack_ax(stack
, top
)->type
= REG_STRING
;
1790 next_pc
+= sizeof(struct load_op
);
1794 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE
):
1798 dbg_printk("op load field string sequence\n");
1799 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
1800 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
1801 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
1802 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1803 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1807 estack_ax(stack
, top
)->u
.s
.literal_type
=
1808 ESTACK_STRING_LITERAL_TYPE_NONE
;
1809 estack_ax(stack
, top
)->type
= REG_STRING
;
1810 next_pc
+= sizeof(struct load_op
);
1816 /* No need to prepare output if an error occurred. */
1818 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR
;
1820 /* Prepare output. */
1821 switch (kernel_bytecode
->type
) {
1822 case LTTNG_KERNEL_BYTECODE_TYPE_FILTER
:
1824 struct lttng_kernel_bytecode_filter_ctx
*filter_ctx
=
1825 (struct lttng_kernel_bytecode_filter_ctx
*) caller_ctx
;
1827 filter_ctx
->result
= LTTNG_KERNEL_BYTECODE_FILTER_ACCEPT
;
1829 filter_ctx
->result
= LTTNG_KERNEL_BYTECODE_FILTER_REJECT
;
1832 case LTTNG_KERNEL_BYTECODE_TYPE_CAPTURE
:
1833 ret
= lttng_bytecode_interpret_format_output(estack_ax(stack
, top
),
1834 (struct lttng_interpreter_output
*) caller_ctx
);
1841 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR
;
1843 return LTTNG_KERNEL_BYTECODE_INTERPRETER_OK
;
/*
 * The interpreter's computed-goto dispatch produces a stack frame that
 * objtool cannot validate; mark it non-standard so stack validation is
 * skipped for this function (see wrapper/objtool.h).
 */
LTTNG_STACK_FRAME_NON_STANDARD(lttng_bytecode_interpret);
1848 * Return LTTNG_KERNEL_EVENT_FILTER_ACCEPT or LTTNG_KERNEL_EVENT_FILTER_REJECT.
1850 int lttng_kernel_interpret_event_filter(const struct lttng_kernel_event_common
*event
,
1851 const char *interpreter_stack_data
,
1852 struct lttng_kernel_probe_ctx
*probe_ctx
,
1853 void *event_filter_ctx
__attribute__((unused
)))
1855 struct lttng_kernel_bytecode_runtime
*filter_bc_runtime
;
1856 struct list_head
*filter_bytecode_runtime_head
= &event
->priv
->filter_bytecode_runtime_head
;
1857 struct lttng_kernel_bytecode_filter_ctx bytecode_filter_ctx
;
1858 bool filter_record
= false;
1860 list_for_each_entry_rcu(filter_bc_runtime
, filter_bytecode_runtime_head
, node
) {
1861 if (likely(filter_bc_runtime
->interpreter_func(filter_bc_runtime
,
1862 interpreter_stack_data
, probe_ctx
, &bytecode_filter_ctx
) == LTTNG_KERNEL_BYTECODE_INTERPRETER_OK
)) {
1863 if (unlikely(bytecode_filter_ctx
.result
== LTTNG_KERNEL_BYTECODE_FILTER_ACCEPT
)) {
1864 filter_record
= true;
1870 return LTTNG_KERNEL_EVENT_FILTER_ACCEPT
;
1872 return LTTNG_KERNEL_EVENT_FILTER_REJECT
;