/* SPDX-License-Identifier: MIT
 *
 * lttng-bytecode-specialize.c
 *
 * LTTng modules bytecode code specializer.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 */
10 #include <linux/slab.h>
11 #include <lttng/lttng-bytecode.h>
12 #include <lttng/align.h>
14 static ssize_t
bytecode_reserve_data(struct bytecode_runtime
*runtime
,
15 size_t align
, size_t len
)
18 size_t padding
= offset_align(runtime
->data_len
, align
);
19 size_t new_len
= runtime
->data_len
+ padding
+ len
;
20 size_t new_alloc_len
= new_len
;
21 size_t old_alloc_len
= runtime
->data_alloc_len
;
23 if (new_len
> INTERPRETER_MAX_DATA_LEN
)
26 if (new_alloc_len
> old_alloc_len
) {
30 max_t(size_t, 1U << get_count_order(new_alloc_len
), old_alloc_len
<< 1);
31 newptr
= krealloc(runtime
->data
, new_alloc_len
, GFP_KERNEL
);
34 runtime
->data
= newptr
;
35 /* We zero directly the memory from start of allocation. */
36 memset(&runtime
->data
[old_alloc_len
], 0, new_alloc_len
- old_alloc_len
);
37 runtime
->data_alloc_len
= new_alloc_len
;
39 runtime
->data_len
+= padding
;
40 ret
= runtime
->data_len
;
41 runtime
->data_len
+= len
;
45 static ssize_t
bytecode_push_data(struct bytecode_runtime
*runtime
,
46 const void *p
, size_t align
, size_t len
)
50 offset
= bytecode_reserve_data(runtime
, align
, len
);
53 memcpy(&runtime
->data
[offset
], p
, len
);
57 static int specialize_load_field(struct vstack_entry
*stack_top
,
62 switch (stack_top
->load
.type
) {
65 case LOAD_ROOT_CONTEXT
:
66 case LOAD_ROOT_APP_CONTEXT
:
67 case LOAD_ROOT_PAYLOAD
:
69 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
73 switch (stack_top
->load
.object_type
) {
75 dbg_printk("op load field s8\n");
76 stack_top
->type
= REG_S64
;
77 if (!stack_top
->load
.rev_bo
)
78 insn
->op
= BYTECODE_OP_LOAD_FIELD_S8
;
81 dbg_printk("op load field s16\n");
82 stack_top
->type
= REG_S64
;
83 if (!stack_top
->load
.rev_bo
)
84 insn
->op
= BYTECODE_OP_LOAD_FIELD_S16
;
87 dbg_printk("op load field s32\n");
88 stack_top
->type
= REG_S64
;
89 if (!stack_top
->load
.rev_bo
)
90 insn
->op
= BYTECODE_OP_LOAD_FIELD_S32
;
93 dbg_printk("op load field s64\n");
94 stack_top
->type
= REG_S64
;
95 if (!stack_top
->load
.rev_bo
)
96 insn
->op
= BYTECODE_OP_LOAD_FIELD_S64
;
99 dbg_printk("op load field u8\n");
100 stack_top
->type
= REG_S64
;
101 insn
->op
= BYTECODE_OP_LOAD_FIELD_U8
;
103 case OBJECT_TYPE_U16
:
104 dbg_printk("op load field u16\n");
105 stack_top
->type
= REG_S64
;
106 if (!stack_top
->load
.rev_bo
)
107 insn
->op
= BYTECODE_OP_LOAD_FIELD_U16
;
109 case OBJECT_TYPE_U32
:
110 dbg_printk("op load field u32\n");
111 stack_top
->type
= REG_S64
;
112 if (!stack_top
->load
.rev_bo
)
113 insn
->op
= BYTECODE_OP_LOAD_FIELD_U32
;
115 case OBJECT_TYPE_U64
:
116 dbg_printk("op load field u64\n");
117 stack_top
->type
= REG_S64
;
118 if (!stack_top
->load
.rev_bo
)
119 insn
->op
= BYTECODE_OP_LOAD_FIELD_U64
;
121 case OBJECT_TYPE_DOUBLE
:
122 printk(KERN_WARNING
"LTTng: bytecode: Double type unsupported\n\n");
125 case OBJECT_TYPE_STRING
:
126 dbg_printk("op load field string\n");
127 stack_top
->type
= REG_STRING
;
128 insn
->op
= BYTECODE_OP_LOAD_FIELD_STRING
;
130 case OBJECT_TYPE_STRING_SEQUENCE
:
131 dbg_printk("op load field string sequence\n");
132 stack_top
->type
= REG_STRING
;
133 insn
->op
= BYTECODE_OP_LOAD_FIELD_SEQUENCE
;
135 case OBJECT_TYPE_DYNAMIC
:
138 case OBJECT_TYPE_SEQUENCE
:
139 case OBJECT_TYPE_ARRAY
:
140 case OBJECT_TYPE_STRUCT
:
141 case OBJECT_TYPE_VARIANT
:
142 printk(KERN_WARNING
"LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
152 static int specialize_get_index_object_type(enum object_type
*otype
,
153 int signedness
, uint32_t elem_len
)
158 *otype
= OBJECT_TYPE_S8
;
160 *otype
= OBJECT_TYPE_U8
;
164 *otype
= OBJECT_TYPE_S16
;
166 *otype
= OBJECT_TYPE_U16
;
170 *otype
= OBJECT_TYPE_S32
;
172 *otype
= OBJECT_TYPE_U32
;
176 *otype
= OBJECT_TYPE_S64
;
178 *otype
= OBJECT_TYPE_U64
;
186 static int specialize_get_index(struct bytecode_runtime
*runtime
,
187 struct load_op
*insn
, uint64_t index
,
188 struct vstack_entry
*stack_top
,
192 struct bytecode_get_index_data gid
;
195 memset(&gid
, 0, sizeof(gid
));
196 switch (stack_top
->load
.type
) {
198 switch (stack_top
->load
.object_type
) {
199 case OBJECT_TYPE_ARRAY
:
201 const struct lttng_integer_type
*integer_type
;
202 const struct lttng_event_field
*field
;
203 uint32_t elem_len
, num_elems
;
206 field
= stack_top
->load
.field
;
207 if (!lttng_is_bytewise_integer(field
->type
.u
.array_nestable
.elem_type
)) {
211 integer_type
= &field
->type
.u
.array_nestable
.elem_type
->u
.integer
;
212 num_elems
= field
->type
.u
.array_nestable
.length
;
213 elem_len
= integer_type
->size
;
214 signedness
= integer_type
->signedness
;
215 if (index
>= num_elems
) {
219 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
220 signedness
, elem_len
);
223 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
224 gid
.array_len
= num_elems
* (elem_len
/ CHAR_BIT
);
225 gid
.elem
.type
= stack_top
->load
.object_type
;
226 gid
.elem
.len
= elem_len
;
227 if (integer_type
->reverse_byte_order
)
228 gid
.elem
.rev_bo
= true;
229 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
232 case OBJECT_TYPE_SEQUENCE
:
234 const struct lttng_integer_type
*integer_type
;
235 const struct lttng_event_field
*field
;
239 field
= stack_top
->load
.field
;
240 if (!lttng_is_bytewise_integer(field
->type
.u
.sequence_nestable
.elem_type
)) {
244 integer_type
= &field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
;
245 elem_len
= integer_type
->size
;
246 signedness
= integer_type
->signedness
;
247 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
248 signedness
, elem_len
);
251 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
252 gid
.elem
.type
= stack_top
->load
.object_type
;
253 gid
.elem
.len
= elem_len
;
254 if (integer_type
->reverse_byte_order
)
255 gid
.elem
.rev_bo
= true;
256 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
259 case OBJECT_TYPE_STRUCT
:
260 /* Only generated by the specialize phase. */
261 case OBJECT_TYPE_VARIANT
: /* Fall-through */
263 printk(KERN_WARNING
"LTTng: bytecode: Unexpected get index type %d",
264 (int) stack_top
->load
.object_type
);
269 case LOAD_ROOT_CONTEXT
:
270 case LOAD_ROOT_APP_CONTEXT
:
271 case LOAD_ROOT_PAYLOAD
:
272 printk(KERN_WARNING
"LTTng: bytecode: Index lookup for root field not implemented yet.\n");
276 data_offset
= bytecode_push_data(runtime
, &gid
,
277 __alignof__(gid
), sizeof(gid
));
278 if (data_offset
< 0) {
284 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
287 ((struct get_index_u64
*) insn
->data
)->index
= data_offset
;
300 static int specialize_context_lookup_name(struct lttng_ctx
*ctx
,
301 struct bytecode_runtime
*bytecode
,
302 struct load_op
*insn
)
307 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
308 name
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.reloc_offset
+ offset
;
309 return lttng_get_context_index(ctx
, name
);
312 static int specialize_load_object(const struct lttng_event_field
*field
,
313 struct vstack_load
*load
, bool is_context
)
315 load
->type
= LOAD_OBJECT
;
317 switch (field
->type
.atype
) {
319 if (field
->type
.u
.integer
.signedness
)
320 load
->object_type
= OBJECT_TYPE_S64
;
322 load
->object_type
= OBJECT_TYPE_U64
;
323 load
->rev_bo
= false;
325 case atype_enum_nestable
:
327 const struct lttng_integer_type
*itype
=
328 &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
330 if (itype
->signedness
)
331 load
->object_type
= OBJECT_TYPE_S64
;
333 load
->object_type
= OBJECT_TYPE_U64
;
334 load
->rev_bo
= false;
337 case atype_array_nestable
:
338 if (!lttng_is_bytewise_integer(field
->type
.u
.array_nestable
.elem_type
)) {
339 printk(KERN_WARNING
"LTTng: bytecode: Array nesting only supports integer types.\n");
343 load
->object_type
= OBJECT_TYPE_STRING
;
345 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
346 load
->object_type
= OBJECT_TYPE_ARRAY
;
349 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
353 case atype_sequence_nestable
:
354 if (!lttng_is_bytewise_integer(field
->type
.u
.sequence_nestable
.elem_type
)) {
355 printk(KERN_WARNING
"LTTng: bytecode: Sequence nesting only supports integer types.\n");
359 load
->object_type
= OBJECT_TYPE_STRING
;
361 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
362 load
->object_type
= OBJECT_TYPE_SEQUENCE
;
365 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
370 load
->object_type
= OBJECT_TYPE_STRING
;
372 case atype_struct_nestable
:
373 printk(KERN_WARNING
"LTTng: bytecode: Structure type cannot be loaded.\n");
375 case atype_variant_nestable
:
376 printk(KERN_WARNING
"LTTng: bytecode: Variant type cannot be loaded.\n");
379 printk(KERN_WARNING
"LTTng: bytecode: Unknown type: %d", (int) field
->type
.atype
);
385 static int specialize_context_lookup(struct lttng_ctx
*ctx
,
386 struct bytecode_runtime
*runtime
,
387 struct load_op
*insn
,
388 struct vstack_load
*load
)
391 struct lttng_ctx_field
*ctx_field
;
392 struct lttng_event_field
*field
;
393 struct bytecode_get_index_data gid
;
396 idx
= specialize_context_lookup_name(ctx
, runtime
, insn
);
400 ctx_field
= <tng_static_ctx
->fields
[idx
];
401 field
= &ctx_field
->event_field
;
402 ret
= specialize_load_object(field
, load
, true);
405 /* Specialize each get_symbol into a get_index. */
406 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
407 memset(&gid
, 0, sizeof(gid
));
409 gid
.elem
.type
= load
->object_type
;
410 gid
.elem
.rev_bo
= load
->rev_bo
;
412 data_offset
= bytecode_push_data(runtime
, &gid
,
413 __alignof__(gid
), sizeof(gid
));
414 if (data_offset
< 0) {
417 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
421 static int specialize_payload_lookup(const struct lttng_event_desc
*event_desc
,
422 struct bytecode_runtime
*runtime
,
423 struct load_op
*insn
,
424 struct vstack_load
*load
)
428 unsigned int i
, nr_fields
;
430 uint32_t field_offset
= 0;
431 const struct lttng_event_field
*field
;
433 struct bytecode_get_index_data gid
;
436 nr_fields
= event_desc
->nr_fields
;
437 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
438 name
= runtime
->p
.bc
->bc
.data
+ runtime
->p
.bc
->bc
.reloc_offset
+ offset
;
439 for (i
= 0; i
< nr_fields
; i
++) {
440 field
= &event_desc
->fields
[i
];
441 if (field
->nofilter
) {
444 if (!strcmp(field
->name
, name
)) {
448 /* compute field offset on stack */
449 switch (field
->type
.atype
) {
451 case atype_enum_nestable
:
452 field_offset
+= sizeof(int64_t);
454 case atype_array_nestable
:
455 case atype_sequence_nestable
:
456 field_offset
+= sizeof(unsigned long);
457 field_offset
+= sizeof(void *);
460 field_offset
+= sizeof(void *);
472 ret
= specialize_load_object(field
, load
, false);
476 /* Specialize each get_symbol into a get_index. */
477 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
478 memset(&gid
, 0, sizeof(gid
));
479 gid
.offset
= field_offset
;
480 gid
.elem
.type
= load
->object_type
;
481 gid
.elem
.rev_bo
= load
->rev_bo
;
483 data_offset
= bytecode_push_data(runtime
, &gid
,
484 __alignof__(gid
), sizeof(gid
));
485 if (data_offset
< 0) {
489 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
495 int lttng_bytecode_specialize(const struct lttng_event_desc
*event_desc
,
496 struct bytecode_runtime
*bytecode
)
498 void *pc
, *next_pc
, *start_pc
;
500 struct vstack _stack
;
501 struct vstack
*stack
= &_stack
;
502 struct lttng_ctx
*ctx
= bytecode
->p
.ctx
;
506 start_pc
= &bytecode
->code
[0];
507 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
509 switch (*(bytecode_opcode_t
*) pc
) {
510 case BYTECODE_OP_UNKNOWN
:
512 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
513 (unsigned int) *(bytecode_opcode_t
*) pc
);
517 case BYTECODE_OP_RETURN
:
518 case BYTECODE_OP_RETURN_S64
:
523 case BYTECODE_OP_MUL
:
524 case BYTECODE_OP_DIV
:
525 case BYTECODE_OP_MOD
:
526 case BYTECODE_OP_PLUS
:
527 case BYTECODE_OP_MINUS
:
528 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
529 (unsigned int) *(bytecode_opcode_t
*) pc
);
535 struct binary_op
*insn
= (struct binary_op
*) pc
;
537 switch(vstack_ax(stack
)->type
) {
539 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
544 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
545 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
547 insn
->op
= BYTECODE_OP_EQ_STRING
;
549 case REG_STAR_GLOB_STRING
:
550 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
553 if (vstack_bx(stack
)->type
== REG_S64
)
554 insn
->op
= BYTECODE_OP_EQ_S64
;
556 insn
->op
= BYTECODE_OP_EQ_DOUBLE_S64
;
559 if (vstack_bx(stack
)->type
== REG_S64
)
560 insn
->op
= BYTECODE_OP_EQ_S64_DOUBLE
;
562 insn
->op
= BYTECODE_OP_EQ_DOUBLE
;
566 if (vstack_pop(stack
)) {
570 vstack_ax(stack
)->type
= REG_S64
;
571 next_pc
+= sizeof(struct binary_op
);
577 struct binary_op
*insn
= (struct binary_op
*) pc
;
579 switch(vstack_ax(stack
)->type
) {
581 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
586 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
587 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
589 insn
->op
= BYTECODE_OP_NE_STRING
;
591 case REG_STAR_GLOB_STRING
:
592 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
595 if (vstack_bx(stack
)->type
== REG_S64
)
596 insn
->op
= BYTECODE_OP_NE_S64
;
598 insn
->op
= BYTECODE_OP_NE_DOUBLE_S64
;
601 if (vstack_bx(stack
)->type
== REG_S64
)
602 insn
->op
= BYTECODE_OP_NE_S64_DOUBLE
;
604 insn
->op
= BYTECODE_OP_NE_DOUBLE
;
608 if (vstack_pop(stack
)) {
612 vstack_ax(stack
)->type
= REG_S64
;
613 next_pc
+= sizeof(struct binary_op
);
619 struct binary_op
*insn
= (struct binary_op
*) pc
;
621 switch(vstack_ax(stack
)->type
) {
623 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
627 case REG_STAR_GLOB_STRING
:
628 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '>' binary operator\n");
632 insn
->op
= BYTECODE_OP_GT_STRING
;
635 if (vstack_bx(stack
)->type
== REG_S64
)
636 insn
->op
= BYTECODE_OP_GT_S64
;
638 insn
->op
= BYTECODE_OP_GT_DOUBLE_S64
;
641 if (vstack_bx(stack
)->type
== REG_S64
)
642 insn
->op
= BYTECODE_OP_GT_S64_DOUBLE
;
644 insn
->op
= BYTECODE_OP_GT_DOUBLE
;
648 if (vstack_pop(stack
)) {
652 vstack_ax(stack
)->type
= REG_S64
;
653 next_pc
+= sizeof(struct binary_op
);
659 struct binary_op
*insn
= (struct binary_op
*) pc
;
661 switch(vstack_ax(stack
)->type
) {
663 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
667 case REG_STAR_GLOB_STRING
:
668 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '<' binary operator\n");
672 insn
->op
= BYTECODE_OP_LT_STRING
;
675 if (vstack_bx(stack
)->type
== REG_S64
)
676 insn
->op
= BYTECODE_OP_LT_S64
;
678 insn
->op
= BYTECODE_OP_LT_DOUBLE_S64
;
681 if (vstack_bx(stack
)->type
== REG_S64
)
682 insn
->op
= BYTECODE_OP_LT_S64_DOUBLE
;
684 insn
->op
= BYTECODE_OP_LT_DOUBLE
;
688 if (vstack_pop(stack
)) {
692 vstack_ax(stack
)->type
= REG_S64
;
693 next_pc
+= sizeof(struct binary_op
);
699 struct binary_op
*insn
= (struct binary_op
*) pc
;
701 switch(vstack_ax(stack
)->type
) {
703 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
707 case REG_STAR_GLOB_STRING
:
708 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '>=' binary operator\n");
712 insn
->op
= BYTECODE_OP_GE_STRING
;
715 if (vstack_bx(stack
)->type
== REG_S64
)
716 insn
->op
= BYTECODE_OP_GE_S64
;
718 insn
->op
= BYTECODE_OP_GE_DOUBLE_S64
;
721 if (vstack_bx(stack
)->type
== REG_S64
)
722 insn
->op
= BYTECODE_OP_GE_S64_DOUBLE
;
724 insn
->op
= BYTECODE_OP_GE_DOUBLE
;
728 if (vstack_pop(stack
)) {
732 vstack_ax(stack
)->type
= REG_S64
;
733 next_pc
+= sizeof(struct binary_op
);
738 struct binary_op
*insn
= (struct binary_op
*) pc
;
740 switch(vstack_ax(stack
)->type
) {
742 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
746 case REG_STAR_GLOB_STRING
:
747 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '<=' binary operator\n");
751 insn
->op
= BYTECODE_OP_LE_STRING
;
754 if (vstack_bx(stack
)->type
== REG_S64
)
755 insn
->op
= BYTECODE_OP_LE_S64
;
757 insn
->op
= BYTECODE_OP_LE_DOUBLE_S64
;
760 if (vstack_bx(stack
)->type
== REG_S64
)
761 insn
->op
= BYTECODE_OP_LE_S64_DOUBLE
;
763 insn
->op
= BYTECODE_OP_LE_DOUBLE
;
766 vstack_ax(stack
)->type
= REG_S64
;
767 next_pc
+= sizeof(struct binary_op
);
771 case BYTECODE_OP_EQ_STRING
:
772 case BYTECODE_OP_NE_STRING
:
773 case BYTECODE_OP_GT_STRING
:
774 case BYTECODE_OP_LT_STRING
:
775 case BYTECODE_OP_GE_STRING
:
776 case BYTECODE_OP_LE_STRING
:
777 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
778 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
779 case BYTECODE_OP_EQ_S64
:
780 case BYTECODE_OP_NE_S64
:
781 case BYTECODE_OP_GT_S64
:
782 case BYTECODE_OP_LT_S64
:
783 case BYTECODE_OP_GE_S64
:
784 case BYTECODE_OP_LE_S64
:
785 case BYTECODE_OP_EQ_DOUBLE
:
786 case BYTECODE_OP_NE_DOUBLE
:
787 case BYTECODE_OP_GT_DOUBLE
:
788 case BYTECODE_OP_LT_DOUBLE
:
789 case BYTECODE_OP_GE_DOUBLE
:
790 case BYTECODE_OP_LE_DOUBLE
:
791 case BYTECODE_OP_EQ_DOUBLE_S64
:
792 case BYTECODE_OP_NE_DOUBLE_S64
:
793 case BYTECODE_OP_GT_DOUBLE_S64
:
794 case BYTECODE_OP_LT_DOUBLE_S64
:
795 case BYTECODE_OP_GE_DOUBLE_S64
:
796 case BYTECODE_OP_LE_DOUBLE_S64
:
797 case BYTECODE_OP_EQ_S64_DOUBLE
:
798 case BYTECODE_OP_NE_S64_DOUBLE
:
799 case BYTECODE_OP_GT_S64_DOUBLE
:
800 case BYTECODE_OP_LT_S64_DOUBLE
:
801 case BYTECODE_OP_GE_S64_DOUBLE
:
802 case BYTECODE_OP_LE_S64_DOUBLE
:
803 case BYTECODE_OP_BIT_RSHIFT
:
804 case BYTECODE_OP_BIT_LSHIFT
:
805 case BYTECODE_OP_BIT_AND
:
806 case BYTECODE_OP_BIT_OR
:
807 case BYTECODE_OP_BIT_XOR
:
810 if (vstack_pop(stack
)) {
814 vstack_ax(stack
)->type
= REG_S64
;
815 next_pc
+= sizeof(struct binary_op
);
820 case BYTECODE_OP_UNARY_PLUS
:
822 struct unary_op
*insn
= (struct unary_op
*) pc
;
824 switch(vstack_ax(stack
)->type
) {
826 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
831 insn
->op
= BYTECODE_OP_UNARY_PLUS_S64
;
834 insn
->op
= BYTECODE_OP_UNARY_PLUS_DOUBLE
;
838 next_pc
+= sizeof(struct unary_op
);
842 case BYTECODE_OP_UNARY_MINUS
:
844 struct unary_op
*insn
= (struct unary_op
*) pc
;
846 switch(vstack_ax(stack
)->type
) {
848 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
853 insn
->op
= BYTECODE_OP_UNARY_MINUS_S64
;
856 insn
->op
= BYTECODE_OP_UNARY_MINUS_DOUBLE
;
860 next_pc
+= sizeof(struct unary_op
);
864 case BYTECODE_OP_UNARY_NOT
:
866 struct unary_op
*insn
= (struct unary_op
*) pc
;
868 switch(vstack_ax(stack
)->type
) {
870 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
875 insn
->op
= BYTECODE_OP_UNARY_NOT_S64
;
878 insn
->op
= BYTECODE_OP_UNARY_NOT_DOUBLE
;
882 next_pc
+= sizeof(struct unary_op
);
886 case BYTECODE_OP_UNARY_BIT_NOT
:
889 next_pc
+= sizeof(struct unary_op
);
893 case BYTECODE_OP_UNARY_PLUS_S64
:
894 case BYTECODE_OP_UNARY_MINUS_S64
:
895 case BYTECODE_OP_UNARY_NOT_S64
:
896 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
897 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
898 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
901 next_pc
+= sizeof(struct unary_op
);
906 case BYTECODE_OP_AND
:
909 /* Continue to next instruction */
910 /* Pop 1 when jump not taken */
911 if (vstack_pop(stack
)) {
915 next_pc
+= sizeof(struct logical_op
);
920 case BYTECODE_OP_LOAD_FIELD_REF
:
922 printk(KERN_WARNING
"LTTng: bytecode: Unknown field ref type\n");
926 /* get context ref */
927 case BYTECODE_OP_GET_CONTEXT_REF
:
929 printk(KERN_WARNING
"LTTng: bytecode: Unknown get context ref type\n");
933 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
934 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
935 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
936 case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
:
937 case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
:
939 if (vstack_push(stack
)) {
943 vstack_ax(stack
)->type
= REG_STRING
;
944 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
947 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
948 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
950 if (vstack_push(stack
)) {
954 vstack_ax(stack
)->type
= REG_S64
;
955 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
958 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
959 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
961 if (vstack_push(stack
)) {
965 vstack_ax(stack
)->type
= REG_DOUBLE
;
966 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
970 /* load from immediate operand */
971 case BYTECODE_OP_LOAD_STRING
:
973 struct load_op
*insn
= (struct load_op
*) pc
;
975 if (vstack_push(stack
)) {
979 vstack_ax(stack
)->type
= REG_STRING
;
980 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
984 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
986 struct load_op
*insn
= (struct load_op
*) pc
;
988 if (vstack_push(stack
)) {
992 vstack_ax(stack
)->type
= REG_STAR_GLOB_STRING
;
993 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
997 case BYTECODE_OP_LOAD_S64
:
999 if (vstack_push(stack
)) {
1003 vstack_ax(stack
)->type
= REG_S64
;
1004 next_pc
+= sizeof(struct load_op
)
1005 + sizeof(struct literal_numeric
);
1009 case BYTECODE_OP_LOAD_DOUBLE
:
1011 if (vstack_push(stack
)) {
1015 vstack_ax(stack
)->type
= REG_DOUBLE
;
1016 next_pc
+= sizeof(struct load_op
)
1017 + sizeof(struct literal_double
);
1022 case BYTECODE_OP_CAST_TO_S64
:
1024 struct cast_op
*insn
= (struct cast_op
*) pc
;
1026 switch (vstack_ax(stack
)->type
) {
1028 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
1033 case REG_STAR_GLOB_STRING
:
1034 printk(KERN_WARNING
"LTTng: bytecode: Cast op can only be applied to numeric or floating point registers\n");
1038 insn
->op
= BYTECODE_OP_CAST_NOP
;
1041 insn
->op
= BYTECODE_OP_CAST_DOUBLE_TO_S64
;
1045 vstack_ax(stack
)->type
= REG_S64
;
1046 next_pc
+= sizeof(struct cast_op
);
1049 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1052 vstack_ax(stack
)->type
= REG_S64
;
1053 next_pc
+= sizeof(struct cast_op
);
1056 case BYTECODE_OP_CAST_NOP
:
1058 next_pc
+= sizeof(struct cast_op
);
1063 * Instructions for recursive traversal through composed types.
1065 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1067 if (vstack_push(stack
)) {
1071 vstack_ax(stack
)->type
= REG_PTR
;
1072 vstack_ax(stack
)->load
.type
= LOAD_ROOT_CONTEXT
;
1073 next_pc
+= sizeof(struct load_op
);
1076 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1078 if (vstack_push(stack
)) {
1082 vstack_ax(stack
)->type
= REG_PTR
;
1083 vstack_ax(stack
)->load
.type
= LOAD_ROOT_APP_CONTEXT
;
1084 next_pc
+= sizeof(struct load_op
);
1087 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1089 if (vstack_push(stack
)) {
1093 vstack_ax(stack
)->type
= REG_PTR
;
1094 vstack_ax(stack
)->load
.type
= LOAD_ROOT_PAYLOAD
;
1095 next_pc
+= sizeof(struct load_op
);
1099 case BYTECODE_OP_LOAD_FIELD
:
1101 struct load_op
*insn
= (struct load_op
*) pc
;
1103 WARN_ON_ONCE(vstack_ax(stack
)->type
!= REG_PTR
);
1105 ret
= specialize_load_field(vstack_ax(stack
), insn
);
1109 next_pc
+= sizeof(struct load_op
);
1113 case BYTECODE_OP_LOAD_FIELD_S8
:
1114 case BYTECODE_OP_LOAD_FIELD_S16
:
1115 case BYTECODE_OP_LOAD_FIELD_S32
:
1116 case BYTECODE_OP_LOAD_FIELD_S64
:
1117 case BYTECODE_OP_LOAD_FIELD_U8
:
1118 case BYTECODE_OP_LOAD_FIELD_U16
:
1119 case BYTECODE_OP_LOAD_FIELD_U32
:
1120 case BYTECODE_OP_LOAD_FIELD_U64
:
1123 vstack_ax(stack
)->type
= REG_S64
;
1124 next_pc
+= sizeof(struct load_op
);
1128 case BYTECODE_OP_LOAD_FIELD_STRING
:
1129 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1132 vstack_ax(stack
)->type
= REG_STRING
;
1133 next_pc
+= sizeof(struct load_op
);
1137 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1140 vstack_ax(stack
)->type
= REG_DOUBLE
;
1141 next_pc
+= sizeof(struct load_op
);
1145 case BYTECODE_OP_GET_SYMBOL
:
1147 struct load_op
*insn
= (struct load_op
*) pc
;
1149 dbg_printk("op get symbol\n");
1150 switch (vstack_ax(stack
)->load
.type
) {
1152 printk(KERN_WARNING
"LTTng: bytecode: Nested fields not implemented yet.\n");
1155 case LOAD_ROOT_CONTEXT
:
1156 /* Lookup context field. */
1157 ret
= specialize_context_lookup(ctx
, bytecode
, insn
,
1158 &vstack_ax(stack
)->load
);
1162 case LOAD_ROOT_APP_CONTEXT
:
1165 case LOAD_ROOT_PAYLOAD
:
1166 /* Lookup event payload field. */
1167 ret
= specialize_payload_lookup(event_desc
,
1169 &vstack_ax(stack
)->load
);
1174 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1178 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1180 /* Always generated by specialize phase. */
1185 case BYTECODE_OP_GET_INDEX_U16
:
1187 struct load_op
*insn
= (struct load_op
*) pc
;
1188 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1190 dbg_printk("op get index u16\n");
1192 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1193 vstack_ax(stack
), sizeof(*index
));
1196 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1200 case BYTECODE_OP_GET_INDEX_U64
:
1202 struct load_op
*insn
= (struct load_op
*) pc
;
1203 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1205 dbg_printk("op get index u64\n");
1207 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1208 vstack_ax(stack
), sizeof(*index
));
1211 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);