/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng UST bytecode specializer.
 */

#include <assert.h>
#include <limits.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#include <lttng/ust-utils.h>

#include "context-internal.h"
#include "lttng-bytecode.h"
#include "ust-events-internal.h"
#include "common/macros.h"

static int lttng_fls(int val)
{
	int r = 32;
	unsigned int x = (unsigned int) val;

	if (!x)
		return 0;
	if (!(x & 0xFFFF0000U)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xFF000000U)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xF0000000U)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xC0000000U)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000U)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
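
/*
 * lttng_fls() returns the 1-based position of the most significant bit
 * set in val, or 0 when no bit is set. For example, lttng_fls(1) == 1,
 * lttng_fls(8) == 4 and lttng_fls(0x80000000) == 32.
 */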

static int get_count_order(unsigned int count)
{
	int order;

	order = lttng_fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
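
/*
 * get_count_order() computes the order of the next power of two, i.e.
 * the smallest order such that (1U << order) >= count. For example,
 * get_count_order(4) == 2 and get_count_order(5) == 3. It is used
 * below to grow the runtime data area in power-of-two steps.
 */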

static ssize_t bytecode_reserve_data(struct bytecode_runtime *runtime,
		size_t align, size_t len)
{
	ssize_t ret;
	size_t padding = lttng_ust_offset_align(runtime->data_len, align);
	size_t new_len = runtime->data_len + padding + len;
	size_t new_alloc_len = new_len;
	size_t old_alloc_len = runtime->data_alloc_len;

	if (new_len > BYTECODE_MAX_DATA_LEN)
		return -EINVAL;

	if (new_alloc_len > old_alloc_len) {
		char *newptr;

		new_alloc_len =
			max_t(size_t, 1U << get_count_order(new_alloc_len), old_alloc_len << 1);
		newptr = realloc(runtime->data, new_alloc_len);
		if (!newptr)
			return -ENOMEM;
		runtime->data = newptr;
		/* Zero the newly allocated memory past the old allocation length. */
		memset(&runtime->data[old_alloc_len], 0, new_alloc_len - old_alloc_len);
		runtime->data_alloc_len = new_alloc_len;
	}
	runtime->data_len += padding;
	ret = runtime->data_len;
	runtime->data_len += len;
	return ret;
}

static ssize_t bytecode_push_data(struct bytecode_runtime *runtime,
		const void *p, size_t align, size_t len)
{
	ssize_t offset;

	offset = bytecode_reserve_data(runtime, align, len);
	if (offset < 0)
		return offset;
	memcpy(&runtime->data[offset], p, len);
	return offset;
}
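
/*
 * bytecode_push_data() is how the specializer stores instruction
 * operands that do not fit inline: the get_index specializations below
 * push a struct bytecode_get_index_data blob and patch the resulting
 * offset into the instruction's data field.
 */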

static int specialize_load_field(struct vstack_entry *stack_top,
		struct load_op *insn)
{
	int ret;

	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		dbg_printf("Bytecode warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->load.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printf("op load field s8\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S8;
		break;
	case OBJECT_TYPE_S16:
		dbg_printf("op load field s16\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S16;
		break;
	case OBJECT_TYPE_S32:
		dbg_printf("op load field s32\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S32;
		break;
	case OBJECT_TYPE_S64:
		dbg_printf("op load field s64\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S64;
		break;
	case OBJECT_TYPE_SIGNED_ENUM:
		dbg_printf("op load field signed enumeration\n");
		stack_top->type = REG_PTR;
		break;
	case OBJECT_TYPE_U8:
		dbg_printf("op load field u8\n");
		stack_top->type = REG_U64;
		insn->op = BYTECODE_OP_LOAD_FIELD_U8;
		break;
	case OBJECT_TYPE_U16:
		dbg_printf("op load field u16\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U16;
		break;
	case OBJECT_TYPE_U32:
		dbg_printf("op load field u32\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U32;
		break;
	case OBJECT_TYPE_U64:
		dbg_printf("op load field u64\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U64;
		break;
	case OBJECT_TYPE_UNSIGNED_ENUM:
		dbg_printf("op load field unsigned enumeration\n");
		stack_top->type = REG_PTR;
		break;
	case OBJECT_TYPE_DOUBLE:
		stack_top->type = REG_DOUBLE;
		insn->op = BYTECODE_OP_LOAD_FIELD_DOUBLE;
		break;
	case OBJECT_TYPE_STRING:
		dbg_printf("op load field string\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_STRING;
		break;
	case OBJECT_TYPE_STRING_SEQUENCE:
		dbg_printf("op load field string sequence\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_SEQUENCE;
		break;
	case OBJECT_TYPE_DYNAMIC:
		dbg_printf("op load field dynamic\n");
		stack_top->type = REG_UNKNOWN;
		/* Don't specialize load op. */
		break;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		ERR("Sequences, arrays, structs and variants cannot be loaded (nested types).");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}

static int specialize_get_index_object_type(enum object_type *otype,
		int signedness, uint32_t elem_len)
{
	switch (elem_len) {
	case 8:
		if (signedness)
			*otype = OBJECT_TYPE_S8;
		else
			*otype = OBJECT_TYPE_U8;
		break;
	case 16:
		if (signedness)
			*otype = OBJECT_TYPE_S16;
		else
			*otype = OBJECT_TYPE_U16;
		break;
	case 32:
		if (signedness)
			*otype = OBJECT_TYPE_S32;
		else
			*otype = OBJECT_TYPE_U32;
		break;
	case 64:
		if (signedness)
			*otype = OBJECT_TYPE_S64;
		else
			*otype = OBJECT_TYPE_U64;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}
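
/*
 * elem_len is expressed in bits: a 32-bit element with signedness set
 * selects OBJECT_TYPE_S32, for instance. Any width other than 8, 16,
 * 32 or 64 bits is rejected with -EINVAL.
 */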

static int specialize_get_index(struct bytecode_runtime *runtime,
		struct load_op *insn, uint64_t index,
		struct vstack_entry *stack_top,
		int idx_len)
{
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	memset(&gid, 0, sizeof(gid));
	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		switch (stack_top->load.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const struct lttng_ust_type_integer *integer_type;
			const struct lttng_ust_event_field *field;
			uint32_t elem_len, num_elems;
			int signedness;

			field = stack_top->load.field;
			switch (field->type->type) {
			case lttng_ust_type_array:
				if (lttng_ust_get_type_array(field->type)->elem_type->type != lttng_ust_type_integer) {
					ret = -EINVAL;
					goto end;
				}
				integer_type = lttng_ust_get_type_integer(lttng_ust_get_type_array(field->type)->elem_type);
				num_elems = lttng_ust_get_type_array(field->type)->length;
				break;
			default:
				ret = -EINVAL;
				goto end;
			}
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			if (index >= num_elems) {
				ret = -EINVAL;
				goto end;
			}
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.array_len = num_elems * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const struct lttng_ust_type_integer *integer_type;
			const struct lttng_ust_event_field *field;
			uint32_t elem_len;
			int signedness;

			field = stack_top->load.field;
			switch (field->type->type) {
			case lttng_ust_type_sequence:
				if (lttng_ust_get_type_sequence(field->type)->elem_type->type != lttng_ust_type_integer) {
					ret = -EINVAL;
					goto end;
				}
				integer_type = lttng_ust_get_type_integer(lttng_ust_get_type_sequence(field->type)->elem_type);
				break;
			default:
				ret = -EINVAL;
				goto end;
			}
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			/* Only generated by the specialize phase. */
		case OBJECT_TYPE_VARIANT:	/* Fall-through */
		default:
			ERR("Unexpected get index type %d",
				(int) stack_top->load.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
		ERR("Index lookup for root field not implemented yet.");
		ret = -EINVAL;
		goto end;
	}
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	switch (idx_len) {
	case 2:
		((struct get_index_u16 *) insn->data)->index = data_offset;
		break;
	case 8:
		((struct get_index_u64 *) insn->data)->index = data_offset;
		break;
	default:
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
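
/*
 * Worked example: indexing element 2 of a uint32_t[4] array field gives
 * elem_len == 32, hence gid.offset == 2 * (32 / CHAR_BIT) == 8 bytes,
 * gid.array_len == 16 bytes, and an object type narrowed to
 * OBJECT_TYPE_U32.
 */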

static int specialize_context_lookup_name(struct lttng_ust_ctx *ctx,
		struct bytecode_runtime *bytecode,
		struct load_op *insn)
{
	uint16_t offset;
	const char *name;

	offset = ((struct get_symbol *) insn->data)->offset;
	name = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + offset;
	return lttng_get_context_index(ctx, name);
}
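
/*
 * The get_symbol operand does not embed the field name itself: it
 * stores an offset into the relocation area of the original bytecode
 * (bc.data + bc.reloc_offset), where the name is looked up before
 * being resolved to a context index.
 */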

static int specialize_load_object(const struct lttng_ust_event_field *field,
		struct vstack_load *load, bool is_context)
{
	load->type = LOAD_OBJECT;

	switch (field->type->type) {
	case lttng_ust_type_integer:
		if (lttng_ust_get_type_integer(field->type)->signedness)
			load->object_type = OBJECT_TYPE_S64;
		else
			load->object_type = OBJECT_TYPE_U64;
		load->rev_bo = false;
		break;
	case lttng_ust_type_enum:
	{
		const struct lttng_ust_type_integer *itype;

		itype = lttng_ust_get_type_integer(lttng_ust_get_type_enum(field->type)->container_type);
		if (itype->signedness)
			load->object_type = OBJECT_TYPE_SIGNED_ENUM;
		else
			load->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
		load->rev_bo = false;
		break;
	}
	case lttng_ust_type_array:
		if (lttng_ust_get_type_array(field->type)->elem_type->type != lttng_ust_type_integer) {
			ERR("Array nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (lttng_ust_get_type_array(field->type)->encoding == lttng_ust_string_encoding_none) {
				load->object_type = OBJECT_TYPE_ARRAY;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case lttng_ust_type_sequence:
		if (lttng_ust_get_type_sequence(field->type)->elem_type->type != lttng_ust_type_integer) {
			ERR("Sequence nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (lttng_ust_get_type_sequence(field->type)->encoding == lttng_ust_string_encoding_none) {
				load->object_type = OBJECT_TYPE_SEQUENCE;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case lttng_ust_type_string:
		load->object_type = OBJECT_TYPE_STRING;
		break;
	case lttng_ust_type_float:
		load->object_type = OBJECT_TYPE_DOUBLE;
		break;
	case lttng_ust_type_dynamic:
		load->object_type = OBJECT_TYPE_DYNAMIC;
		break;
	default:
		ERR("Unknown type: %d", (int) field->type->type);
		return -EINVAL;
	}
	return 0;
}
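
/*
 * Note the is_context distinction above: integer arrays and sequences
 * used as context fields are exposed as strings, whereas payload arrays
 * and sequences without a string encoding keep their aggregate object
 * type so that individual elements can be indexed later.
 */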

static int specialize_context_lookup(struct lttng_ust_ctx *ctx,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	int idx, ret;
	const struct lttng_ust_ctx_field *ctx_field;
	const struct lttng_ust_event_field *field;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	idx = specialize_context_lookup_name(ctx, runtime, insn);
	if (idx < 0) {
		return -ENOENT;
	}
	ctx_field = &ctx->fields[idx];
	field = ctx_field->event_field;
	ret = specialize_load_object(field, load, true);
	if (ret)
		return ret;
	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.ctx_index = idx;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		return -EINVAL;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	return 0;
}
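
/*
 * Once specialized, the former get_symbol instruction costs a single
 * table lookup at run time: the interpreter reads the 16-bit data
 * offset, fetches the bytecode_get_index_data blob, and accesses the
 * context field by its resolved index instead of comparing names.
 */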

static int specialize_app_context_lookup(struct lttng_ust_ctx **pctx,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	uint16_t offset;
	const char *orig_name;
	char *name = NULL;
	int idx, ret;
	const struct lttng_ust_ctx_field *ctx_field;
	const struct lttng_ust_event_field *field;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	offset = ((struct get_symbol *) insn->data)->offset;
	orig_name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
	name = zmalloc(strlen(orig_name) + strlen("$app.") + 1);
	if (!name) {
		ret = -ENOMEM;
		goto end;
	}
	strcpy(name, "$app.");
	strcat(name, orig_name);
	idx = lttng_get_context_index(*pctx, name);
	if (idx < 0) {
		assert(lttng_context_is_app(name));
		ret = lttng_ust_add_app_context_to_ctx_rcu(name,
				pctx);
		if (ret)
			goto end;
		idx = lttng_get_context_index(*pctx, name);
		if (idx < 0) {
			ret = -ENOENT;
			goto end;
		}
	}
	ctx_field = &(*pctx)->fields[idx];
	field = ctx_field->event_field;
	ret = specialize_load_object(field, load, true);
	if (ret)
		goto end;
	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.ctx_index = idx;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	ret = 0;
end:
	free(name);
	return ret;
}
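
/*
 * Application-provided context fields live in the "$app." namespace,
 * hence the prefix concatenation above. When the field is not known
 * yet, it is registered through lttng_ust_add_app_context_to_ctx_rcu()
 * and the index lookup is retried before giving up.
 */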

static int specialize_payload_lookup(const struct lttng_ust_event_desc *event_desc,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	const char *name;
	uint16_t offset;
	unsigned int i, nr_fields;
	bool found = false;
	uint32_t field_offset = 0;
	const struct lttng_ust_event_field *field;
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	nr_fields = event_desc->nr_fields;
	offset = ((struct get_symbol *) insn->data)->offset;
	name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
	for (i = 0; i < nr_fields; i++) {
		field = event_desc->fields[i];
		if (field->nofilter) {
			continue;
		}
		if (!strcmp(field->name, name)) {
			found = true;
			break;
		}
		/* compute field offset on stack */
		switch (field->type->type) {
		case lttng_ust_type_integer:
		case lttng_ust_type_enum:
			field_offset += sizeof(int64_t);
			break;
		case lttng_ust_type_array:
		case lttng_ust_type_sequence:
			field_offset += sizeof(unsigned long);
			field_offset += sizeof(void *);
			break;
		case lttng_ust_type_string:
			field_offset += sizeof(void *);
			break;
		case lttng_ust_type_float:
			field_offset += sizeof(double);
			break;
		default:
			ret = -EINVAL;
			goto end;
		}
	}
	if (!found) {
		ret = -EINVAL;
		goto end;
	}

	ret = specialize_load_object(field, load, false);
	if (ret)
		goto end;

	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.offset = field_offset;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	ret = 0;
end:
	return ret;
}
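
/*
 * The field_offset accumulated above follows the interpreter's on-stack
 * field layout (see the "compute field offset on stack" comment):
 * integers and enums occupy 8 bytes, arrays and sequences a length word
 * plus a pointer, strings a pointer, and floating point values a
 * double. The matched field's offset becomes gid.offset in the
 * specialized get_index.
 */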

int lttng_bytecode_specialize(const struct lttng_ust_event_desc *event_desc,
		struct bytecode_runtime *bytecode)
{
	void *pc, *next_pc, *start_pc;
	int ret = -EINVAL;
	struct vstack _stack;
	struct vstack *stack = &_stack;
	struct lttng_ust_ctx **pctx = bytecode->p.pctx;

	vstack_init(stack);

	start_pc = &bytecode->code[0];
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
			pc = next_pc) {
		switch (*(bytecode_opcode_t *) pc) {
		case BYTECODE_OP_UNKNOWN:
		default:
			ERR("unknown bytecode op %u\n",
				(unsigned int) *(bytecode_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		case BYTECODE_OP_RETURN:
			if (vstack_ax(stack)->type == REG_S64 ||
					vstack_ax(stack)->type == REG_U64)
				*(bytecode_opcode_t *) pc = BYTECODE_OP_RETURN_S64;
			ret = 0;
			goto end;

		case BYTECODE_OP_RETURN_S64:
			if (vstack_ax(stack)->type != REG_S64 &&
					vstack_ax(stack)->type != REG_U64) {
				ERR("Unexpected register type\n");
				ret = -EINVAL;
				goto end;
			}
			ret = 0;
			goto end;

		/* binary */
		case BYTECODE_OP_MUL:
		case BYTECODE_OP_DIV:
		case BYTECODE_OP_MOD:
		case BYTECODE_OP_PLUS:
		case BYTECODE_OP_MINUS:
			ERR("unsupported bytecode op %u\n",
				(unsigned int) *(bytecode_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		case BYTECODE_OP_EQ:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
					insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
				else
					insn->op = BYTECODE_OP_EQ_STRING;
				break;
			case REG_STAR_GLOB_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_EQ_S64;
				else
					insn->op = BYTECODE_OP_EQ_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_EQ_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_EQ_DOUBLE;
				break;
			case REG_UNKNOWN:
				break; /* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}
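
		/*
		 * The five comparison operators below (NE, GT, LT, GE, LE)
		 * follow the same pattern as BYTECODE_OP_EQ: inspect the
		 * types of the two top-of-stack registers, select the
		 * matching typed opcode, then pop both operands and push a
		 * single S64 truth value. REG_UNKNOWN operands keep the
		 * generic opcode so typing happens at run time.
		 */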
		case BYTECODE_OP_NE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
					insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
				else
					insn->op = BYTECODE_OP_NE_STRING;
				break;
			case REG_STAR_GLOB_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_NE_S64;
				else
					insn->op = BYTECODE_OP_NE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_NE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_NE_DOUBLE;
				break;
			case REG_UNKNOWN:
				break; /* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_GT:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for > binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_GT_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GT_S64;
				else
					insn->op = BYTECODE_OP_GT_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GT_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_GT_DOUBLE;
				break;
			case REG_UNKNOWN:
				break; /* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_LT:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for < binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_LT_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LT_S64;
				else
					insn->op = BYTECODE_OP_LT_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LT_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_LT_DOUBLE;
				break;
			case REG_UNKNOWN:
				break; /* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_GE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for >= binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_GE_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GE_S64;
				else
					insn->op = BYTECODE_OP_GE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_GE_DOUBLE;
				break;
			case REG_UNKNOWN:
				break; /* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_LE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for <= binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_LE_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LE_S64;
				else
					insn->op = BYTECODE_OP_LE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_LE_DOUBLE;
				break;
			case REG_UNKNOWN:
				break; /* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_EQ_STRING:
		case BYTECODE_OP_NE_STRING:
		case BYTECODE_OP_GT_STRING:
		case BYTECODE_OP_LT_STRING:
		case BYTECODE_OP_GE_STRING:
		case BYTECODE_OP_LE_STRING:
		case BYTECODE_OP_EQ_STAR_GLOB_STRING:
		case BYTECODE_OP_NE_STAR_GLOB_STRING:
		case BYTECODE_OP_EQ_S64:
		case BYTECODE_OP_NE_S64:
		case BYTECODE_OP_GT_S64:
		case BYTECODE_OP_LT_S64:
		case BYTECODE_OP_GE_S64:
		case BYTECODE_OP_LE_S64:
		case BYTECODE_OP_EQ_DOUBLE:
		case BYTECODE_OP_NE_DOUBLE:
		case BYTECODE_OP_GT_DOUBLE:
		case BYTECODE_OP_LT_DOUBLE:
		case BYTECODE_OP_GE_DOUBLE:
		case BYTECODE_OP_LE_DOUBLE:
		case BYTECODE_OP_EQ_DOUBLE_S64:
		case BYTECODE_OP_NE_DOUBLE_S64:
		case BYTECODE_OP_GT_DOUBLE_S64:
		case BYTECODE_OP_LT_DOUBLE_S64:
		case BYTECODE_OP_GE_DOUBLE_S64:
		case BYTECODE_OP_LE_DOUBLE_S64:
		case BYTECODE_OP_EQ_S64_DOUBLE:
		case BYTECODE_OP_NE_S64_DOUBLE:
		case BYTECODE_OP_GT_S64_DOUBLE:
		case BYTECODE_OP_LT_S64_DOUBLE:
		case BYTECODE_OP_GE_S64_DOUBLE:
		case BYTECODE_OP_LE_S64_DOUBLE:
		{
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_BIT_RSHIFT:
		case BYTECODE_OP_BIT_LSHIFT:
		case BYTECODE_OP_BIT_AND:
		case BYTECODE_OP_BIT_OR:
		case BYTECODE_OP_BIT_XOR:
		{
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			/* Bitwise operators produce an unsigned result. */
			vstack_ax(stack)->type = REG_U64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		/* unary */
		case BYTECODE_OP_UNARY_PLUS:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
			case REG_U64:
				insn->op = BYTECODE_OP_UNARY_PLUS_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_PLUS_DOUBLE;
				break;
			case REG_UNKNOWN: /* Dynamic typing. */
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_MINUS:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
			case REG_U64:
				insn->op = BYTECODE_OP_UNARY_MINUS_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_MINUS_DOUBLE;
				break;
			case REG_UNKNOWN: /* Dynamic typing. */
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_NOT:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
			case REG_U64:
				insn->op = BYTECODE_OP_UNARY_NOT_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_NOT_DOUBLE;
				break;
			case REG_UNKNOWN: /* Dynamic typing. */
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_BIT_NOT:
		{
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_PLUS_S64:
		case BYTECODE_OP_UNARY_MINUS_S64:
		case BYTECODE_OP_UNARY_NOT_S64:
		case BYTECODE_OP_UNARY_PLUS_DOUBLE:
		case BYTECODE_OP_UNARY_MINUS_DOUBLE:
		case BYTECODE_OP_UNARY_NOT_DOUBLE:
		{
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		/* logical */
		case BYTECODE_OP_AND:
		case BYTECODE_OP_OR:
		{
			/* Continue to next instruction */
			/* Pop 1 when jump not taken */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			next_pc += sizeof(struct logical_op);
			break;
		}

		/* load field ref */
		case BYTECODE_OP_LOAD_FIELD_REF:
		{
			ERR("Unknown field ref type\n");
			ret = -EINVAL;
			goto end;
		}
		/* get context ref */
		case BYTECODE_OP_GET_CONTEXT_REF:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_UNKNOWN;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_STRING:
		case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
		case BYTECODE_OP_GET_CONTEXT_REF_STRING:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_S64:
		case BYTECODE_OP_GET_CONTEXT_REF_S64:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
		case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}

		/* load from immediate operand */
		case BYTECODE_OP_LOAD_STRING:
		{
			struct load_op *insn = (struct load_op *) pc;

			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			break;
		}

		case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
		{
			struct load_op *insn = (struct load_op *) pc;

			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			break;
		}

		case BYTECODE_OP_LOAD_S64:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_numeric);
			break;
		}

		case BYTECODE_OP_LOAD_DOUBLE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_double);
			break;
		}

		/* cast */
		case BYTECODE_OP_CAST_TO_S64:
		{
			struct cast_op *insn = (struct cast_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
			case REG_STAR_GLOB_STRING:
				ERR("Cast op can only be applied to numeric or floating point registers\n");
				ret = -EINVAL;
				goto end;
			case REG_S64:
			case REG_U64:
				insn->op = BYTECODE_OP_CAST_NOP;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_CAST_DOUBLE_TO_S64;
				break;
			case REG_UNKNOWN:
				break;
			}
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct cast_op);
			break;
		}
		case BYTECODE_OP_CAST_DOUBLE_TO_S64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct cast_op);
			break;
		}
		case BYTECODE_OP_CAST_NOP:
		{
			next_pc += sizeof(struct cast_op);
			break;
		}

		/*
		 * Instructions for recursive traversal through composed types.
		 */
		case BYTECODE_OP_GET_CONTEXT_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_CONTEXT;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_APP_CONTEXT;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_GET_PAYLOAD_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_PAYLOAD;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD:
		{
			struct load_op *insn = (struct load_op *) pc;

			assert(vstack_ax(stack)->type == REG_PTR);
			/* Pop 1, push 1 */
			ret = specialize_load_field(vstack_ax(stack), insn);
			if (ret)
				goto end;

			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_S8:
		case BYTECODE_OP_LOAD_FIELD_S16:
		case BYTECODE_OP_LOAD_FIELD_S32:
		case BYTECODE_OP_LOAD_FIELD_S64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_U8:
		case BYTECODE_OP_LOAD_FIELD_U16:
		case BYTECODE_OP_LOAD_FIELD_U32:
		case BYTECODE_OP_LOAD_FIELD_U64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_U64;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_STRING:
		case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_DOUBLE:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_GET_SYMBOL:
		{
			struct load_op *insn = (struct load_op *) pc;

			dbg_printf("op get symbol\n");
			switch (vstack_ax(stack)->load.type) {
			case LOAD_OBJECT:
				ERR("Nested fields not implemented yet.");
				ret = -EINVAL;
				goto end;
			case LOAD_ROOT_CONTEXT:
				/* Lookup context field. */
				ret = specialize_context_lookup(*pctx,
					bytecode, insn,
					&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			case LOAD_ROOT_APP_CONTEXT:
				/* Lookup app context field. */
				ret = specialize_app_context_lookup(pctx,
					bytecode, insn,
					&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			case LOAD_ROOT_PAYLOAD:
				/* Lookup event payload field. */
				ret = specialize_payload_lookup(event_desc,
					bytecode, insn,
					&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			}
			next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
			break;
		}

		case BYTECODE_OP_GET_SYMBOL_FIELD:
		{
			/* Always generated by specialize phase. */
			ret = -EINVAL;
			goto end;
		}

		case BYTECODE_OP_GET_INDEX_U16:
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u16 *index = (struct get_index_u16 *) insn->data;

			dbg_printf("op get index u16\n");
			/* Pop 1, push 1 */
			ret = specialize_get_index(bytecode, insn, index->index,
					vstack_ax(stack), sizeof(*index));
			if (ret)
				goto end;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
			break;
		}

		case BYTECODE_OP_GET_INDEX_U64:
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u64 *index = (struct get_index_u64 *) insn->data;

			dbg_printf("op get index u64\n");
			/* Pop 1, push 1 */
			ret = specialize_get_index(bytecode, insn, index->index,
					vstack_ax(stack), sizeof(*index));
			if (ret)
				goto end;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
			break;
		}
		}
	}
end:
	return ret;
}