15a45b93947fb19f9ae057984f6307c98befb45c
[lttng-ust.git] / liblttng-ust / lttng-bytecode-specialize.c
1 /*
2 * lttng-bytecode-specialize.c
3 *
4 * LTTng UST bytecode specializer.
5 *
6 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
7 *
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
14 *
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
24 * SOFTWARE.
25 */
26
27 #define _LGPL_SOURCE
28 #include <stddef.h>
29 #include <stdint.h>
30
31 #include "lttng-bytecode.h"
32 #include <lttng/align.h>
33 #include "ust-events-internal.h"
34
/*
 * Find last (most significant) bit set, 1-based: returns 32 for the
 * top bit, 1 for the lowest bit, and 0 when no bit is set.
 * Equivalent to the classic fls() via a binary search on bit masks.
 */
static int lttng_fls(int val)
{
	unsigned int v = (unsigned int) val;
	int pos = 32;
	int shift;

	if (v == 0)
		return 0;
	/* Halve the search window each pass: 16, 8, 4, 2, 1. */
	for (shift = 16; shift >= 1; shift >>= 1) {
		unsigned int high_mask = ~0U << (32 - shift);

		if (!(v & high_mask)) {
			v <<= shift;
			pos -= shift;
		}
	}
	return pos;
}
63
/*
 * Return ceil(log2(count)): the order of the smallest power of two
 * greater than or equal to @count.
 */
static int get_count_order(unsigned int count)
{
	int order = lttng_fls(count) - 1;

	/* Non-power-of-two: round up to the next order. */
	if (count & (count - 1))
		order++;
	return order;
}
73
74 static ssize_t bytecode_reserve_data(struct bytecode_runtime *runtime,
75 size_t align, size_t len)
76 {
77 ssize_t ret;
78 size_t padding = lttng_ust_offset_align(runtime->data_len, align);
79 size_t new_len = runtime->data_len + padding + len;
80 size_t new_alloc_len = new_len;
81 size_t old_alloc_len = runtime->data_alloc_len;
82
83 if (new_len > BYTECODE_MAX_DATA_LEN)
84 return -EINVAL;
85
86 if (new_alloc_len > old_alloc_len) {
87 char *newptr;
88
89 new_alloc_len =
90 max_t(size_t, 1U << get_count_order(new_alloc_len), old_alloc_len << 1);
91 newptr = realloc(runtime->data, new_alloc_len);
92 if (!newptr)
93 return -ENOMEM;
94 runtime->data = newptr;
95 /* We zero directly the memory from start of allocation. */
96 memset(&runtime->data[old_alloc_len], 0, new_alloc_len - old_alloc_len);
97 runtime->data_alloc_len = new_alloc_len;
98 }
99 runtime->data_len += padding;
100 ret = runtime->data_len;
101 runtime->data_len += len;
102 return ret;
103 }
104
105 static ssize_t bytecode_push_data(struct bytecode_runtime *runtime,
106 const void *p, size_t align, size_t len)
107 {
108 ssize_t offset;
109
110 offset = bytecode_reserve_data(runtime, align, len);
111 if (offset < 0)
112 return -ENOMEM;
113 memcpy(&runtime->data[offset], p, len);
114 return offset;
115 }
116
/*
 * Specialize a generic LOAD_FIELD instruction: based on the object type
 * inferred for the object on the virtual-stack top, narrow insn->op to
 * a type-specific load opcode and set the stack-top register type the
 * interpreter should expect after the load.
 *
 * Returns 0 on success, -EINVAL when the stack top is not a loadable
 * object (roots need a field-name dereference first) or holds a nested
 * type which cannot be loaded as a scalar.
 */
static int specialize_load_field(struct vstack_entry *stack_top,
		struct load_op *insn)
{
	int ret;

	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		/* Roots must be indexed by field name before loading. */
		dbg_printf("Bytecode warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->load.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printf("op load field s8\n");
		stack_top->type = REG_S64;
		/* Reverse byte order keeps the generic (unspecialized) op. */
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S8;
		break;
	case OBJECT_TYPE_S16:
		dbg_printf("op load field s16\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S16;
		break;
	case OBJECT_TYPE_S32:
		dbg_printf("op load field s32\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S32;
		break;
	case OBJECT_TYPE_S64:
		dbg_printf("op load field s64\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S64;
		break;
	case OBJECT_TYPE_SIGNED_ENUM:
		/*
		 * NOTE(review): enums keep REG_PTR and the generic op —
		 * presumably resolved later in the pipeline; confirm.
		 */
		dbg_printf("op load field signed enumeration\n");
		stack_top->type = REG_PTR;
		break;
	case OBJECT_TYPE_U8:
		dbg_printf("op load field u8\n");
		stack_top->type = REG_U64;
		/* No rev_bo guard: byte order is irrelevant for one byte. */
		insn->op = BYTECODE_OP_LOAD_FIELD_U8;
		break;
	case OBJECT_TYPE_U16:
		dbg_printf("op load field u16\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U16;
		break;
	case OBJECT_TYPE_U32:
		dbg_printf("op load field u32\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U32;
		break;
	case OBJECT_TYPE_U64:
		dbg_printf("op load field u64\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U64;
		break;
	case OBJECT_TYPE_UNSIGNED_ENUM:
		dbg_printf("op load field unsigned enumeration\n");
		stack_top->type = REG_PTR;
		break;
	case OBJECT_TYPE_DOUBLE:
		stack_top->type = REG_DOUBLE;
		insn->op = BYTECODE_OP_LOAD_FIELD_DOUBLE;
		break;
	case OBJECT_TYPE_STRING:
		dbg_printf("op load field string\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_STRING;
		break;
	case OBJECT_TYPE_STRING_SEQUENCE:
		dbg_printf("op load field string sequence\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_SEQUENCE;
		break;
	case OBJECT_TYPE_DYNAMIC:
		dbg_printf("op load field dynamic\n");
		stack_top->type = REG_UNKNOWN;
		/* Don't specialize load op. */
		break;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
221
222 static int specialize_get_index_object_type(enum object_type *otype,
223 int signedness, uint32_t elem_len)
224 {
225 switch (elem_len) {
226 case 8:
227 if (signedness)
228 *otype = OBJECT_TYPE_S8;
229 else
230 *otype = OBJECT_TYPE_U8;
231 break;
232 case 16:
233 if (signedness)
234 *otype = OBJECT_TYPE_S16;
235 else
236 *otype = OBJECT_TYPE_U16;
237 break;
238 case 32:
239 if (signedness)
240 *otype = OBJECT_TYPE_S32;
241 else
242 *otype = OBJECT_TYPE_U32;
243 break;
244 case 64:
245 if (signedness)
246 *otype = OBJECT_TYPE_S64;
247 else
248 *otype = OBJECT_TYPE_U64;
249 break;
250 default:
251 return -EINVAL;
252 }
253 return 0;
254 }
255
/*
 * Specialize a constant-index GET_INDEX operation on an array or
 * sequence object: compute the element byte offset, record it in a
 * bytecode_get_index_data pushed into the runtime data area, and patch
 * the instruction's inline index (u16 or u64 form) to point at it.
 *
 * @idx_len selects the inline operand width (2 or 8 bytes).
 * Returns 0 on success, -EINVAL on unsupported object type, non-integer
 * element type, or out-of-bounds array index.
 */
static int specialize_get_index(struct bytecode_runtime *runtime,
		struct load_op *insn, uint64_t index,
		struct vstack_entry *stack_top,
		int idx_len)
{
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	memset(&gid, 0, sizeof(gid));
	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		switch (stack_top->load.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const struct lttng_integer_type *integer_type;
			const struct lttng_event_field *field;
			uint32_t elem_len, num_elems;
			int signedness;

			field = stack_top->load.field;
			/* Both legacy and nestable array layouts are accepted. */
			switch (field->type.atype) {
			case atype_array:
				integer_type = &field->type.u.legacy.array.elem_type.u.basic.integer;
				num_elems = field->type.u.legacy.array.length;
				break;
			case atype_array_nestable:
				/* Only integer elements can be indexed. */
				if (field->type.u.array_nestable.elem_type->atype != atype_integer) {
					ret = -EINVAL;
					goto end;
				}
				integer_type = &field->type.u.array_nestable.elem_type->u.integer;
				num_elems = field->type.u.array_nestable.length;
				break;
			default:
				ret = -EINVAL;
				goto end;
			}
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			/* Fixed-length array: bounds-check the constant index. */
			if (index >= num_elems) {
				ret = -EINVAL;
				goto end;
			}
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			/* elem_len is in bits; offsets are in bytes. */
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.array_len = num_elems * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const struct lttng_integer_type *integer_type;
			const struct lttng_event_field *field;
			uint32_t elem_len;
			int signedness;

			field = stack_top->load.field;
			switch (field->type.atype) {
			case atype_sequence:
				integer_type = &field->type.u.legacy.sequence.elem_type.u.basic.integer;
				break;
			case atype_sequence_nestable:
				if (field->type.u.sequence_nestable.elem_type->atype != atype_integer) {
					ret = -EINVAL;
					goto end;
				}
				integer_type = &field->type.u.sequence_nestable.elem_type->u.integer;
				break;
			default:
				ret = -EINVAL;
				goto end;
			}
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			/*
			 * No bounds check here: sequence length is only
			 * known at runtime (no gid.array_len either).
			 */
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			/* Only generated by the specialize phase. */
		case OBJECT_TYPE_VARIANT: /* Fall-through */
		default:
			ERR("Unexpected get index type %d",
				(int) stack_top->load.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
		ERR("Index lookup for root field not implemented yet.");
		ret = -EINVAL;
		goto end;
	}
	/* Stash the computed descriptor; instruction keeps its offset. */
	data_offset = bytecode_push_data(runtime, &gid,
			__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	switch (idx_len) {
	case 2:
		((struct get_index_u16 *) insn->data)->index = data_offset;
		break;
	case 8:
		((struct get_index_u64 *) insn->data)->index = data_offset;
		break;
	default:
		ret = -EINVAL;
		goto end;
	}

	return 0;

end:
	return ret;
}
390
391 static int specialize_context_lookup_name(struct lttng_ctx *ctx,
392 struct bytecode_runtime *bytecode,
393 struct load_op *insn)
394 {
395 uint16_t offset;
396 const char *name;
397
398 offset = ((struct get_symbol *) insn->data)->offset;
399 name = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + offset;
400 return lttng_get_context_index(ctx, name);
401 }
402
/*
 * Derive the vstack load object type from an event field's declared
 * type, so subsequent load/get-index ops can be specialized.
 *
 * @is_context: context fields expose char arrays/sequences as strings;
 * for payload fields, unencoded integer arrays/sequences stay indexable
 * (and load->field is kept for later element-type lookups).
 *
 * Returns 0 on success, -EINVAL for unsupported or unknown types.
 */
static int specialize_load_object(const struct lttng_event_field *field,
		struct vstack_load *load, bool is_context)
{
	load->type = LOAD_OBJECT;

	switch (field->type.atype) {
	case atype_integer:
		/* Scalars are widened to 64-bit registers. */
		if (field->type.u.integer.signedness)
			load->object_type = OBJECT_TYPE_S64;
		else
			load->object_type = OBJECT_TYPE_U64;
		load->rev_bo = false;
		break;
	case atype_enum:
	case atype_enum_nestable:
	{
		const struct lttng_integer_type *itype;

		/* Legacy and nestable enums store their container differently. */
		if (field->type.atype == atype_enum) {
			itype = &field->type.u.legacy.basic.enumeration.container_type;
		} else {
			itype = &field->type.u.enum_nestable.container_type->u.integer;
		}
		if (itype->signedness)
			load->object_type = OBJECT_TYPE_SIGNED_ENUM;
		else
			load->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
		load->rev_bo = false;
		break;
	}
	case atype_array:
		if (field->type.u.legacy.array.elem_type.atype != atype_integer) {
			ERR("Array nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			/* encode_none => raw integer array, indexable. */
			if (field->type.u.legacy.array.elem_type.u.basic.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_ARRAY;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case atype_array_nestable:
		if (field->type.u.array_nestable.elem_type->atype != atype_integer) {
			ERR("Array nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_ARRAY;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case atype_sequence:
		if (field->type.u.legacy.sequence.elem_type.atype != atype_integer) {
			ERR("Sequence nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (field->type.u.legacy.sequence.elem_type.u.basic.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_SEQUENCE;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case atype_sequence_nestable:
		if (field->type.u.sequence_nestable.elem_type->atype != atype_integer) {
			ERR("Sequence nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_SEQUENCE;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;

	case atype_string:
		load->object_type = OBJECT_TYPE_STRING;
		break;
	case atype_float:
		load->object_type = OBJECT_TYPE_DOUBLE;
		break;
	case atype_dynamic:
		load->object_type = OBJECT_TYPE_DYNAMIC;
		break;
	case atype_struct:
		ERR("Structure type cannot be loaded.");
		return -EINVAL;
	default:
		ERR("Unknown type: %d", (int) field->type.atype);
		return -EINVAL;
	}
	return 0;
}
516
/*
 * Specialize a get_symbol lookup against the static context @ctx:
 * resolve the symbol name to a context index, derive the load object
 * type from the matching context field, and rewrite the instruction
 * into a GET_INDEX_U16 whose operand points at a descriptor pushed
 * into the runtime data area.
 *
 * Returns 0 on success, -ENOENT if the name is not a known context
 * field, -EINVAL on push failure, or an error from
 * specialize_load_object().
 */
static int specialize_context_lookup(struct lttng_ctx *ctx,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	int idx, ret;
	struct lttng_ctx_field *ctx_field;
	struct lttng_event_field *field;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	idx = specialize_context_lookup_name(ctx, runtime, insn);
	if (idx < 0) {
		return -ENOENT;
	}
	ctx_field = &ctx->fields[idx];
	field = &ctx_field->event_field;
	/* Context fields are treated as context (is_context = true). */
	ret = specialize_load_object(field, load, true);
	if (ret)
		return ret;
	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.ctx_index = idx;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
			__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		return -EINVAL;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	return 0;
}
552
553 static int specialize_app_context_lookup(struct lttng_ctx **pctx,
554 struct bytecode_runtime *runtime,
555 struct load_op *insn,
556 struct vstack_load *load)
557 {
558 uint16_t offset;
559 const char *orig_name;
560 char *name = NULL;
561 int idx, ret;
562 struct lttng_ctx_field *ctx_field;
563 struct lttng_event_field *field;
564 struct bytecode_get_index_data gid;
565 ssize_t data_offset;
566
567 offset = ((struct get_symbol *) insn->data)->offset;
568 orig_name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
569 name = zmalloc(strlen(orig_name) + strlen("$app.") + 1);
570 if (!name) {
571 ret = -ENOMEM;
572 goto end;
573 }
574 strcpy(name, "$app.");
575 strcat(name, orig_name);
576 idx = lttng_get_context_index(*pctx, name);
577 if (idx < 0) {
578 assert(lttng_context_is_app(name));
579 ret = lttng_ust_add_app_context_to_ctx_rcu(name,
580 pctx);
581 if (ret)
582 return ret;
583 idx = lttng_get_context_index(*pctx, name);
584 if (idx < 0)
585 return -ENOENT;
586 }
587 ctx_field = &(*pctx)->fields[idx];
588 field = &ctx_field->event_field;
589 ret = specialize_load_object(field, load, true);
590 if (ret)
591 goto end;
592 /* Specialize each get_symbol into a get_index. */
593 insn->op = BYTECODE_OP_GET_INDEX_U16;
594 memset(&gid, 0, sizeof(gid));
595 gid.ctx_index = idx;
596 gid.elem.type = load->object_type;
597 gid.elem.rev_bo = load->rev_bo;
598 gid.field = field;
599 data_offset = bytecode_push_data(runtime, &gid,
600 __alignof__(gid), sizeof(gid));
601 if (data_offset < 0) {
602 ret = -EINVAL;
603 goto end;
604 }
605 ((struct get_index_u16 *) insn->data)->index = data_offset;
606 ret = 0;
607 end:
608 free(name);
609 return ret;
610 }
611
/*
 * Specialize a get_symbol lookup against the event payload: find the
 * named field in the event descriptor, compute its byte offset in the
 * interpreter's on-stack payload layout, and rewrite the instruction
 * into a GET_INDEX_U16 backed by a descriptor in the data area.
 *
 * Returns 0 on success, -EINVAL if the field is unknown or a field
 * with an unsupported type precedes it (its offset would be unknown).
 */
static int specialize_payload_lookup(const struct lttng_event_desc *event_desc,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	const char *name;
	uint16_t offset;
	unsigned int i, nr_fields;
	bool found = false;
	uint32_t field_offset = 0;
	const struct lttng_event_field *field;
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	nr_fields = event_desc->nr_fields;
	offset = ((struct get_symbol *) insn->data)->offset;
	name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
	for (i = 0; i < nr_fields; i++) {
		field = &event_desc->fields[i];
		/* Fields hidden from the filter don't occupy a slot. */
		if (field->u.ext.nofilter) {
			continue;
		}
		if (!strcmp(field->name, name)) {
			found = true;
			break;
		}
		/*
		 * Compute field offset on stack. Sizes must mirror the
		 * interpreter's payload stack frame layout — presumably
		 * kept in sync with the interpreter; confirm on change.
		 */
		switch (field->type.atype) {
		case atype_integer:
		case atype_enum:
		case atype_enum_nestable:
			field_offset += sizeof(int64_t);
			break;
		case atype_array:
		case atype_array_nestable:
		case atype_sequence:
		case atype_sequence_nestable:
			/* Length word followed by data pointer. */
			field_offset += sizeof(unsigned long);
			field_offset += sizeof(void *);
			break;
		case atype_string:
			field_offset += sizeof(void *);
			break;
		case atype_float:
			field_offset += sizeof(double);
			break;
		default:
			ret = -EINVAL;
			goto end;
		}
	}
	if (!found) {
		ret = -EINVAL;
		goto end;
	}

	/* Payload field (is_context = false). */
	ret = specialize_load_object(field, load, false);
	if (ret)
		goto end;

	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.offset = field_offset;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
			__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	ret = 0;
end:
	return ret;
}
691
692 int lttng_bytecode_specialize(const struct lttng_event_desc *event_desc,
693 struct bytecode_runtime *bytecode)
694 {
695 void *pc, *next_pc, *start_pc;
696 int ret = -EINVAL;
697 struct vstack _stack;
698 struct vstack *stack = &_stack;
699 struct lttng_ctx **pctx = bytecode->p.pctx;
700
701 vstack_init(stack);
702
703 start_pc = &bytecode->code[0];
704 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
705 pc = next_pc) {
706 switch (*(bytecode_opcode_t *) pc) {
707 case BYTECODE_OP_UNKNOWN:
708 default:
709 ERR("unknown bytecode op %u\n",
710 (unsigned int) *(bytecode_opcode_t *) pc);
711 ret = -EINVAL;
712 goto end;
713
714 case BYTECODE_OP_RETURN:
715 if (vstack_ax(stack)->type == REG_S64 ||
716 vstack_ax(stack)->type == REG_U64)
717 *(bytecode_opcode_t *) pc = BYTECODE_OP_RETURN_S64;
718 ret = 0;
719 goto end;
720
721 case BYTECODE_OP_RETURN_S64:
722 if (vstack_ax(stack)->type != REG_S64 &&
723 vstack_ax(stack)->type != REG_U64) {
724 ERR("Unexpected register type\n");
725 ret = -EINVAL;
726 goto end;
727 }
728 ret = 0;
729 goto end;
730
731 /* binary */
732 case BYTECODE_OP_MUL:
733 case BYTECODE_OP_DIV:
734 case BYTECODE_OP_MOD:
735 case BYTECODE_OP_PLUS:
736 case BYTECODE_OP_MINUS:
737 ERR("unsupported bytecode op %u\n",
738 (unsigned int) *(bytecode_opcode_t *) pc);
739 ret = -EINVAL;
740 goto end;
741
742 case BYTECODE_OP_EQ:
743 {
744 struct binary_op *insn = (struct binary_op *) pc;
745
746 switch(vstack_ax(stack)->type) {
747 default:
748 ERR("unknown register type\n");
749 ret = -EINVAL;
750 goto end;
751
752 case REG_STRING:
753 if (vstack_bx(stack)->type == REG_UNKNOWN)
754 break;
755 if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
756 insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
757 else
758 insn->op = BYTECODE_OP_EQ_STRING;
759 break;
760 case REG_STAR_GLOB_STRING:
761 if (vstack_bx(stack)->type == REG_UNKNOWN)
762 break;
763 insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
764 break;
765 case REG_S64:
766 case REG_U64:
767 if (vstack_bx(stack)->type == REG_UNKNOWN)
768 break;
769 if (vstack_bx(stack)->type == REG_S64 ||
770 vstack_bx(stack)->type == REG_U64)
771 insn->op = BYTECODE_OP_EQ_S64;
772 else
773 insn->op = BYTECODE_OP_EQ_DOUBLE_S64;
774 break;
775 case REG_DOUBLE:
776 if (vstack_bx(stack)->type == REG_UNKNOWN)
777 break;
778 if (vstack_bx(stack)->type == REG_S64 ||
779 vstack_bx(stack)->type == REG_U64)
780 insn->op = BYTECODE_OP_EQ_S64_DOUBLE;
781 else
782 insn->op = BYTECODE_OP_EQ_DOUBLE;
783 break;
784 case REG_UNKNOWN:
785 break; /* Dynamic typing. */
786 }
787 /* Pop 2, push 1 */
788 if (vstack_pop(stack)) {
789 ret = -EINVAL;
790 goto end;
791 }
792 vstack_ax(stack)->type = REG_S64;
793 next_pc += sizeof(struct binary_op);
794 break;
795 }
796
797 case BYTECODE_OP_NE:
798 {
799 struct binary_op *insn = (struct binary_op *) pc;
800
801 switch(vstack_ax(stack)->type) {
802 default:
803 ERR("unknown register type\n");
804 ret = -EINVAL;
805 goto end;
806
807 case REG_STRING:
808 if (vstack_bx(stack)->type == REG_UNKNOWN)
809 break;
810 if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
811 insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
812 else
813 insn->op = BYTECODE_OP_NE_STRING;
814 break;
815 case REG_STAR_GLOB_STRING:
816 if (vstack_bx(stack)->type == REG_UNKNOWN)
817 break;
818 insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
819 break;
820 case REG_S64:
821 case REG_U64:
822 if (vstack_bx(stack)->type == REG_UNKNOWN)
823 break;
824 if (vstack_bx(stack)->type == REG_S64 ||
825 vstack_bx(stack)->type == REG_U64)
826 insn->op = BYTECODE_OP_NE_S64;
827 else
828 insn->op = BYTECODE_OP_NE_DOUBLE_S64;
829 break;
830 case REG_DOUBLE:
831 if (vstack_bx(stack)->type == REG_UNKNOWN)
832 break;
833 if (vstack_bx(stack)->type == REG_S64 ||
834 vstack_bx(stack)->type == REG_U64)
835 insn->op = BYTECODE_OP_NE_S64_DOUBLE;
836 else
837 insn->op = BYTECODE_OP_NE_DOUBLE;
838 break;
839 case REG_UNKNOWN:
840 break; /* Dynamic typing. */
841 }
842 /* Pop 2, push 1 */
843 if (vstack_pop(stack)) {
844 ret = -EINVAL;
845 goto end;
846 }
847 vstack_ax(stack)->type = REG_S64;
848 next_pc += sizeof(struct binary_op);
849 break;
850 }
851
852 case BYTECODE_OP_GT:
853 {
854 struct binary_op *insn = (struct binary_op *) pc;
855
856 switch(vstack_ax(stack)->type) {
857 default:
858 ERR("unknown register type\n");
859 ret = -EINVAL;
860 goto end;
861
862 case REG_STAR_GLOB_STRING:
863 ERR("invalid register type for > binary operator\n");
864 ret = -EINVAL;
865 goto end;
866 case REG_STRING:
867 if (vstack_bx(stack)->type == REG_UNKNOWN)
868 break;
869 insn->op = BYTECODE_OP_GT_STRING;
870 break;
871 case REG_S64:
872 case REG_U64:
873 if (vstack_bx(stack)->type == REG_UNKNOWN)
874 break;
875 if (vstack_bx(stack)->type == REG_S64 ||
876 vstack_bx(stack)->type == REG_U64)
877 insn->op = BYTECODE_OP_GT_S64;
878 else
879 insn->op = BYTECODE_OP_GT_DOUBLE_S64;
880 break;
881 case REG_DOUBLE:
882 if (vstack_bx(stack)->type == REG_UNKNOWN)
883 break;
884 if (vstack_bx(stack)->type == REG_S64 ||
885 vstack_bx(stack)->type == REG_U64)
886 insn->op = BYTECODE_OP_GT_S64_DOUBLE;
887 else
888 insn->op = BYTECODE_OP_GT_DOUBLE;
889 break;
890 case REG_UNKNOWN:
891 break; /* Dynamic typing. */
892 }
893 /* Pop 2, push 1 */
894 if (vstack_pop(stack)) {
895 ret = -EINVAL;
896 goto end;
897 }
898 vstack_ax(stack)->type = REG_S64;
899 next_pc += sizeof(struct binary_op);
900 break;
901 }
902
903 case BYTECODE_OP_LT:
904 {
905 struct binary_op *insn = (struct binary_op *) pc;
906
907 switch(vstack_ax(stack)->type) {
908 default:
909 ERR("unknown register type\n");
910 ret = -EINVAL;
911 goto end;
912
913 case REG_STAR_GLOB_STRING:
914 ERR("invalid register type for < binary operator\n");
915 ret = -EINVAL;
916 goto end;
917 case REG_STRING:
918 if (vstack_bx(stack)->type == REG_UNKNOWN)
919 break;
920 insn->op = BYTECODE_OP_LT_STRING;
921 break;
922 case REG_S64:
923 case REG_U64:
924 if (vstack_bx(stack)->type == REG_UNKNOWN)
925 break;
926 if (vstack_bx(stack)->type == REG_S64 ||
927 vstack_bx(stack)->type == REG_U64)
928 insn->op = BYTECODE_OP_LT_S64;
929 else
930 insn->op = BYTECODE_OP_LT_DOUBLE_S64;
931 break;
932 case REG_DOUBLE:
933 if (vstack_bx(stack)->type == REG_UNKNOWN)
934 break;
935 if (vstack_bx(stack)->type == REG_S64 ||
936 vstack_bx(stack)->type == REG_U64)
937 insn->op = BYTECODE_OP_LT_S64_DOUBLE;
938 else
939 insn->op = BYTECODE_OP_LT_DOUBLE;
940 break;
941 case REG_UNKNOWN:
942 break; /* Dynamic typing. */
943 }
944 /* Pop 2, push 1 */
945 if (vstack_pop(stack)) {
946 ret = -EINVAL;
947 goto end;
948 }
949 vstack_ax(stack)->type = REG_S64;
950 next_pc += sizeof(struct binary_op);
951 break;
952 }
953
954 case BYTECODE_OP_GE:
955 {
956 struct binary_op *insn = (struct binary_op *) pc;
957
958 switch(vstack_ax(stack)->type) {
959 default:
960 ERR("unknown register type\n");
961 ret = -EINVAL;
962 goto end;
963
964 case REG_STAR_GLOB_STRING:
965 ERR("invalid register type for >= binary operator\n");
966 ret = -EINVAL;
967 goto end;
968 case REG_STRING:
969 if (vstack_bx(stack)->type == REG_UNKNOWN)
970 break;
971 insn->op = BYTECODE_OP_GE_STRING;
972 break;
973 case REG_S64:
974 case REG_U64:
975 if (vstack_bx(stack)->type == REG_UNKNOWN)
976 break;
977 if (vstack_bx(stack)->type == REG_S64 ||
978 vstack_bx(stack)->type == REG_U64)
979 insn->op = BYTECODE_OP_GE_S64;
980 else
981 insn->op = BYTECODE_OP_GE_DOUBLE_S64;
982 break;
983 case REG_DOUBLE:
984 if (vstack_bx(stack)->type == REG_UNKNOWN)
985 break;
986 if (vstack_bx(stack)->type == REG_S64 ||
987 vstack_bx(stack)->type == REG_U64)
988 insn->op = BYTECODE_OP_GE_S64_DOUBLE;
989 else
990 insn->op = BYTECODE_OP_GE_DOUBLE;
991 break;
992 case REG_UNKNOWN:
993 break; /* Dynamic typing. */
994 }
995 /* Pop 2, push 1 */
996 if (vstack_pop(stack)) {
997 ret = -EINVAL;
998 goto end;
999 }
1000 vstack_ax(stack)->type = REG_U64;
1001 next_pc += sizeof(struct binary_op);
1002 break;
1003 }
1004 case BYTECODE_OP_LE:
1005 {
1006 struct binary_op *insn = (struct binary_op *) pc;
1007
1008 switch(vstack_ax(stack)->type) {
1009 default:
1010 ERR("unknown register type\n");
1011 ret = -EINVAL;
1012 goto end;
1013
1014 case REG_STAR_GLOB_STRING:
1015 ERR("invalid register type for <= binary operator\n");
1016 ret = -EINVAL;
1017 goto end;
1018 case REG_STRING:
1019 if (vstack_bx(stack)->type == REG_UNKNOWN)
1020 break;
1021 insn->op = BYTECODE_OP_LE_STRING;
1022 break;
1023 case REG_S64:
1024 case REG_U64:
1025 if (vstack_bx(stack)->type == REG_UNKNOWN)
1026 break;
1027 if (vstack_bx(stack)->type == REG_S64 ||
1028 vstack_bx(stack)->type == REG_U64)
1029 insn->op = BYTECODE_OP_LE_S64;
1030 else
1031 insn->op = BYTECODE_OP_LE_DOUBLE_S64;
1032 break;
1033 case REG_DOUBLE:
1034 if (vstack_bx(stack)->type == REG_UNKNOWN)
1035 break;
1036 if (vstack_bx(stack)->type == REG_S64 ||
1037 vstack_bx(stack)->type == REG_U64)
1038 insn->op = BYTECODE_OP_LE_S64_DOUBLE;
1039 else
1040 insn->op = BYTECODE_OP_LE_DOUBLE;
1041 break;
1042 case REG_UNKNOWN:
1043 break; /* Dynamic typing. */
1044 }
1045 vstack_ax(stack)->type = REG_S64;
1046 next_pc += sizeof(struct binary_op);
1047 break;
1048 }
1049
1050 case BYTECODE_OP_EQ_STRING:
1051 case BYTECODE_OP_NE_STRING:
1052 case BYTECODE_OP_GT_STRING:
1053 case BYTECODE_OP_LT_STRING:
1054 case BYTECODE_OP_GE_STRING:
1055 case BYTECODE_OP_LE_STRING:
1056 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
1057 case BYTECODE_OP_NE_STAR_GLOB_STRING:
1058 case BYTECODE_OP_EQ_S64:
1059 case BYTECODE_OP_NE_S64:
1060 case BYTECODE_OP_GT_S64:
1061 case BYTECODE_OP_LT_S64:
1062 case BYTECODE_OP_GE_S64:
1063 case BYTECODE_OP_LE_S64:
1064 case BYTECODE_OP_EQ_DOUBLE:
1065 case BYTECODE_OP_NE_DOUBLE:
1066 case BYTECODE_OP_GT_DOUBLE:
1067 case BYTECODE_OP_LT_DOUBLE:
1068 case BYTECODE_OP_GE_DOUBLE:
1069 case BYTECODE_OP_LE_DOUBLE:
1070 case BYTECODE_OP_EQ_DOUBLE_S64:
1071 case BYTECODE_OP_NE_DOUBLE_S64:
1072 case BYTECODE_OP_GT_DOUBLE_S64:
1073 case BYTECODE_OP_LT_DOUBLE_S64:
1074 case BYTECODE_OP_GE_DOUBLE_S64:
1075 case BYTECODE_OP_LE_DOUBLE_S64:
1076 case BYTECODE_OP_EQ_S64_DOUBLE:
1077 case BYTECODE_OP_NE_S64_DOUBLE:
1078 case BYTECODE_OP_GT_S64_DOUBLE:
1079 case BYTECODE_OP_LT_S64_DOUBLE:
1080 case BYTECODE_OP_GE_S64_DOUBLE:
1081 case BYTECODE_OP_LE_S64_DOUBLE:
1082 {
1083 /* Pop 2, push 1 */
1084 if (vstack_pop(stack)) {
1085 ret = -EINVAL;
1086 goto end;
1087 }
1088 vstack_ax(stack)->type = REG_S64;
1089 next_pc += sizeof(struct binary_op);
1090 break;
1091 }
1092
1093 case BYTECODE_OP_BIT_RSHIFT:
1094 case BYTECODE_OP_BIT_LSHIFT:
1095 case BYTECODE_OP_BIT_AND:
1096 case BYTECODE_OP_BIT_OR:
1097 case BYTECODE_OP_BIT_XOR:
1098 {
1099 /* Pop 2, push 1 */
1100 if (vstack_pop(stack)) {
1101 ret = -EINVAL;
1102 goto end;
1103 }
1104 vstack_ax(stack)->type = REG_S64;
1105 next_pc += sizeof(struct binary_op);
1106 break;
1107 }
1108
1109 /* unary */
1110 case BYTECODE_OP_UNARY_PLUS:
1111 {
1112 struct unary_op *insn = (struct unary_op *) pc;
1113
1114 switch(vstack_ax(stack)->type) {
1115 default:
1116 ERR("unknown register type\n");
1117 ret = -EINVAL;
1118 goto end;
1119
1120 case REG_S64:
1121 case REG_U64:
1122 insn->op = BYTECODE_OP_UNARY_PLUS_S64;
1123 break;
1124 case REG_DOUBLE:
1125 insn->op = BYTECODE_OP_UNARY_PLUS_DOUBLE;
1126 break;
1127 case REG_UNKNOWN: /* Dynamic typing. */
1128 break;
1129 }
1130 /* Pop 1, push 1 */
1131 next_pc += sizeof(struct unary_op);
1132 break;
1133 }
1134
1135 case BYTECODE_OP_UNARY_MINUS:
1136 {
1137 struct unary_op *insn = (struct unary_op *) pc;
1138
1139 switch(vstack_ax(stack)->type) {
1140 default:
1141 ERR("unknown register type\n");
1142 ret = -EINVAL;
1143 goto end;
1144
1145 case REG_S64:
1146 case REG_U64:
1147 insn->op = BYTECODE_OP_UNARY_MINUS_S64;
1148 break;
1149 case REG_DOUBLE:
1150 insn->op = BYTECODE_OP_UNARY_MINUS_DOUBLE;
1151 break;
1152 case REG_UNKNOWN: /* Dynamic typing. */
1153 break;
1154 }
1155 /* Pop 1, push 1 */
1156 next_pc += sizeof(struct unary_op);
1157 break;
1158 }
1159
1160 case BYTECODE_OP_UNARY_NOT:
1161 {
1162 struct unary_op *insn = (struct unary_op *) pc;
1163
1164 switch(vstack_ax(stack)->type) {
1165 default:
1166 ERR("unknown register type\n");
1167 ret = -EINVAL;
1168 goto end;
1169
1170 case REG_S64:
1171 case REG_U64:
1172 insn->op = BYTECODE_OP_UNARY_NOT_S64;
1173 break;
1174 case REG_DOUBLE:
1175 insn->op = BYTECODE_OP_UNARY_NOT_DOUBLE;
1176 break;
1177 case REG_UNKNOWN: /* Dynamic typing. */
1178 break;
1179 }
1180 /* Pop 1, push 1 */
1181 next_pc += sizeof(struct unary_op);
1182 break;
1183 }
1184
1185 case BYTECODE_OP_UNARY_BIT_NOT:
1186 {
1187 /* Pop 1, push 1 */
1188 next_pc += sizeof(struct unary_op);
1189 break;
1190 }
1191
1192 case BYTECODE_OP_UNARY_PLUS_S64:
1193 case BYTECODE_OP_UNARY_MINUS_S64:
1194 case BYTECODE_OP_UNARY_NOT_S64:
1195 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
1196 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
1197 case BYTECODE_OP_UNARY_NOT_DOUBLE:
1198 {
1199 /* Pop 1, push 1 */
1200 next_pc += sizeof(struct unary_op);
1201 break;
1202 }
1203
1204 /* logical */
1205 case BYTECODE_OP_AND:
1206 case BYTECODE_OP_OR:
1207 {
1208 /* Continue to next instruction */
1209 /* Pop 1 when jump not taken */
1210 if (vstack_pop(stack)) {
1211 ret = -EINVAL;
1212 goto end;
1213 }
1214 next_pc += sizeof(struct logical_op);
1215 break;
1216 }
1217
1218 /* load field ref */
1219 case BYTECODE_OP_LOAD_FIELD_REF:
1220 {
1221 ERR("Unknown field ref type\n");
1222 ret = -EINVAL;
1223 goto end;
1224 }
1225 /* get context ref */
1226 case BYTECODE_OP_GET_CONTEXT_REF:
1227 {
1228 if (vstack_push(stack)) {
1229 ret = -EINVAL;
1230 goto end;
1231 }
1232 vstack_ax(stack)->type = REG_UNKNOWN;
1233 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1234 break;
1235 }
1236 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
1237 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
1238 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
1239 {
1240 if (vstack_push(stack)) {
1241 ret = -EINVAL;
1242 goto end;
1243 }
1244 vstack_ax(stack)->type = REG_STRING;
1245 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1246 break;
1247 }
1248 case BYTECODE_OP_LOAD_FIELD_REF_S64:
1249 case BYTECODE_OP_GET_CONTEXT_REF_S64:
1250 {
1251 if (vstack_push(stack)) {
1252 ret = -EINVAL;
1253 goto end;
1254 }
1255 vstack_ax(stack)->type = REG_S64;
1256 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1257 break;
1258 }
1259 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
1260 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
1261 {
1262 if (vstack_push(stack)) {
1263 ret = -EINVAL;
1264 goto end;
1265 }
1266 vstack_ax(stack)->type = REG_DOUBLE;
1267 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1268 break;
1269 }
1270
1271 /* load from immediate operand */
1272 case BYTECODE_OP_LOAD_STRING:
1273 {
1274 struct load_op *insn = (struct load_op *) pc;
1275
1276 if (vstack_push(stack)) {
1277 ret = -EINVAL;
1278 goto end;
1279 }
1280 vstack_ax(stack)->type = REG_STRING;
1281 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1282 break;
1283 }
1284
1285 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
1286 {
1287 struct load_op *insn = (struct load_op *) pc;
1288
1289 if (vstack_push(stack)) {
1290 ret = -EINVAL;
1291 goto end;
1292 }
1293 vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
1294 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1295 break;
1296 }
1297
1298 case BYTECODE_OP_LOAD_S64:
1299 {
1300 if (vstack_push(stack)) {
1301 ret = -EINVAL;
1302 goto end;
1303 }
1304 vstack_ax(stack)->type = REG_S64;
1305 next_pc += sizeof(struct load_op)
1306 + sizeof(struct literal_numeric);
1307 break;
1308 }
1309
1310 case BYTECODE_OP_LOAD_DOUBLE:
1311 {
1312 if (vstack_push(stack)) {
1313 ret = -EINVAL;
1314 goto end;
1315 }
1316 vstack_ax(stack)->type = REG_DOUBLE;
1317 next_pc += sizeof(struct load_op)
1318 + sizeof(struct literal_double);
1319 break;
1320 }
1321
1322 /* cast */
1323 case BYTECODE_OP_CAST_TO_S64:
1324 {
1325 struct cast_op *insn = (struct cast_op *) pc;
1326
1327 switch (vstack_ax(stack)->type) {
1328 default:
1329 ERR("unknown register type\n");
1330 ret = -EINVAL;
1331 goto end;
1332
1333 case REG_STRING:
1334 case REG_STAR_GLOB_STRING:
1335 ERR("Cast op can only be applied to numeric or floating point registers\n");
1336 ret = -EINVAL;
1337 goto end;
1338 case REG_S64:
1339 insn->op = BYTECODE_OP_CAST_NOP;
1340 break;
1341 case REG_DOUBLE:
1342 insn->op = BYTECODE_OP_CAST_DOUBLE_TO_S64;
1343 break;
1344 case REG_UNKNOWN:
1345 case REG_U64:
1346 break;
1347 }
1348 /* Pop 1, push 1 */
1349 vstack_ax(stack)->type = REG_S64;
1350 next_pc += sizeof(struct cast_op);
1351 break;
1352 }
1353 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
1354 {
1355 /* Pop 1, push 1 */
1356 vstack_ax(stack)->type = REG_S64;
1357 next_pc += sizeof(struct cast_op);
1358 break;
1359 }
1360 case BYTECODE_OP_CAST_NOP:
1361 {
1362 next_pc += sizeof(struct cast_op);
1363 break;
1364 }
1365
1366 /*
1367 * Instructions for recursive traversal through composed types.
1368 */
1369 case BYTECODE_OP_GET_CONTEXT_ROOT:
1370 {
1371 if (vstack_push(stack)) {
1372 ret = -EINVAL;
1373 goto end;
1374 }
1375 vstack_ax(stack)->type = REG_PTR;
1376 vstack_ax(stack)->load.type = LOAD_ROOT_CONTEXT;
1377 next_pc += sizeof(struct load_op);
1378 break;
1379 }
1380 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
1381 {
1382 if (vstack_push(stack)) {
1383 ret = -EINVAL;
1384 goto end;
1385 }
1386 vstack_ax(stack)->type = REG_PTR;
1387 vstack_ax(stack)->load.type = LOAD_ROOT_APP_CONTEXT;
1388 next_pc += sizeof(struct load_op);
1389 break;
1390 }
1391 case BYTECODE_OP_GET_PAYLOAD_ROOT:
1392 {
1393 if (vstack_push(stack)) {
1394 ret = -EINVAL;
1395 goto end;
1396 }
1397 vstack_ax(stack)->type = REG_PTR;
1398 vstack_ax(stack)->load.type = LOAD_ROOT_PAYLOAD;
1399 next_pc += sizeof(struct load_op);
1400 break;
1401 }
1402
1403 case BYTECODE_OP_LOAD_FIELD:
1404 {
1405 struct load_op *insn = (struct load_op *) pc;
1406
1407 assert(vstack_ax(stack)->type == REG_PTR);
1408 /* Pop 1, push 1 */
1409 ret = specialize_load_field(vstack_ax(stack), insn);
1410 if (ret)
1411 goto end;
1412
1413 next_pc += sizeof(struct load_op);
1414 break;
1415 }
1416
1417 case BYTECODE_OP_LOAD_FIELD_S8:
1418 case BYTECODE_OP_LOAD_FIELD_S16:
1419 case BYTECODE_OP_LOAD_FIELD_S32:
1420 case BYTECODE_OP_LOAD_FIELD_S64:
1421 {
1422 /* Pop 1, push 1 */
1423 vstack_ax(stack)->type = REG_S64;
1424 next_pc += sizeof(struct load_op);
1425 break;
1426 }
1427
1428 case BYTECODE_OP_LOAD_FIELD_U8:
1429 case BYTECODE_OP_LOAD_FIELD_U16:
1430 case BYTECODE_OP_LOAD_FIELD_U32:
1431 case BYTECODE_OP_LOAD_FIELD_U64:
1432 {
1433 /* Pop 1, push 1 */
1434 vstack_ax(stack)->type = REG_U64;
1435 next_pc += sizeof(struct load_op);
1436 break;
1437 }
1438
1439 case BYTECODE_OP_LOAD_FIELD_STRING:
1440 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
1441 {
1442 /* Pop 1, push 1 */
1443 vstack_ax(stack)->type = REG_STRING;
1444 next_pc += sizeof(struct load_op);
1445 break;
1446 }
1447
1448 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
1449 {
1450 /* Pop 1, push 1 */
1451 vstack_ax(stack)->type = REG_DOUBLE;
1452 next_pc += sizeof(struct load_op);
1453 break;
1454 }
1455
1456 case BYTECODE_OP_GET_SYMBOL:
1457 {
1458 struct load_op *insn = (struct load_op *) pc;
1459
1460 dbg_printf("op get symbol\n");
1461 switch (vstack_ax(stack)->load.type) {
1462 case LOAD_OBJECT:
1463 ERR("Nested fields not implemented yet.");
1464 ret = -EINVAL;
1465 goto end;
1466 case LOAD_ROOT_CONTEXT:
1467 /* Lookup context field. */
1468 ret = specialize_context_lookup(*pctx,
1469 bytecode, insn,
1470 &vstack_ax(stack)->load);
1471 if (ret)
1472 goto end;
1473 break;
1474 case LOAD_ROOT_APP_CONTEXT:
1475 /* Lookup app context field. */
1476 ret = specialize_app_context_lookup(pctx,
1477 bytecode, insn,
1478 &vstack_ax(stack)->load);
1479 if (ret)
1480 goto end;
1481 break;
1482 case LOAD_ROOT_PAYLOAD:
1483 /* Lookup event payload field. */
1484 ret = specialize_payload_lookup(event_desc,
1485 bytecode, insn,
1486 &vstack_ax(stack)->load);
1487 if (ret)
1488 goto end;
1489 break;
1490 }
1491 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1492 break;
1493 }
1494
1495 case BYTECODE_OP_GET_SYMBOL_FIELD:
1496 {
1497 /* Always generated by specialize phase. */
1498 ret = -EINVAL;
1499 goto end;
1500 }
1501
1502 case BYTECODE_OP_GET_INDEX_U16:
1503 {
1504 struct load_op *insn = (struct load_op *) pc;
1505 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1506
1507 dbg_printf("op get index u16\n");
1508 /* Pop 1, push 1 */
1509 ret = specialize_get_index(bytecode, insn, index->index,
1510 vstack_ax(stack), sizeof(*index));
1511 if (ret)
1512 goto end;
1513 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1514 break;
1515 }
1516
1517 case BYTECODE_OP_GET_INDEX_U64:
1518 {
1519 struct load_op *insn = (struct load_op *) pc;
1520 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1521
1522 dbg_printf("op get index u64\n");
1523 /* Pop 1, push 1 */
1524 ret = specialize_get_index(bytecode, insn, index->index,
1525 vstack_ax(stack), sizeof(*index));
1526 if (ret)
1527 goto end;
1528 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1529 break;
1530 }
1531
1532 }
1533 }
1534 end:
1535 return ret;
1536 }
This page took 0.105138 seconds and 3 git commands to generate.