Fix: bytecode interpreter context_get_index() leaves byte order uninitialized
lttng-ust.git: src/lib/lttng-ust/lttng-bytecode-interpreter.c
1 /*
2 * SPDX-License-Identifier: MIT
3 *
4 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
5 *
6 * LTTng UST bytecode interpreter.
7 */
8
9 #define _LGPL_SOURCE
10 #include <stddef.h>
11 #include <stdint.h>
12
13 #include <lttng/urcu/pointer.h>
14 #include <urcu/rculist.h>
15 #include <lttng/ust-endian.h>
16 #include <lttng/ust-events.h>
17 #include "lib/lttng-ust/events.h"
18
19 #include "lttng-bytecode.h"
20 #include "common/strutils.h"
21
22
23 /*
24 * -1: wildcard found.
25 * -2: unknown escape char.
26 * 0: normal char.
27 */
28
29 static
30 int parse_char(const char **p)
31 {
32 switch (**p) {
33 case '\\':
34 (*p)++;
35 switch (**p) {
36 case '\\':
37 case '*':
38 return 0;
39 default:
40 return -2;
41 }
42 case '*':
43 return -1;
44 default:
45 return 0;
46 }
47 }
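
/*
 * Example (illustrative): in a plain string literal, the escape "\*" is
 * reported as a normal character (0) and compares as a literal '*',
 * whereas a bare '*' is reported as a wildcard (-1), which makes the
 * comparison in stack_strcmp() succeed from that point on.
 */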
48
49 /*
50 * Returns SIZE_MAX if the string is null-terminated, or the number of
51 * characters if not.
52 */
53 static
54 size_t get_str_or_seq_len(const struct estack_entry *entry)
55 {
56 return entry->u.s.seq_len;
57 }
58
59 static
60 int stack_star_glob_match(struct estack *stack, int top,
61 const char *cmp_type __attribute__((unused)))
62 {
63 const char *pattern;
64 const char *candidate;
65 size_t pattern_len;
66 size_t candidate_len;
67
68 /* Find out which side is the pattern vs. the candidate. */
69 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
70 pattern = estack_ax(stack, top)->u.s.str;
71 pattern_len = get_str_or_seq_len(estack_ax(stack, top));
72 candidate = estack_bx(stack, top)->u.s.str;
73 candidate_len = get_str_or_seq_len(estack_bx(stack, top));
74 } else {
75 pattern = estack_bx(stack, top)->u.s.str;
76 pattern_len = get_str_or_seq_len(estack_bx(stack, top));
77 candidate = estack_ax(stack, top)->u.s.str;
78 candidate_len = get_str_or_seq_len(estack_ax(stack, top));
79 }
80
81 /* Perform the match. Returns 0 when the result is true. */
82 return !strutils_star_glob_match(pattern, pattern_len, candidate,
83 candidate_len);
84 }
85
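/*
 * Three-way comparison of the two string entries at the top of the
 * stack (bx relative to ax), bounded by each operand's sequence length.
 * Returns a negative value, zero or a positive value, like strcmp().
 * For plain string literals, escape sequences are decoded and a bare
 * '*' makes the operands compare equal from that point on.
 */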
86 static
87 int stack_strcmp(struct estack *stack, int top, const char *cmp_type __attribute__((unused)))
88 {
89 const char *p = estack_bx(stack, top)->u.s.str, *q = estack_ax(stack, top)->u.s.str;
90 int ret;
91 int diff;
92
93 for (;;) {
94 int escaped_r0 = 0;
95
96 if (unlikely(p - estack_bx(stack, top)->u.s.str >= estack_bx(stack, top)->u.s.seq_len || *p == '\0')) {
97 if (q - estack_ax(stack, top)->u.s.str >= estack_ax(stack, top)->u.s.seq_len || *q == '\0') {
98 return 0;
99 } else {
100 if (estack_ax(stack, top)->u.s.literal_type ==
101 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
102 ret = parse_char(&q);
103 if (ret == -1)
104 return 0;
105 }
106 return -1;
107 }
108 }
109 if (unlikely(q - estack_ax(stack, top)->u.s.str >= estack_ax(stack, top)->u.s.seq_len || *q == '\0')) {
110 if (estack_bx(stack, top)->u.s.literal_type ==
111 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
112 ret = parse_char(&p);
113 if (ret == -1)
114 return 0;
115 }
116 return 1;
117 }
118 if (estack_bx(stack, top)->u.s.literal_type ==
119 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
120 ret = parse_char(&p);
121 if (ret == -1) {
122 return 0;
123 } else if (ret == -2) {
124 escaped_r0 = 1;
125 }
126 /* else compare both char */
127 }
128 if (estack_ax(stack, top)->u.s.literal_type ==
129 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
130 ret = parse_char(&q);
131 if (ret == -1) {
132 return 0;
133 } else if (ret == -2) {
134 if (!escaped_r0)
135 return -1;
136 } else {
137 if (escaped_r0)
138 return 1;
139 }
140 } else {
141 if (escaped_r0)
142 return 1;
143 }
144 diff = *p - *q;
145 if (diff != 0)
146 break;
147 p++;
148 q++;
149 }
150 return diff;
151 }
152
153 int lttng_bytecode_interpret_error(
154 struct lttng_ust_bytecode_runtime *bytecode_runtime __attribute__((unused)),
155 const char *stack_data __attribute__((unused)),
156 struct lttng_ust_probe_ctx *probe_ctx __attribute__((unused)),
157 void *ctx __attribute__((unused)))
158 {
159 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR;
160 }
161
162 #ifdef INTERPRETER_USE_SWITCH
163
164 /*
165 * Fallback for compilers that do not support taking address of labels.
166 */
167
168 #define START_OP \
169 start_pc = &bytecode->code[0]; \
170 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
171 pc = next_pc) { \
172 dbg_printf("Executing op %s (%u)\n", \
173 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
174 (unsigned int) *(bytecode_opcode_t *) pc); \
175 switch (*(bytecode_opcode_t *) pc) {
176
177 #define OP(name) jump_target_##name: __attribute__((unused)); \
178 case name
179
180 #define PO break
181
182 #define END_OP } \
183 }
184
185 #define JUMP_TO(name) \
186 goto jump_target_##name
187
188 #else
189
190 /*
191 * Dispatch-table based interpreter.
192 */
193
194 #define START_OP \
195 start_pc = &bytecode->code[0]; \
196 pc = next_pc = start_pc; \
197 if (unlikely(pc - start_pc >= bytecode->len)) \
198 goto end; \
199 goto *dispatch[*(bytecode_opcode_t *) pc];
200
201 #define OP(name) \
202 LABEL_##name
203
204 #define PO \
205 pc = next_pc; \
206 goto *dispatch[*(bytecode_opcode_t *) pc];
207
208 #define END_OP
209
210 #define JUMP_TO(name) \
211 goto LABEL_##name
212
213 #endif
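
/*
 * Illustrative sketch of the dispatch-table technique above, using
 * hypothetical opcodes (not part of this interpreter): with the
 * labels-as-values extension, each handler fetches the next opcode and
 * jumps directly to its label instead of looping back to a switch.
 *
 *   static int run(const uint8_t *code)
 *   {
 *           static void *dispatch[] = { &&L_HALT, &&L_NOP };
 *           const uint8_t *pc = code;
 *
 *           goto *dispatch[*pc];
 *   L_NOP:
 *           pc++;
 *           goto *dispatch[*pc];
 *   L_HALT:
 *           return 0;
 *   }
 */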
214
215 #define IS_INTEGER_REGISTER(reg_type) \
216 (reg_type == REG_U64 || reg_type == REG_S64)
217
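/*
 * Load the value of the context field at index idx into *ptr as a
 * LOAD_OBJECT.  For statically typed integer, enumeration and float
 * fields the byte order comes from the field type.  For dynamic types,
 * struct lttng_ust_ctx_value carries no byte order information, so the
 * value is taken to be in host byte order (rev_bo = false); leaving
 * rev_bo uninitialized in that path is the bug this commit fixes.
 */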
218 static int context_get_index(struct lttng_ust_ctx *ctx,
219 struct lttng_ust_probe_ctx *probe_ctx,
220 struct load_ptr *ptr,
221 uint32_t idx)
222 {
223
224 const struct lttng_ust_ctx_field *ctx_field;
225 const struct lttng_ust_event_field *field;
226 struct lttng_ust_ctx_value v;
227
228 ctx_field = &ctx->fields[idx];
229 field = ctx_field->event_field;
230 ptr->type = LOAD_OBJECT;
231 ptr->field = field;
232
233 switch (field->type->type) {
234 case lttng_ust_type_integer:
235 ctx_field->get_value(ctx_field->priv, probe_ctx, &v);
236 if (lttng_ust_get_type_integer(field->type)->signedness) {
237 ptr->object_type = OBJECT_TYPE_S64;
238 ptr->u.s64 = v.u.s64;
239 ptr->ptr = &ptr->u.s64;
240 } else {
241 ptr->object_type = OBJECT_TYPE_U64;
242 ptr->u.u64 = v.u.s64; /* Cast. */
243 ptr->ptr = &ptr->u.u64;
244 }
245 ptr->rev_bo = lttng_ust_get_type_integer(field->type)->reverse_byte_order;
246 break;
247 case lttng_ust_type_enum:
248 {
249 const struct lttng_ust_type_integer *itype;
250
251 itype = lttng_ust_get_type_integer(lttng_ust_get_type_enum(field->type)->container_type);
252 ctx_field->get_value(ctx_field->priv, probe_ctx, &v);
253 if (itype->signedness) {
254 ptr->object_type = OBJECT_TYPE_SIGNED_ENUM;
255 ptr->u.s64 = v.u.s64;
256 ptr->ptr = &ptr->u.s64;
257 } else {
258 ptr->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
259 ptr->u.u64 = v.u.s64; /* Cast. */
260 ptr->ptr = &ptr->u.u64;
261 }
262 ptr->rev_bo = itype->reverse_byte_order;
263 break;
264 }
265 case lttng_ust_type_array:
266 if (lttng_ust_get_type_array(field->type)->elem_type->type != lttng_ust_type_integer) {
267 ERR("Array nesting only supports integer types.");
268 return -EINVAL;
269 }
270 if (lttng_ust_get_type_array(field->type)->encoding == lttng_ust_string_encoding_none) {
271 ERR("Only string arrays are supported for contexts.");
272 return -EINVAL;
273 }
274 ptr->object_type = OBJECT_TYPE_STRING;
275 ctx_field->get_value(ctx_field->priv, probe_ctx, &v);
276 ptr->ptr = v.u.str;
277 break;
278 case lttng_ust_type_sequence:
279 if (lttng_ust_get_type_sequence(field->type)->elem_type->type != lttng_ust_type_integer) {
280 ERR("Sequence nesting only supports integer types.");
281 return -EINVAL;
282 }
283 if (lttng_ust_get_type_sequence(field->type)->encoding == lttng_ust_string_encoding_none) {
284 ERR("Only string sequences are supported for contexts.");
285 return -EINVAL;
286 }
287 ptr->object_type = OBJECT_TYPE_STRING;
288 ctx_field->get_value(ctx_field->priv, probe_ctx, &v);
289 ptr->ptr = v.u.str;
290 break;
291 case lttng_ust_type_string:
292 ptr->object_type = OBJECT_TYPE_STRING;
293 ctx_field->get_value(ctx_field->priv, probe_ctx, &v);
294 ptr->ptr = v.u.str;
295 break;
296 case lttng_ust_type_float:
297 ptr->object_type = OBJECT_TYPE_DOUBLE;
298 ctx_field->get_value(ctx_field->priv, probe_ctx, &v);
299 ptr->u.d = v.u.d;
300 ptr->ptr = &ptr->u.d;
301 ptr->rev_bo = lttng_ust_get_type_float(field->type)->reverse_byte_order;
302 break;
303 case lttng_ust_type_dynamic:
304 ctx_field->get_value(ctx_field->priv, probe_ctx, &v);
305 switch (v.sel) {
306 case LTTNG_UST_DYNAMIC_TYPE_NONE:
307 return -EINVAL;
308 case LTTNG_UST_DYNAMIC_TYPE_U8:
309 case LTTNG_UST_DYNAMIC_TYPE_U16:
310 case LTTNG_UST_DYNAMIC_TYPE_U32:
311 case LTTNG_UST_DYNAMIC_TYPE_U64:
312 ptr->object_type = OBJECT_TYPE_U64;
313 ptr->u.u64 = v.u.u64;
314 ptr->ptr = &ptr->u.u64;
315 /*
316 * struct lttng_ust_ctx_value does not currently
317 * feature a byte order field.
318 */
319 ptr->rev_bo = false;
320 dbg_printf("context get index dynamic u64 %" PRIu64 "\n", ptr->u.u64);
321 break;
322 case LTTNG_UST_DYNAMIC_TYPE_S8:
323 case LTTNG_UST_DYNAMIC_TYPE_S16:
324 case LTTNG_UST_DYNAMIC_TYPE_S32:
325 case LTTNG_UST_DYNAMIC_TYPE_S64:
326 ptr->object_type = OBJECT_TYPE_S64;
327 ptr->u.s64 = v.u.s64;
328 ptr->ptr = &ptr->u.s64;
329 /*
330 * struct lttng_ust_ctx_value does not currently
331 * feature a byte order field.
332 */
333 ptr->rev_bo = false;
334 dbg_printf("context get index dynamic s64 %" PRIi64 "\n", ptr->u.s64);
335 break;
336 case LTTNG_UST_DYNAMIC_TYPE_FLOAT:
337 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE:
338 ptr->object_type = OBJECT_TYPE_DOUBLE;
339 ptr->u.d = v.u.d;
340 ptr->ptr = &ptr->u.d;
341 /*
342 * struct lttng_ust_ctx_value does not currently
343 * feature a byte order field.
344 */
345 ptr->rev_bo = false;
346 dbg_printf("context get index dynamic double %g\n", ptr->u.d);
347 break;
348 case LTTNG_UST_DYNAMIC_TYPE_STRING:
349 ptr->object_type = OBJECT_TYPE_STRING;
350 ptr->ptr = v.u.str;
351 dbg_printf("context get index dynamic string %s\n", (const char *) ptr->ptr);
352 break;
353 default:
354 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v.sel);
355 return -EINVAL;
356 }
357 break;
358 default:
359 ERR("Unknown type: %d", (int) field->type->type);
360 return -EINVAL;
361 }
362 return 0;
363 }
364
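/*
 * Apply a "get index" instruction to the object on top of the stack:
 * index an element of an array or sequence object, look up a context
 * field (context and application-context roots), or offset into the
 * event payload (payload root).  On success the stack top is left as a
 * REG_PTR typed object.
 */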
365 static int dynamic_get_index(struct lttng_ust_ctx *ctx,
366 struct lttng_ust_probe_ctx *probe_ctx,
367 struct bytecode_runtime *runtime,
368 uint64_t index, struct estack_entry *stack_top)
369 {
370 int ret;
371 const struct bytecode_get_index_data *gid;
372
373 gid = (const struct bytecode_get_index_data *) &runtime->data[index];
374 switch (stack_top->u.ptr.type) {
375 case LOAD_OBJECT:
376 switch (stack_top->u.ptr.object_type) {
377 case OBJECT_TYPE_ARRAY:
378 {
379 const char *ptr;
380
381 assert(gid->offset < gid->array_len);
382 /* Skip count (unsigned long) */
383 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
384 ptr = ptr + gid->offset;
385 stack_top->u.ptr.ptr = ptr;
386 stack_top->u.ptr.object_type = gid->elem.type;
387 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
388 assert(stack_top->u.ptr.field->type->type == lttng_ust_type_array);
389 stack_top->u.ptr.field = NULL;
390 break;
391 }
392 case OBJECT_TYPE_SEQUENCE:
393 {
394 const char *ptr;
395 size_t ptr_seq_len;
396
397 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
398 ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
399 if (gid->offset >= gid->elem.len * ptr_seq_len) {
400 ret = -EINVAL;
401 goto end;
402 }
403 ptr = ptr + gid->offset;
404 stack_top->u.ptr.ptr = ptr;
405 stack_top->u.ptr.object_type = gid->elem.type;
406 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
407 assert(stack_top->u.ptr.field->type->type == lttng_ust_type_sequence);
408 stack_top->u.ptr.field = NULL;
409 break;
410 }
411 case OBJECT_TYPE_STRUCT:
412 ERR("Nested structures are not supported yet.");
413 ret = -EINVAL;
414 goto end;
415 case OBJECT_TYPE_VARIANT:
416 default:
417 ERR("Unexpected get index type %d",
418 (int) stack_top->u.ptr.object_type);
419 ret = -EINVAL;
420 goto end;
421 }
422 break;
423 case LOAD_ROOT_CONTEXT:
424 case LOAD_ROOT_APP_CONTEXT: /* Fall-through */
425 {
426 ret = context_get_index(ctx,
427 probe_ctx,
428 &stack_top->u.ptr,
429 gid->ctx_index);
430 if (ret) {
431 goto end;
432 }
433 break;
434 }
435 case LOAD_ROOT_PAYLOAD:
436 stack_top->u.ptr.ptr += gid->offset;
437 if (gid->elem.type == OBJECT_TYPE_STRING)
438 stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
439 stack_top->u.ptr.object_type = gid->elem.type;
440 stack_top->u.ptr.type = LOAD_OBJECT;
441 stack_top->u.ptr.field = gid->field;
442 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
443 break;
444 }
445
446 stack_top->type = REG_PTR;
447
448 return 0;
449
450 end:
451 return ret;
452 }
453
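/*
 * Dereference the object pointer on top of the stack and replace it
 * with a typed register value (REG_S64, REG_U64, REG_DOUBLE or
 * REG_STRING), byte-swapping integer values stored in reverse byte
 * order.  Compound objects (sequence, array, struct, variant) cannot be
 * loaded directly.
 */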
454 static int dynamic_load_field(struct estack_entry *stack_top)
455 {
456 int ret;
457
458 switch (stack_top->u.ptr.type) {
459 case LOAD_OBJECT:
460 break;
461 case LOAD_ROOT_CONTEXT:
462 case LOAD_ROOT_APP_CONTEXT:
463 case LOAD_ROOT_PAYLOAD:
464 default:
465 dbg_printf("Interpreter warning: cannot load root, missing field name.\n");
466 ret = -EINVAL;
467 goto end;
468 }
469 switch (stack_top->u.ptr.object_type) {
470 case OBJECT_TYPE_S8:
471 dbg_printf("op load field s8\n");
472 stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
473 stack_top->type = REG_S64;
474 break;
475 case OBJECT_TYPE_S16:
476 {
477 int16_t tmp;
478
479 dbg_printf("op load field s16\n");
480 tmp = *(int16_t *) stack_top->u.ptr.ptr;
481 if (stack_top->u.ptr.rev_bo)
482 tmp = lttng_ust_bswap_16(tmp);
483 stack_top->u.v = tmp;
484 stack_top->type = REG_S64;
485 break;
486 }
487 case OBJECT_TYPE_S32:
488 {
489 int32_t tmp;
490
491 dbg_printf("op load field s32\n");
492 tmp = *(int32_t *) stack_top->u.ptr.ptr;
493 if (stack_top->u.ptr.rev_bo)
494 tmp = lttng_ust_bswap_32(tmp);
495 stack_top->u.v = tmp;
496 stack_top->type = REG_S64;
497 break;
498 }
499 case OBJECT_TYPE_S64:
500 {
501 int64_t tmp;
502
503 dbg_printf("op load field s64\n");
504 tmp = *(int64_t *) stack_top->u.ptr.ptr;
505 if (stack_top->u.ptr.rev_bo)
506 tmp = lttng_ust_bswap_64(tmp);
507 stack_top->u.v = tmp;
508 stack_top->type = REG_S64;
509 break;
510 }
511 case OBJECT_TYPE_SIGNED_ENUM:
512 {
513 int64_t tmp;
514
515 dbg_printf("op load field signed enumeration\n");
516 tmp = *(int64_t *) stack_top->u.ptr.ptr;
517 if (stack_top->u.ptr.rev_bo)
518 tmp = lttng_ust_bswap_64(tmp);
519 stack_top->u.v = tmp;
520 stack_top->type = REG_S64;
521 break;
522 }
523 case OBJECT_TYPE_U8:
524 dbg_printf("op load field u8\n");
525 stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
526 stack_top->type = REG_U64;
527 break;
528 case OBJECT_TYPE_U16:
529 {
530 uint16_t tmp;
531
532 dbg_printf("op load field u16\n");
533 tmp = *(uint16_t *) stack_top->u.ptr.ptr;
534 if (stack_top->u.ptr.rev_bo)
535 tmp = lttng_ust_bswap_16(tmp);
536 stack_top->u.v = tmp;
537 stack_top->type = REG_U64;
538 break;
539 }
540 case OBJECT_TYPE_U32:
541 {
542 uint32_t tmp;
543
544 dbg_printf("op load field u32\n");
545 tmp = *(uint32_t *) stack_top->u.ptr.ptr;
546 if (stack_top->u.ptr.rev_bo)
547 tmp = lttng_ust_bswap_32(tmp);
548 stack_top->u.v = tmp;
549 stack_top->type = REG_U64;
550 break;
551 }
552 case OBJECT_TYPE_U64:
553 {
554 uint64_t tmp;
555
556 dbg_printf("op load field u64\n");
557 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
558 if (stack_top->u.ptr.rev_bo)
559 tmp = lttng_ust_bswap_64(tmp);
560 stack_top->u.v = tmp;
561 stack_top->type = REG_U64;
562 break;
563 }
564 case OBJECT_TYPE_UNSIGNED_ENUM:
565 {
566 uint64_t tmp;
567
568 dbg_printf("op load field unsigned enumeration\n");
569 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
570 if (stack_top->u.ptr.rev_bo)
571 tmp = lttng_ust_bswap_64(tmp);
572 stack_top->u.v = tmp;
573 stack_top->type = REG_U64;
574 break;
575 }
576 case OBJECT_TYPE_DOUBLE:
577 memcpy(&stack_top->u.d,
578 stack_top->u.ptr.ptr,
579 sizeof(struct literal_double));
580 stack_top->type = REG_DOUBLE;
581 break;
582 case OBJECT_TYPE_STRING:
583 {
584 const char *str;
585
586 dbg_printf("op load field string\n");
587 str = (const char *) stack_top->u.ptr.ptr;
588 stack_top->u.s.str = str;
589 if (unlikely(!stack_top->u.s.str)) {
590 dbg_printf("Interpreter warning: loading a NULL string.\n");
591 ret = -EINVAL;
592 goto end;
593 }
594 stack_top->u.s.seq_len = SIZE_MAX;
595 stack_top->u.s.literal_type =
596 ESTACK_STRING_LITERAL_TYPE_NONE;
597 stack_top->type = REG_STRING;
598 break;
599 }
600 case OBJECT_TYPE_STRING_SEQUENCE:
601 {
602 const char *ptr;
603
604 dbg_printf("op load field string sequence\n");
605 ptr = stack_top->u.ptr.ptr;
606 stack_top->u.s.seq_len = *(unsigned long *) ptr;
607 stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
608 stack_top->type = REG_STRING;
609 if (unlikely(!stack_top->u.s.str)) {
610 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
611 ret = -EINVAL;
612 goto end;
613 }
614 stack_top->u.s.literal_type =
615 ESTACK_STRING_LITERAL_TYPE_NONE;
616 break;
617 }
618 case OBJECT_TYPE_DYNAMIC:
619 /*
620 * Dynamic types in context are looked up
621 * by context get index.
622 */
623 ret = -EINVAL;
624 goto end;
625 case OBJECT_TYPE_SEQUENCE:
626 case OBJECT_TYPE_ARRAY:
627 case OBJECT_TYPE_STRUCT:
628 case OBJECT_TYPE_VARIANT:
629 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
630 ret = -EINVAL;
631 goto end;
632 }
633 return 0;
634
635 end:
636 return ret;
637 }
638
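/*
 * Convert the register on top of the stack into a
 * struct lttng_interpreter_output, loading pointer-typed objects on
 * demand.  This is the output path used when interpreting CAPTURE
 * bytecode.
 */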
639 static
640 int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
641 struct lttng_interpreter_output *output)
642 {
643 int ret;
644
645 again:
646 switch (ax->type) {
647 case REG_S64:
648 output->type = LTTNG_INTERPRETER_TYPE_S64;
649 output->u.s = ax->u.v;
650 break;
651 case REG_U64:
652 output->type = LTTNG_INTERPRETER_TYPE_U64;
653 output->u.u = (uint64_t) ax->u.v;
654 break;
655 case REG_DOUBLE:
656 output->type = LTTNG_INTERPRETER_TYPE_DOUBLE;
657 output->u.d = ax->u.d;
658 break;
659 case REG_STRING:
660 output->type = LTTNG_INTERPRETER_TYPE_STRING;
661 output->u.str.str = ax->u.s.str;
662 output->u.str.len = ax->u.s.seq_len;
663 break;
664 case REG_PTR:
665 switch (ax->u.ptr.object_type) {
666 case OBJECT_TYPE_S8:
667 case OBJECT_TYPE_S16:
668 case OBJECT_TYPE_S32:
669 case OBJECT_TYPE_S64:
670 case OBJECT_TYPE_U8:
671 case OBJECT_TYPE_U16:
672 case OBJECT_TYPE_U32:
673 case OBJECT_TYPE_U64:
674 case OBJECT_TYPE_DOUBLE:
675 case OBJECT_TYPE_STRING:
676 case OBJECT_TYPE_STRING_SEQUENCE:
677 ret = dynamic_load_field(ax);
678 if (ret)
679 return ret;
680 /* Retry after loading ptr into stack top. */
681 goto again;
682 case OBJECT_TYPE_SEQUENCE:
683 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
684 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
685 output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
686 output->u.sequence.nested_type = lttng_ust_get_type_sequence(ax->u.ptr.field->type)->elem_type;
687 break;
688 case OBJECT_TYPE_ARRAY:
689 /* Skip count (unsigned long) */
690 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
691 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
692 output->u.sequence.nr_elem = lttng_ust_get_type_array(ax->u.ptr.field->type)->length;
693 output->u.sequence.nested_type = lttng_ust_get_type_array(ax->u.ptr.field->type)->elem_type;
694 break;
695 case OBJECT_TYPE_SIGNED_ENUM:
696 ret = dynamic_load_field(ax);
697 if (ret)
698 return ret;
699 output->type = LTTNG_INTERPRETER_TYPE_SIGNED_ENUM;
700 output->u.s = ax->u.v;
701 break;
702 case OBJECT_TYPE_UNSIGNED_ENUM:
703 ret = dynamic_load_field(ax);
704 if (ret)
705 return ret;
706 output->type = LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM;
707 output->u.u = ax->u.v;
708 break;
709 case OBJECT_TYPE_STRUCT:
710 case OBJECT_TYPE_VARIANT:
711 default:
712 return -EINVAL;
713 }
714
715 break;
716 case REG_STAR_GLOB_STRING:
717 case REG_UNKNOWN:
718 default:
719 return -EINVAL;
720 }
721
722 return 0;
723 }
724
725 /*
726 * Return LTTNG_UST_BYTECODE_INTERPRETER_OK on success.
727 * Return LTTNG_UST_BYTECODE_INTERPRETER_ERROR on error.
728 *
729 * For FILTER bytecode: expect a struct lttng_ust_bytecode_filter_ctx *
730 * as @ctx argument.
731 * For CAPTURE bytecode: expect a struct lttng_interpreter_output *
732 * as @ctx argument.
733 */
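
/*
 * Illustrative FILTER invocation sketch.  The filter context field and
 * enum names below are assumptions modelled on lttng/ust-events.h, and
 * record_the_event() is hypothetical; this is not a verbatim call site.
 *
 *   struct lttng_ust_bytecode_filter_ctx filter_ctx;
 *
 *   filter_ctx.result = LTTNG_UST_BYTECODE_FILTER_REJECT;
 *   if (lttng_bytecode_interpret(runtime, stack_data, &probe_ctx,
 *                   &filter_ctx) == LTTNG_UST_BYTECODE_INTERPRETER_OK &&
 *                   filter_ctx.result == LTTNG_UST_BYTECODE_FILTER_ACCEPT)
 *           record_the_event();
 */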
734 int lttng_bytecode_interpret(struct lttng_ust_bytecode_runtime *ust_bytecode,
735 const char *interpreter_stack_data,
736 struct lttng_ust_probe_ctx *probe_ctx,
737 void *caller_ctx)
738 {
739 struct bytecode_runtime *bytecode = caa_container_of(ust_bytecode, struct bytecode_runtime, p);
740 struct lttng_ust_ctx *ctx = lttng_ust_rcu_dereference(*ust_bytecode->pctx);
741 void *pc, *next_pc, *start_pc;
742 int ret = -EINVAL, retval = 0;
743 struct estack _stack;
744 struct estack *stack = &_stack;
745 register int64_t ax = 0, bx = 0;
746 register enum entry_type ax_t = REG_UNKNOWN, bx_t = REG_UNKNOWN;
747 register int top = INTERPRETER_STACK_EMPTY;
748 #ifndef INTERPRETER_USE_SWITCH
749 static void *dispatch[NR_BYTECODE_OPS] = {
750 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
751
752 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
753
754 /* binary */
755 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
756 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
757 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
758 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
759 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
760 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
761 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
762 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
763 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
764 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
765
766 /* binary comparators */
767 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
768 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
769 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
770 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
771 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
772 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
773
774 /* string binary comparator */
775 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
776 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
777 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
778 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
779 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
780 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
781
782 /* globbing pattern binary comparator */
783 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
784 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
785
786 /* s64 binary comparator */
787 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
788 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
789 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
790 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
791 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
792 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
793
794 /* double binary comparator */
795 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
796 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
797 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
798 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
799 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
800 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
801
802 /* Mixed S64-double binary comparators */
803 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
804 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
805 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
806 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
807 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
808 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
809
810 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
811 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
812 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
813 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
814 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
815 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
816
817 /* unary */
818 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
819 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
820 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
821 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
822 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
823 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
824 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
825 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
826 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
827
828 /* logical */
829 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
830 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
831
832 /* load field ref */
833 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
834 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
835 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
836 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
837 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
838
839 /* load from immediate operand */
840 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
841 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
842 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
843 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
844
845 /* cast */
846 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
847 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
848 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
849
850 /* get context ref */
851 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
852 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
853 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
854 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
855
856 /* Instructions for recursive traversal through composed types. */
857 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
858 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
859 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
860
861 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
862 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
863 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
864 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
865
866 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
867 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
868 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
869 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
870 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
871 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
872 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
873 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
874 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
875 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
876 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
877 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
878
879 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
880
881 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
882 };
883 #endif /* #ifndef INTERPRETER_USE_SWITCH */
884
885 START_OP
886
887 OP(BYTECODE_OP_UNKNOWN):
888 OP(BYTECODE_OP_LOAD_FIELD_REF):
889 #ifdef INTERPRETER_USE_SWITCH
890 default:
891 #endif /* INTERPRETER_USE_SWITCH */
892 ERR("unknown bytecode op %u",
893 (unsigned int) *(bytecode_opcode_t *) pc);
894 ret = -EINVAL;
895 goto end;
896
897 OP(BYTECODE_OP_RETURN):
898 /* LTTNG_UST_BYTECODE_INTERPRETER_ERROR or LTTNG_UST_BYTECODE_INTERPRETER_OK */
899 /* Handle dynamic typing. */
900 switch (estack_ax_t) {
901 case REG_S64:
902 case REG_U64:
903 retval = !!estack_ax_v;
904 break;
905 case REG_DOUBLE:
906 case REG_STRING:
907 case REG_PTR:
908 if (ust_bytecode->type != LTTNG_UST_BYTECODE_TYPE_CAPTURE) {
909 ret = -EINVAL;
910 goto end;
911 }
912 retval = 0;
913 break;
914 case REG_STAR_GLOB_STRING:
915 case REG_UNKNOWN:
916 default:
917 ret = -EINVAL;
918 goto end;
919 }
920 ret = 0;
921 goto end;
922
923 OP(BYTECODE_OP_RETURN_S64):
924 /* LTTNG_UST_BYTECODE_INTERPRETER_ERROR or LTTNG_UST_BYTECODE_INTERPRETER_OK */
925 retval = !!estack_ax_v;
926 ret = 0;
927 goto end;
928
929 /* binary */
930 OP(BYTECODE_OP_MUL):
931 OP(BYTECODE_OP_DIV):
932 OP(BYTECODE_OP_MOD):
933 OP(BYTECODE_OP_PLUS):
934 OP(BYTECODE_OP_MINUS):
935 ERR("unsupported bytecode op %u",
936 (unsigned int) *(bytecode_opcode_t *) pc);
937 ret = -EINVAL;
938 goto end;
939
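/*
 * The generic comparison opcodes below (EQ, NE, GT, LT, GE, LE) look at
 * the runtime types of the ax and bx registers and jump to the matching
 * type-specialized handler (e.g. BYTECODE_OP_EQ_S64,
 * BYTECODE_OP_EQ_DOUBLE_S64), rejecting type combinations that cannot
 * be compared.
 */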
940 OP(BYTECODE_OP_EQ):
941 {
942 /* Dynamic typing. */
943 switch (estack_ax_t) {
944 case REG_S64: /* Fall-through */
945 case REG_U64:
946 switch (estack_bx_t) {
947 case REG_S64: /* Fall-through */
948 case REG_U64:
949 JUMP_TO(BYTECODE_OP_EQ_S64);
950 case REG_DOUBLE:
951 JUMP_TO(BYTECODE_OP_EQ_DOUBLE_S64);
952 case REG_STRING: /* Fall-through */
953 case REG_STAR_GLOB_STRING:
954 ret = -EINVAL;
955 goto end;
956 default:
957 ERR("Unknown interpreter register type (%d)",
958 (int) estack_bx_t);
959 ret = -EINVAL;
960 goto end;
961 }
962 break;
963 case REG_DOUBLE:
964 switch (estack_bx_t) {
965 case REG_S64: /* Fall-through */
966 case REG_U64:
967 JUMP_TO(BYTECODE_OP_EQ_S64_DOUBLE);
968 case REG_DOUBLE:
969 JUMP_TO(BYTECODE_OP_EQ_DOUBLE);
970 case REG_STRING: /* Fall-through */
971 case REG_STAR_GLOB_STRING:
972 ret = -EINVAL;
973 goto end;
974 default:
975 ERR("Unknown interpreter register type (%d)",
976 (int) estack_bx_t);
977 ret = -EINVAL;
978 goto end;
979 }
980 break;
981 case REG_STRING:
982 switch (estack_bx_t) {
983 case REG_S64: /* Fall-through */
984 case REG_U64: /* Fall-through */
985 case REG_DOUBLE:
986 ret = -EINVAL;
987 goto end;
988 case REG_STRING:
989 JUMP_TO(BYTECODE_OP_EQ_STRING);
990 case REG_STAR_GLOB_STRING:
991 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING);
992 default:
993 ERR("Unknown interpreter register type (%d)",
994 (int) estack_bx_t);
995 ret = -EINVAL;
996 goto end;
997 }
998 break;
999 case REG_STAR_GLOB_STRING:
1000 switch (estack_bx_t) {
1001 case REG_S64: /* Fall-through */
1002 case REG_U64: /* Fall-through */
1003 case REG_DOUBLE:
1004 ret = -EINVAL;
1005 goto end;
1006 case REG_STRING:
1007 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING);
1008 case REG_STAR_GLOB_STRING:
1009 ret = -EINVAL;
1010 goto end;
1011 default:
1012 ERR("Unknown interpreter register type (%d)",
1013 (int) estack_bx_t);
1014 ret = -EINVAL;
1015 goto end;
1016 }
1017 break;
1018 default:
1019 ERR("Unknown interpreter register type (%d)",
1020 (int) estack_ax_t);
1021 ret = -EINVAL;
1022 goto end;
1023 }
1024 }
1025 OP(BYTECODE_OP_NE):
1026 {
1027 /* Dynamic typing. */
1028 switch (estack_ax_t) {
1029 case REG_S64: /* Fall-through */
1030 case REG_U64:
1031 switch (estack_bx_t) {
1032 case REG_S64: /* Fall-through */
1033 case REG_U64:
1034 JUMP_TO(BYTECODE_OP_NE_S64);
1035 case REG_DOUBLE:
1036 JUMP_TO(BYTECODE_OP_NE_DOUBLE_S64);
1037 case REG_STRING: /* Fall-through */
1038 case REG_STAR_GLOB_STRING:
1039 ret = -EINVAL;
1040 goto end;
1041 default:
1042 ERR("Unknown interpreter register type (%d)",
1043 (int) estack_bx_t);
1044 ret = -EINVAL;
1045 goto end;
1046 }
1047 break;
1048 case REG_DOUBLE:
1049 switch (estack_bx_t) {
1050 case REG_S64: /* Fall-through */
1051 case REG_U64:
1052 JUMP_TO(BYTECODE_OP_NE_S64_DOUBLE);
1053 case REG_DOUBLE:
1054 JUMP_TO(BYTECODE_OP_NE_DOUBLE);
1055 case REG_STRING: /* Fall-through */
1056 case REG_STAR_GLOB_STRING:
1057 ret = -EINVAL;
1058 goto end;
1059 default:
1060 ERR("Unknown interpreter register type (%d)",
1061 (int) estack_bx_t);
1062 ret = -EINVAL;
1063 goto end;
1064 }
1065 break;
1066 case REG_STRING:
1067 switch (estack_bx_t) {
1068 case REG_S64: /* Fall-through */
1069 case REG_U64:
1070 case REG_DOUBLE:
1071 ret = -EINVAL;
1072 goto end;
1073 case REG_STRING:
1074 JUMP_TO(BYTECODE_OP_NE_STRING);
1075 case REG_STAR_GLOB_STRING:
1076 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING);
1077 default:
1078 ERR("Unknown interpreter register type (%d)",
1079 (int) estack_bx_t);
1080 ret = -EINVAL;
1081 goto end;
1082 }
1083 break;
1084 case REG_STAR_GLOB_STRING:
1085 switch (estack_bx_t) {
1086 case REG_S64: /* Fall-through */
1087 case REG_U64:
1088 case REG_DOUBLE:
1089 ret = -EINVAL;
1090 goto end;
1091 case REG_STRING:
1092 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING);
1093 case REG_STAR_GLOB_STRING:
1094 ret = -EINVAL;
1095 goto end;
1096 default:
1097 ERR("Unknown interpreter register type (%d)",
1098 (int) estack_bx_t);
1099 ret = -EINVAL;
1100 goto end;
1101 }
1102 break;
1103 default:
1104 ERR("Unknown interpreter register type (%d)",
1105 (int) estack_ax_t);
1106 ret = -EINVAL;
1107 goto end;
1108 }
1109 }
1110 OP(BYTECODE_OP_GT):
1111 {
1112 /* Dynamic typing. */
1113 switch (estack_ax_t) {
1114 case REG_S64: /* Fall-through */
1115 case REG_U64:
1116 switch (estack_bx_t) {
1117 case REG_S64: /* Fall-through */
1118 case REG_U64:
1119 JUMP_TO(BYTECODE_OP_GT_S64);
1120 case REG_DOUBLE:
1121 JUMP_TO(BYTECODE_OP_GT_DOUBLE_S64);
1122 case REG_STRING: /* Fall-through */
1123 case REG_STAR_GLOB_STRING:
1124 ret = -EINVAL;
1125 goto end;
1126 default:
1127 ERR("Unknown interpreter register type (%d)",
1128 (int) estack_bx_t);
1129 ret = -EINVAL;
1130 goto end;
1131 }
1132 break;
1133 case REG_DOUBLE:
1134 switch (estack_bx_t) {
1135 case REG_S64: /* Fall-through */
1136 case REG_U64:
1137 JUMP_TO(BYTECODE_OP_GT_S64_DOUBLE);
1138 case REG_DOUBLE:
1139 JUMP_TO(BYTECODE_OP_GT_DOUBLE);
1140 case REG_STRING: /* Fall-through */
1141 case REG_STAR_GLOB_STRING:
1142 ret = -EINVAL;
1143 goto end;
1144 default:
1145 ERR("Unknown interpreter register type (%d)",
1146 (int) estack_bx_t);
1147 ret = -EINVAL;
1148 goto end;
1149 }
1150 break;
1151 case REG_STRING:
1152 switch (estack_bx_t) {
1153 case REG_S64: /* Fall-through */
1154 case REG_U64: /* Fall-through */
1155 case REG_DOUBLE: /* Fall-through */
1156 case REG_STAR_GLOB_STRING:
1157 ret = -EINVAL;
1158 goto end;
1159 case REG_STRING:
1160 JUMP_TO(BYTECODE_OP_GT_STRING);
1161 default:
1162 ERR("Unknown interpreter register type (%d)",
1163 (int) estack_bx_t);
1164 ret = -EINVAL;
1165 goto end;
1166 }
1167 break;
1168 default:
1169 ERR("Unknown interpreter register type (%d)",
1170 (int) estack_ax_t);
1171 ret = -EINVAL;
1172 goto end;
1173 }
1174 }
1175 OP(BYTECODE_OP_LT):
1176 {
1177 /* Dynamic typing. */
1178 switch (estack_ax_t) {
1179 case REG_S64: /* Fall-through */
1180 case REG_U64:
1181 switch (estack_bx_t) {
1182 case REG_S64: /* Fall-through */
1183 case REG_U64:
1184 JUMP_TO(BYTECODE_OP_LT_S64);
1185 case REG_DOUBLE:
1186 JUMP_TO(BYTECODE_OP_LT_DOUBLE_S64);
1187 case REG_STRING: /* Fall-through */
1188 case REG_STAR_GLOB_STRING:
1189 ret = -EINVAL;
1190 goto end;
1191 default:
1192 ERR("Unknown interpreter register type (%d)",
1193 (int) estack_bx_t);
1194 ret = -EINVAL;
1195 goto end;
1196 }
1197 break;
1198 case REG_DOUBLE:
1199 switch (estack_bx_t) {
1200 case REG_S64: /* Fall-through */
1201 case REG_U64:
1202 JUMP_TO(BYTECODE_OP_LT_S64_DOUBLE);
1203 case REG_DOUBLE:
1204 JUMP_TO(BYTECODE_OP_LT_DOUBLE);
1205 case REG_STRING: /* Fall-through */
1206 case REG_STAR_GLOB_STRING:
1207 ret = -EINVAL;
1208 goto end;
1209 default:
1210 ERR("Unknown interpreter register type (%d)",
1211 (int) estack_bx_t);
1212 ret = -EINVAL;
1213 goto end;
1214 }
1215 break;
1216 case REG_STRING:
1217 switch (estack_bx_t) {
1218 case REG_S64: /* Fall-through */
1219 case REG_U64: /* Fall-through */
1220 case REG_DOUBLE: /* Fall-through */
1221 case REG_STAR_GLOB_STRING:
1222 ret = -EINVAL;
1223 goto end;
1224 case REG_STRING:
1225 JUMP_TO(BYTECODE_OP_LT_STRING);
1226 default:
1227 ERR("Unknown interpreter register type (%d)",
1228 (int) estack_bx_t);
1229 ret = -EINVAL;
1230 goto end;
1231 }
1232 break;
1233 default:
1234 ERR("Unknown interpreter register type (%d)",
1235 (int) estack_ax_t);
1236 ret = -EINVAL;
1237 goto end;
1238 }
1239 }
1240 OP(BYTECODE_OP_GE):
1241 {
1242 /* Dynamic typing. */
1243 switch (estack_ax_t) {
1244 case REG_S64: /* Fall-through */
1245 case REG_U64:
1246 switch (estack_bx_t) {
1247 case REG_S64: /* Fall-through */
1248 case REG_U64:
1249 JUMP_TO(BYTECODE_OP_GE_S64);
1250 case REG_DOUBLE:
1251 JUMP_TO(BYTECODE_OP_GE_DOUBLE_S64);
1252 case REG_STRING: /* Fall-through */
1253 case REG_STAR_GLOB_STRING:
1254 ret = -EINVAL;
1255 goto end;
1256 default:
1257 ERR("Unknown interpreter register type (%d)",
1258 (int) estack_bx_t);
1259 ret = -EINVAL;
1260 goto end;
1261 }
1262 break;
1263 case REG_DOUBLE:
1264 switch (estack_bx_t) {
1265 case REG_S64: /* Fall-through */
1266 case REG_U64:
1267 JUMP_TO(BYTECODE_OP_GE_S64_DOUBLE);
1268 case REG_DOUBLE:
1269 JUMP_TO(BYTECODE_OP_GE_DOUBLE);
1270 case REG_STRING: /* Fall-through */
1271 case REG_STAR_GLOB_STRING:
1272 ret = -EINVAL;
1273 goto end;
1274 default:
1275 ERR("Unknown interpreter register type (%d)",
1276 (int) estack_bx_t);
1277 ret = -EINVAL;
1278 goto end;
1279 }
1280 break;
1281 case REG_STRING:
1282 switch (estack_bx_t) {
1283 case REG_S64: /* Fall-through */
1284 case REG_U64: /* Fall-through */
1285 case REG_DOUBLE: /* Fall-through */
1286 case REG_STAR_GLOB_STRING:
1287 ret = -EINVAL;
1288 goto end;
1289 case REG_STRING:
1290 JUMP_TO(BYTECODE_OP_GE_STRING);
1291 default:
1292 ERR("Unknown interpreter register type (%d)",
1293 (int) estack_bx_t);
1294 ret = -EINVAL;
1295 goto end;
1296 }
1297 break;
1298 default:
1299 ERR("Unknown interpreter register type (%d)",
1300 (int) estack_ax_t);
1301 ret = -EINVAL;
1302 goto end;
1303 }
1304 }
1305 OP(BYTECODE_OP_LE):
1306 {
1307 /* Dynamic typing. */
1308 switch (estack_ax_t) {
1309 case REG_S64: /* Fall-through */
1310 case REG_U64:
1311 switch (estack_bx_t) {
1312 case REG_S64: /* Fall-through */
1313 case REG_U64:
1314 JUMP_TO(BYTECODE_OP_LE_S64);
1315 case REG_DOUBLE:
1316 JUMP_TO(BYTECODE_OP_LE_DOUBLE_S64);
1317 case REG_STRING: /* Fall-through */
1318 case REG_STAR_GLOB_STRING:
1319 ret = -EINVAL;
1320 goto end;
1321 default:
1322 ERR("Unknown interpreter register type (%d)",
1323 (int) estack_bx_t);
1324 ret = -EINVAL;
1325 goto end;
1326 }
1327 break;
1328 case REG_DOUBLE:
1329 switch (estack_bx_t) {
1330 case REG_S64: /* Fall-through */
1331 case REG_U64:
1332 JUMP_TO(BYTECODE_OP_LE_S64_DOUBLE);
1333 case REG_DOUBLE:
1334 JUMP_TO(BYTECODE_OP_LE_DOUBLE);
1335 case REG_STRING: /* Fall-through */
1336 case REG_STAR_GLOB_STRING:
1337 ret = -EINVAL;
1338 goto end;
1339 default:
1340 ERR("Unknown interpreter register type (%d)",
1341 (int) estack_bx_t);
1342 ret = -EINVAL;
1343 goto end;
1344 }
1345 break;
1346 case REG_STRING:
1347 switch (estack_bx_t) {
1348 case REG_S64: /* Fall-through */
1349 case REG_U64: /* Fall-through */
1350 case REG_DOUBLE: /* Fall-through */
1351 case REG_STAR_GLOB_STRING:
1352 ret = -EINVAL;
1353 goto end;
1354 case REG_STRING:
1355 JUMP_TO(BYTECODE_OP_LE_STRING);
1356 default:
1357 ERR("Unknown interpreter register type (%d)",
1358 (int) estack_bx_t);
1359 ret = -EINVAL;
1360 goto end;
1361 }
1362 break;
1363 default:
1364 ERR("Unknown interpreter register type (%d)",
1365 (int) estack_ax_t);
1366 ret = -EINVAL;
1367 goto end;
1368 }
1369 }
1370
1371 OP(BYTECODE_OP_EQ_STRING):
1372 {
1373 int res;
1374
1375 res = (stack_strcmp(stack, top, "==") == 0);
1376 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1377 estack_ax_v = res;
1378 estack_ax_t = REG_S64;
1379 next_pc += sizeof(struct binary_op);
1380 PO;
1381 }
1382 OP(BYTECODE_OP_NE_STRING):
1383 {
1384 int res;
1385
1386 res = (stack_strcmp(stack, top, "!=") != 0);
1387 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1388 estack_ax_v = res;
1389 estack_ax_t = REG_S64;
1390 next_pc += sizeof(struct binary_op);
1391 PO;
1392 }
1393 OP(BYTECODE_OP_GT_STRING):
1394 {
1395 int res;
1396
1397 res = (stack_strcmp(stack, top, ">") > 0);
1398 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1399 estack_ax_v = res;
1400 estack_ax_t = REG_S64;
1401 next_pc += sizeof(struct binary_op);
1402 PO;
1403 }
1404 OP(BYTECODE_OP_LT_STRING):
1405 {
1406 int res;
1407
1408 res = (stack_strcmp(stack, top, "<") < 0);
1409 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1410 estack_ax_v = res;
1411 estack_ax_t = REG_S64;
1412 next_pc += sizeof(struct binary_op);
1413 PO;
1414 }
1415 OP(BYTECODE_OP_GE_STRING):
1416 {
1417 int res;
1418
1419 res = (stack_strcmp(stack, top, ">=") >= 0);
1420 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1421 estack_ax_v = res;
1422 estack_ax_t = REG_S64;
1423 next_pc += sizeof(struct binary_op);
1424 PO;
1425 }
1426 OP(BYTECODE_OP_LE_STRING):
1427 {
1428 int res;
1429
1430 res = (stack_strcmp(stack, top, "<=") <= 0);
1431 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1432 estack_ax_v = res;
1433 estack_ax_t = REG_S64;
1434 next_pc += sizeof(struct binary_op);
1435 PO;
1436 }
1437
1438 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
1439 {
1440 int res;
1441
1442 res = (stack_star_glob_match(stack, top, "==") == 0);
1443 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1444 estack_ax_v = res;
1445 estack_ax_t = REG_S64;
1446 next_pc += sizeof(struct binary_op);
1447 PO;
1448 }
1449 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
1450 {
1451 int res;
1452
1453 res = (stack_star_glob_match(stack, top, "!=") != 0);
1454 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1455 estack_ax_v = res;
1456 estack_ax_t = REG_S64;
1457 next_pc += sizeof(struct binary_op);
1458 PO;
1459 }
1460
1461 OP(BYTECODE_OP_EQ_S64):
1462 {
1463 int res;
1464
1465 res = (estack_bx_v == estack_ax_v);
1466 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1467 estack_ax_v = res;
1468 estack_ax_t = REG_S64;
1469 next_pc += sizeof(struct binary_op);
1470 PO;
1471 }
1472 OP(BYTECODE_OP_NE_S64):
1473 {
1474 int res;
1475
1476 res = (estack_bx_v != estack_ax_v);
1477 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1478 estack_ax_v = res;
1479 estack_ax_t = REG_S64;
1480 next_pc += sizeof(struct binary_op);
1481 PO;
1482 }
1483 OP(BYTECODE_OP_GT_S64):
1484 {
1485 int res;
1486
1487 res = (estack_bx_v > estack_ax_v);
1488 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1489 estack_ax_v = res;
1490 estack_ax_t = REG_S64;
1491 next_pc += sizeof(struct binary_op);
1492 PO;
1493 }
1494 OP(BYTECODE_OP_LT_S64):
1495 {
1496 int res;
1497
1498 res = (estack_bx_v < estack_ax_v);
1499 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1500 estack_ax_v = res;
1501 estack_ax_t = REG_S64;
1502 next_pc += sizeof(struct binary_op);
1503 PO;
1504 }
1505 OP(BYTECODE_OP_GE_S64):
1506 {
1507 int res;
1508
1509 res = (estack_bx_v >= estack_ax_v);
1510 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1511 estack_ax_v = res;
1512 estack_ax_t = REG_S64;
1513 next_pc += sizeof(struct binary_op);
1514 PO;
1515 }
1516 OP(BYTECODE_OP_LE_S64):
1517 {
1518 int res;
1519
1520 res = (estack_bx_v <= estack_ax_v);
1521 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1522 estack_ax_v = res;
1523 estack_ax_t = REG_S64;
1524 next_pc += sizeof(struct binary_op);
1525 PO;
1526 }
1527
1528 OP(BYTECODE_OP_EQ_DOUBLE):
1529 {
1530 int res;
1531
1532 res = (estack_bx(stack, top)->u.d == estack_ax(stack, top)->u.d);
1533 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1534 estack_ax_v = res;
1535 estack_ax_t = REG_S64;
1536 next_pc += sizeof(struct binary_op);
1537 PO;
1538 }
1539 OP(BYTECODE_OP_NE_DOUBLE):
1540 {
1541 int res;
1542
1543 res = (estack_bx(stack, top)->u.d != estack_ax(stack, top)->u.d);
1544 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1545 estack_ax_v = res;
1546 estack_ax_t = REG_S64;
1547 next_pc += sizeof(struct binary_op);
1548 PO;
1549 }
1550 OP(BYTECODE_OP_GT_DOUBLE):
1551 {
1552 int res;
1553
1554 res = (estack_bx(stack, top)->u.d > estack_ax(stack, top)->u.d);
1555 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1556 estack_ax_v = res;
1557 estack_ax_t = REG_S64;
1558 next_pc += sizeof(struct binary_op);
1559 PO;
1560 }
1561 OP(BYTECODE_OP_LT_DOUBLE):
1562 {
1563 int res;
1564
1565 res = (estack_bx(stack, top)->u.d < estack_ax(stack, top)->u.d);
1566 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1567 estack_ax_v = res;
1568 estack_ax_t = REG_S64;
1569 next_pc += sizeof(struct binary_op);
1570 PO;
1571 }
1572 OP(BYTECODE_OP_GE_DOUBLE):
1573 {
1574 int res;
1575
1576 res = (estack_bx(stack, top)->u.d >= estack_ax(stack, top)->u.d);
1577 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1578 estack_ax_v = res;
1579 estack_ax_t = REG_S64;
1580 next_pc += sizeof(struct binary_op);
1581 PO;
1582 }
1583 OP(BYTECODE_OP_LE_DOUBLE):
1584 {
1585 int res;
1586
1587 res = (estack_bx(stack, top)->u.d <= estack_ax(stack, top)->u.d);
1588 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1589 estack_ax_v = res;
1590 estack_ax_t = REG_S64;
1591 next_pc += sizeof(struct binary_op);
1592 PO;
1593 }
1594
1595 /* Mixed S64-double binary comparators */
1596 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1597 {
1598 int res;
1599
1600 res = (estack_bx(stack, top)->u.d == estack_ax_v);
1601 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1602 estack_ax_v = res;
1603 estack_ax_t = REG_S64;
1604 next_pc += sizeof(struct binary_op);
1605 PO;
1606 }
1607 OP(BYTECODE_OP_NE_DOUBLE_S64):
1608 {
1609 int res;
1610
1611 res = (estack_bx(stack, top)->u.d != estack_ax_v);
1612 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1613 estack_ax_v = res;
1614 estack_ax_t = REG_S64;
1615 next_pc += sizeof(struct binary_op);
1616 PO;
1617 }
1618 OP(BYTECODE_OP_GT_DOUBLE_S64):
1619 {
1620 int res;
1621
1622 res = (estack_bx(stack, top)->u.d > estack_ax_v);
1623 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1624 estack_ax_v = res;
1625 estack_ax_t = REG_S64;
1626 next_pc += sizeof(struct binary_op);
1627 PO;
1628 }
1629 OP(BYTECODE_OP_LT_DOUBLE_S64):
1630 {
1631 int res;
1632
1633 res = (estack_bx(stack, top)->u.d < estack_ax_v);
1634 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1635 estack_ax_v = res;
1636 estack_ax_t = REG_S64;
1637 next_pc += sizeof(struct binary_op);
1638 PO;
1639 }
1640 OP(BYTECODE_OP_GE_DOUBLE_S64):
1641 {
1642 int res;
1643
1644 res = (estack_bx(stack, top)->u.d >= estack_ax_v);
1645 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1646 estack_ax_v = res;
1647 estack_ax_t = REG_S64;
1648 next_pc += sizeof(struct binary_op);
1649 PO;
1650 }
1651 OP(BYTECODE_OP_LE_DOUBLE_S64):
1652 {
1653 int res;
1654
1655 res = (estack_bx(stack, top)->u.d <= estack_ax_v);
1656 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1657 estack_ax_v = res;
1658 estack_ax_t = REG_S64;
1659 next_pc += sizeof(struct binary_op);
1660 PO;
1661 }
1662
1663 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1664 {
1665 int res;
1666
1667 res = (estack_bx_v == estack_ax(stack, top)->u.d);
1668 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1669 estack_ax_v = res;
1670 estack_ax_t = REG_S64;
1671 next_pc += sizeof(struct binary_op);
1672 PO;
1673 }
1674 OP(BYTECODE_OP_NE_S64_DOUBLE):
1675 {
1676 int res;
1677
1678 res = (estack_bx_v != estack_ax(stack, top)->u.d);
1679 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1680 estack_ax_v = res;
1681 estack_ax_t = REG_S64;
1682 next_pc += sizeof(struct binary_op);
1683 PO;
1684 }
1685 OP(BYTECODE_OP_GT_S64_DOUBLE):
1686 {
1687 int res;
1688
1689 res = (estack_bx_v > estack_ax(stack, top)->u.d);
1690 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1691 estack_ax_v = res;
1692 estack_ax_t = REG_S64;
1693 next_pc += sizeof(struct binary_op);
1694 PO;
1695 }
1696 OP(BYTECODE_OP_LT_S64_DOUBLE):
1697 {
1698 int res;
1699
1700 res = (estack_bx_v < estack_ax(stack, top)->u.d);
1701 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1702 estack_ax_v = res;
1703 estack_ax_t = REG_S64;
1704 next_pc += sizeof(struct binary_op);
1705 PO;
1706 }
1707 OP(BYTECODE_OP_GE_S64_DOUBLE):
1708 {
1709 int res;
1710
1711 res = (estack_bx_v >= estack_ax(stack, top)->u.d);
1712 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1713 estack_ax_v = res;
1714 estack_ax_t = REG_S64;
1715 next_pc += sizeof(struct binary_op);
1716 PO;
1717 }
1718 OP(BYTECODE_OP_LE_S64_DOUBLE):
1719 {
1720 int res;
1721
1722 res = (estack_bx_v <= estack_ax(stack, top)->u.d);
1723 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1724 estack_ax_v = res;
1725 estack_ax_t = REG_S64;
1726 next_pc += sizeof(struct binary_op);
1727 PO;
1728 }
1729 OP(BYTECODE_OP_BIT_RSHIFT):
1730 {
1731 int64_t res;
1732
1733 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1734 ret = -EINVAL;
1735 goto end;
1736 }
1737
1738 /* Catch undefined behavior. */
1739 if (caa_unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1740 ret = -EINVAL;
1741 goto end;
1742 }
1743 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1744 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1745 estack_ax_v = res;
1746 estack_ax_t = REG_U64;
1747 next_pc += sizeof(struct binary_op);
1748 PO;
1749 }
1750 OP(BYTECODE_OP_BIT_LSHIFT):
1751 {
1752 int64_t res;
1753
1754 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1755 ret = -EINVAL;
1756 goto end;
1757 }
1758
1759 /* Catch undefined behavior. */
1760 if (caa_unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1761 ret = -EINVAL;
1762 goto end;
1763 }
1764 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1765 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1766 estack_ax_v = res;
1767 estack_ax_t = REG_U64;
1768 next_pc += sizeof(struct binary_op);
1769 PO;
1770 }
1771 OP(BYTECODE_OP_BIT_AND):
1772 {
1773 int64_t res;
1774
1775 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1776 ret = -EINVAL;
1777 goto end;
1778 }
1779
1780 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1781 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1782 estack_ax_v = res;
1783 estack_ax_t = REG_U64;
1784 next_pc += sizeof(struct binary_op);
1785 PO;
1786 }
1787 OP(BYTECODE_OP_BIT_OR):
1788 {
1789 int64_t res;
1790
1791 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1792 ret = -EINVAL;
1793 goto end;
1794 }
1795
1796 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1797 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1798 estack_ax_v = res;
1799 estack_ax_t = REG_U64;
1800 next_pc += sizeof(struct binary_op);
1801 PO;
1802 }
1803 OP(BYTECODE_OP_BIT_XOR):
1804 {
1805 int64_t res;
1806
1807 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1808 ret = -EINVAL;
1809 goto end;
1810 }
1811
1812 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1813 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1814 estack_ax_v = res;
1815 estack_ax_t = REG_U64;
1816 next_pc += sizeof(struct binary_op);
1817 PO;
1818 }
1819
1820 /* unary */
1821 OP(BYTECODE_OP_UNARY_PLUS):
1822 {
1823 /* Dynamic typing. */
1824 switch (estack_ax_t) {
1825 case REG_S64: /* Fall-through. */
1826 case REG_U64:
1827 JUMP_TO(BYTECODE_OP_UNARY_PLUS_S64);
1828 case REG_DOUBLE:
1829 JUMP_TO(BYTECODE_OP_UNARY_PLUS_DOUBLE);
1830 case REG_STRING: /* Fall-through */
1831 case REG_STAR_GLOB_STRING:
1832 ret = -EINVAL;
1833 goto end;
1834 default:
1835 ERR("Unknown interpreter register type (%d)",
1836 (int) estack_ax_t);
1837 ret = -EINVAL;
1838 goto end;
1839 }
1840 }
1841 OP(BYTECODE_OP_UNARY_MINUS):
1842 {
1843 /* Dynamic typing. */
1844 switch (estack_ax_t) {
1845 case REG_S64: /* Fall-through. */
1846 case REG_U64:
1847 JUMP_TO(BYTECODE_OP_UNARY_MINUS_S64);
1848 case REG_DOUBLE:
1849 JUMP_TO(BYTECODE_OP_UNARY_MINUS_DOUBLE);
1850 case REG_STRING: /* Fall-through */
1851 case REG_STAR_GLOB_STRING:
1852 ret = -EINVAL;
1853 goto end;
1854 default:
1855 ERR("Unknown interpreter register type (%d)",
1856 (int) estack_ax_t);
1857 ret = -EINVAL;
1858 goto end;
1859 }
1860 }
1861 OP(BYTECODE_OP_UNARY_NOT):
1862 {
1863 /* Dynamic typing. */
1864 switch (estack_ax_t) {
1865 case REG_S64: /* Fall-through. */
1866 case REG_U64:
1867 JUMP_TO(BYTECODE_OP_UNARY_NOT_S64);
1868 case REG_DOUBLE:
1869 JUMP_TO(BYTECODE_OP_UNARY_NOT_DOUBLE);
1870 case REG_STRING: /* Fall-through */
1871 case REG_STAR_GLOB_STRING:
1872 ret = -EINVAL;
1873 goto end;
1874 default:
1875 ERR("Unknown interpreter register type (%d)",
1876 (int) estack_ax_t);
1877 ret = -EINVAL;
1878 goto end;
1879 }
1880 next_pc += sizeof(struct unary_op);
1881 PO;
1882 }
1883
1884 OP(BYTECODE_OP_UNARY_BIT_NOT):
1885 {
1886 /* Dynamic typing. */
1887 if (!IS_INTEGER_REGISTER(estack_ax_t)) {
1888 ret = -EINVAL;
1889 goto end;
1890 }
1891
1892 estack_ax_v = ~(uint64_t) estack_ax_v;
1893 estack_ax_t = REG_U64;
1894 next_pc += sizeof(struct unary_op);
1895 PO;
1896 }
1897
1898 OP(BYTECODE_OP_UNARY_PLUS_S64):
1899 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1900 {
1901 next_pc += sizeof(struct unary_op);
1902 PO;
1903 }
1904 OP(BYTECODE_OP_UNARY_MINUS_S64):
1905 {
1906 estack_ax_v = -estack_ax_v;
1907 next_pc += sizeof(struct unary_op);
1908 PO;
1909 }
1910 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1911 {
1912 estack_ax(stack, top)->u.d = -estack_ax(stack, top)->u.d;
1913 next_pc += sizeof(struct unary_op);
1914 PO;
1915 }
1916 OP(BYTECODE_OP_UNARY_NOT_S64):
1917 {
1918 estack_ax_v = !estack_ax_v;
1919 estack_ax_t = REG_S64;
1920 next_pc += sizeof(struct unary_op);
1921 PO;
1922 }
1923 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1924 {
1925 estack_ax_v = !estack_ax(stack, top)->u.d;
1926 estack_ax_t = REG_S64;
1927 next_pc += sizeof(struct unary_op);
1928 PO;
1929 }
1930
1931 /* logical */
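/*
 * AND/OR implement short-circuit evaluation: the left operand's result
 * is in ax.  If it already decides the outcome (0 for AND, nonzero for
 * OR, normalized to 1), execution jumps to insn->skip_offset with that
 * value left in ax; otherwise the result is popped and the right
 * operand's bytecode runs next.
 */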
1932 OP(BYTECODE_OP_AND):
1933 {
1934 struct logical_op *insn = (struct logical_op *) pc;
1935
1936 if (estack_ax_t != REG_S64 && estack_ax_t != REG_U64) {
1937 ret = -EINVAL;
1938 goto end;
1939 }
1940 /* If AX is 0, skip and evaluate to 0 */
1941 if (unlikely(estack_ax_v == 0)) {
1942 dbg_printf("Jumping to bytecode offset %u\n",
1943 (unsigned int) insn->skip_offset);
1944 next_pc = start_pc + insn->skip_offset;
1945 } else {
1946 /* Pop 1 when jump not taken */
1947 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1948 next_pc += sizeof(struct logical_op);
1949 }
1950 PO;
1951 }
1952 OP(BYTECODE_OP_OR):
1953 {
1954 struct logical_op *insn = (struct logical_op *) pc;
1955
1956 if (estack_ax_t != REG_S64 && estack_ax_t != REG_U64) {
1957 ret = -EINVAL;
1958 goto end;
1959 }
1960 /* If AX is nonzero, skip and evaluate to 1 */
1961 if (unlikely(estack_ax_v != 0)) {
1962 estack_ax_v = 1;
1963 dbg_printf("Jumping to bytecode offset %u\n",
1964 (unsigned int) insn->skip_offset);
1965 next_pc = start_pc + insn->skip_offset;
1966 } else {
1967 /* Pop 1 when jump not taken */
1968 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1969 next_pc += sizeof(struct logical_op);
1970 }
1971 PO;
1972 }
1973
1974
1975 /* load field ref */
1976 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1977 {
1978 struct load_op *insn = (struct load_op *) pc;
1979 struct field_ref *ref = (struct field_ref *) insn->data;
1980
1981 dbg_printf("load field ref offset %u type string\n",
1982 ref->offset);
1983 estack_push(stack, top, ax, bx, ax_t, bx_t);
1984 estack_ax(stack, top)->u.s.str =
1985 *(const char * const *) &interpreter_stack_data[ref->offset];
1986 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1987 dbg_printf("Interpreter warning: loading a NULL string.\n");
1988 ret = -EINVAL;
1989 goto end;
1990 }
1991 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
1992 estack_ax(stack, top)->u.s.literal_type =
1993 ESTACK_STRING_LITERAL_TYPE_NONE;
1994 estack_ax_t = REG_STRING;
1995 dbg_printf("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1996 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1997 PO;
1998 }
1999
2000 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
2001 {
2002 struct load_op *insn = (struct load_op *) pc;
2003 struct field_ref *ref = (struct field_ref *) insn->data;
2004
2005 dbg_printf("load field ref offset %u type sequence\n",
2006 ref->offset);
2007 estack_push(stack, top, ax, bx, ax_t, bx_t);
2008 estack_ax(stack, top)->u.s.seq_len =
2009 *(unsigned long *) &interpreter_stack_data[ref->offset];
2010 estack_ax(stack, top)->u.s.str =
2011 *(const char **) (&interpreter_stack_data[ref->offset
2012 + sizeof(unsigned long)]);
2013 estack_ax_t = REG_STRING;
2014 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2015 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2016 ret = -EINVAL;
2017 goto end;
2018 }
2019 estack_ax(stack, top)->u.s.literal_type =
2020 ESTACK_STRING_LITERAL_TYPE_NONE;
2021 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2022 PO;
2023 }
2024
2025 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
2026 {
2027 struct load_op *insn = (struct load_op *) pc;
2028 struct field_ref *ref = (struct field_ref *) insn->data;
2029
2030 dbg_printf("load field ref offset %u type s64\n",
2031 ref->offset);
2032 estack_push(stack, top, ax, bx, ax_t, bx_t);
2033 estack_ax_v =
2034 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
2035 estack_ax_t = REG_S64;
2036 dbg_printf("ref load s64 %" PRIi64 "\n", estack_ax_v);
2037 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2038 PO;
2039 }
2040
2041 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
2042 {
2043 struct load_op *insn = (struct load_op *) pc;
2044 struct field_ref *ref = (struct field_ref *) insn->data;
2045
2046 dbg_printf("load field ref offset %u type double\n",
2047 ref->offset);
2048 estack_push(stack, top, ax, bx, ax_t, bx_t);
2049 memcpy(&estack_ax(stack, top)->u.d, &interpreter_stack_data[ref->offset],
2050 sizeof(struct literal_double));
2051 estack_ax_t = REG_DOUBLE;
2052 dbg_printf("ref load double %g\n", estack_ax(stack, top)->u.d);
2053 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2054 PO;
2055 }
2056
2057 /* load from immediate operand */
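/*
 * Immediate operands are embedded in the instruction stream right
 * after the load_op header (insn->data).  String immediates are
 * NUL-terminated, hence next_pc advances by strlen() + 1; numeric
 * and double immediates advance by the size of their literal struct.
 */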
2058 OP(BYTECODE_OP_LOAD_STRING):
2059 {
2060 struct load_op *insn = (struct load_op *) pc;
2061
2062 dbg_printf("load string %s\n", insn->data);
2063 estack_push(stack, top, ax, bx, ax_t, bx_t);
2064 estack_ax(stack, top)->u.s.str = insn->data;
2065 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2066 estack_ax(stack, top)->u.s.literal_type =
2067 ESTACK_STRING_LITERAL_TYPE_PLAIN;
2068 estack_ax_t = REG_STRING;
2069 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
2070 PO;
2071 }
2072
2073 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
2074 {
2075 struct load_op *insn = (struct load_op *) pc;
2076
2077 dbg_printf("load globbing pattern %s\n", insn->data);
2078 estack_push(stack, top, ax, bx, ax_t, bx_t);
2079 estack_ax(stack, top)->u.s.str = insn->data;
2080 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2081 estack_ax(stack, top)->u.s.literal_type =
2082 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
2083 estack_ax_t = REG_STAR_GLOB_STRING;
2084 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
2085 PO;
2086 }
2087
2088 OP(BYTECODE_OP_LOAD_S64):
2089 {
2090 struct load_op *insn = (struct load_op *) pc;
2091
2092 estack_push(stack, top, ax, bx, ax_t, bx_t);
2093 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
2094 estack_ax_t = REG_S64;
2095 dbg_printf("load s64 %" PRIi64 "\n", estack_ax_v);
2096 next_pc += sizeof(struct load_op)
2097 + sizeof(struct literal_numeric);
2098 PO;
2099 }
2100
2101 OP(BYTECODE_OP_LOAD_DOUBLE):
2102 {
2103 struct load_op *insn = (struct load_op *) pc;
2104
2105 estack_push(stack, top, ax, bx, ax_t, bx_t);
2106 memcpy(&estack_ax(stack, top)->u.d, insn->data,
2107 sizeof(struct literal_double));
2108 estack_ax_t = REG_DOUBLE;
2109 dbg_printf("load double %g\n", estack_ax(stack, top)->u.d);
2110 next_pc += sizeof(struct load_op)
2111 + sizeof(struct literal_double);
2112 PO;
2113 }
2114
2115 /* cast */
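/*
 * CAST_TO_S64 dispatches on the dynamic type of the top of stack:
 * S64 needs no work (NOP), DOUBLE is truncated toward zero by the C
 * conversion, a U64 value already sits in the same 64-bit register
 * so only its type tag changes, and string types cannot be cast to
 * an integer.
 */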
2116 OP(BYTECODE_OP_CAST_TO_S64):
2117 {
2118 /* Dynamic typing. */
2119 switch (estack_ax_t) {
2120 case REG_S64:
2121 JUMP_TO(BYTECODE_OP_CAST_NOP);
2122 case REG_DOUBLE:
2123 JUMP_TO(BYTECODE_OP_CAST_DOUBLE_TO_S64);
2124 case REG_U64:
2125 estack_ax_t = REG_S64;
2126 next_pc += sizeof(struct cast_op);
PO;
2127 case REG_STRING: /* Fall-through */
2128 case REG_STAR_GLOB_STRING:
2129 ret = -EINVAL;
2130 goto end;
2131 default:
2132 ERR("Unknown interpreter register type (%d)",
2133 (int) estack_ax_t);
2134 ret = -EINVAL;
2135 goto end;
2136 }
2137 }
2138
2139 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
2140 {
2141 estack_ax_v = (int64_t) estack_ax(stack, top)->u.d;
2142 estack_ax_t = REG_S64;
2143 next_pc += sizeof(struct cast_op);
2144 PO;
2145 }
2146
2147 OP(BYTECODE_OP_CAST_NOP):
2148 {
2149 next_pc += sizeof(struct cast_op);
2150 PO;
2151 }
2152
2153 /* get context ref */
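/*
 * Context references index into ctx->fields[] and fetch the value at
 * interpretation time through the field's get_value() callback.  The
 * generic GET_CONTEXT_REF op switches on the dynamic type selector
 * (v.sel); the _STRING/_S64/_DOUBLE variants are presumably emitted
 * when the context field's type is known before interpretation.
 */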
2154 OP(BYTECODE_OP_GET_CONTEXT_REF):
2155 {
2156 struct load_op *insn = (struct load_op *) pc;
2157 struct field_ref *ref = (struct field_ref *) insn->data;
2158 const struct lttng_ust_ctx_field *ctx_field;
2159 struct lttng_ust_ctx_value v;
2160
2161 dbg_printf("get context ref offset %u type dynamic\n",
2162 ref->offset);
2163 ctx_field = &ctx->fields[ref->offset];
2164 ctx_field->get_value(ctx_field->priv, probe_ctx, &v);
2165 estack_push(stack, top, ax, bx, ax_t, bx_t);
2166 switch (v.sel) {
2167 case LTTNG_UST_DYNAMIC_TYPE_NONE:
2168 ret = -EINVAL;
2169 goto end;
2170 case LTTNG_UST_DYNAMIC_TYPE_S64:
2171 estack_ax_v = v.u.s64;
2172 estack_ax_t = REG_S64;
2173 dbg_printf("ref get context dynamic s64 %" PRIi64 "\n", estack_ax_v);
2174 break;
2175 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE:
2176 estack_ax(stack, top)->u.d = v.u.d;
2177 estack_ax_t = REG_DOUBLE;
2178 dbg_printf("ref get context dynamic double %g\n", estack_ax(stack, top)->u.d);
2179 break;
2180 case LTTNG_UST_DYNAMIC_TYPE_STRING:
2181 estack_ax(stack, top)->u.s.str = v.u.str;
2182 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2183 dbg_printf("Interpreter warning: loading a NULL string.\n");
2184 ret = -EINVAL;
2185 goto end;
2186 }
2187 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2188 estack_ax(stack, top)->u.s.literal_type =
2189 ESTACK_STRING_LITERAL_TYPE_NONE;
2190 dbg_printf("ref get context dynamic string %s\n", estack_ax(stack, top)->u.s.str);
2191 estack_ax_t = REG_STRING;
2192 break;
2193 default:
2194 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v.sel);
2195 ret = -EINVAL;
2196 goto end;
2197 }
2198 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2199 PO;
2200 }
2201
2202 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
2203 {
2204 struct load_op *insn = (struct load_op *) pc;
2205 struct field_ref *ref = (struct field_ref *) insn->data;
2206 const struct lttng_ust_ctx_field *ctx_field;
2207 struct lttng_ust_ctx_value v;
2208
2209 dbg_printf("get context ref offset %u type string\n",
2210 ref->offset);
2211 ctx_field = &ctx->fields[ref->offset];
2212 ctx_field->get_value(ctx_field->priv, probe_ctx, &v);
2213 estack_push(stack, top, ax, bx, ax_t, bx_t);
2214 estack_ax(stack, top)->u.s.str = v.u.str;
2215 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2216 dbg_printf("Interpreter warning: loading a NULL string.\n");
2217 ret = -EINVAL;
2218 goto end;
2219 }
2220 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2221 estack_ax(stack, top)->u.s.literal_type =
2222 ESTACK_STRING_LITERAL_TYPE_NONE;
2223 estack_ax_t = REG_STRING;
2224 dbg_printf("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
2225 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2226 PO;
2227 }
2228
2229 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
2230 {
2231 struct load_op *insn = (struct load_op *) pc;
2232 struct field_ref *ref = (struct field_ref *) insn->data;
2233 const struct lttng_ust_ctx_field *ctx_field;
2234 struct lttng_ust_ctx_value v;
2235
2236 dbg_printf("get context ref offset %u type s64\n",
2237 ref->offset);
2238 ctx_field = &ctx->fields[ref->offset];
2239 ctx_field->get_value(ctx_field->priv, probe_ctx, &v);
2240 estack_push(stack, top, ax, bx, ax_t, bx_t);
2241 estack_ax_v = v.u.s64;
2242 estack_ax_t = REG_S64;
2243 dbg_printf("ref get context s64 %" PRIi64 "\n", estack_ax_v);
2244 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2245 PO;
2246 }
2247
2248 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
2249 {
2250 struct load_op *insn = (struct load_op *) pc;
2251 struct field_ref *ref = (struct field_ref *) insn->data;
2252 const struct lttng_ust_ctx_field *ctx_field;
2253 struct lttng_ust_ctx_value v;
2254
2255 dbg_printf("get context ref offset %u type double\n",
2256 ref->offset);
2257 ctx_field = &ctx->fields[ref->offset];
2258 ctx_field->get_value(ctx_field->priv, probe_ctx, &v);
2259 estack_push(stack, top, ax, bx, ax_t, bx_t);
2260 memcpy(&estack_ax(stack, top)->u.d, &v.u.d, sizeof(struct literal_double));
2261 estack_ax_t = REG_DOUBLE;
2262 dbg_printf("ref get context double %g\n", estack_ax(stack, top)->u.d);
2263 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2264 PO;
2265 }
2266
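/*
 * The GET_*_ROOT ops start an object traversal: they push a REG_PTR
 * entry naming the root to walk from (context, app context or event
 * payload), which the GET_SYMBOL/GET_INDEX_* ops refine and the
 * LOAD_FIELD* ops finally dereference.
 */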
2267 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
2268 {
2269 dbg_printf("op get context root\n");
2270 estack_push(stack, top, ax, bx, ax_t, bx_t);
2271 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
2272 /* "field" only needed for variants. */
2273 estack_ax(stack, top)->u.ptr.field = NULL;
2274 estack_ax_t = REG_PTR;
2275 next_pc += sizeof(struct load_op);
2276 PO;
2277 }
2278
2279 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
2280 {
2281 dbg_printf("op get app context root\n");
2282 estack_push(stack, top, ax, bx, ax_t, bx_t);
2283 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_APP_CONTEXT;
2284 /* "field" only needed for variants. */
2285 estack_ax(stack, top)->u.ptr.field = NULL;
2286 estack_ax_t = REG_PTR;
2287 next_pc += sizeof(struct load_op);
2288 PO;
2289 }
2290
2291 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
2292 {
2293 dbg_printf("op get app payload root\n");
2294 estack_push(stack, top, ax, bx, ax_t, bx_t);
2295 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
2296 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
2297 /* "field" only needed for variants. */
2298 estack_ax(stack, top)->u.ptr.field = NULL;
2299 estack_ax_t = REG_PTR;
2300 next_pc += sizeof(struct load_op);
2301 PO;
2302 }
2303
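/*
 * GET_SYMBOL is expected to have been resolved by the specialization
 * pass, and GET_SYMBOL_FIELD is reserved for variants, which are not
 * implemented; reaching either opcode at interpretation time is
 * therefore reported as an error.
 */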
2304 OP(BYTECODE_OP_GET_SYMBOL):
2305 {
2306 dbg_printf("op get symbol\n");
2307 switch (estack_ax(stack, top)->u.ptr.type) {
2308 case LOAD_OBJECT:
2309 ERR("Nested fields not implemented yet.");
2310 ret = -EINVAL;
2311 goto end;
2312 case LOAD_ROOT_CONTEXT:
2313 case LOAD_ROOT_APP_CONTEXT:
2314 case LOAD_ROOT_PAYLOAD:
2315 /*
2316 * symbol lookup is performed by
2317 * specialization.
2318 */
2319 ret = -EINVAL;
2320 goto end;
2321 }
2322 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
2323 PO;
2324 }
2325
2326 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
2327 {
2328 /*
2329 * Used for first variant encountered in a
2330 * traversal. Variants are not implemented yet.
2331 */
2332 ret = -EINVAL;
2333 goto end;
2334 }
2335
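/*
 * GET_INDEX_U16/U64 delegate to dynamic_get_index(), which resolves
 * the REG_PTR entry at the top of the stack to the selected member
 * and updates that entry in place; the cached estack_ax_v/estack_ax_t
 * registers are then refreshed from it.  The two variants only differ
 * in the width of the index encoded in the instruction.
 */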
2336 OP(BYTECODE_OP_GET_INDEX_U16):
2337 {
2338 struct load_op *insn = (struct load_op *) pc;
2339 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
2340
2341 dbg_printf("op get index u16\n");
2342 ret = dynamic_get_index(ctx, probe_ctx, bytecode, index->index, estack_ax(stack, top));
2343 if (ret)
2344 goto end;
2345 estack_ax_v = estack_ax(stack, top)->u.v;
2346 estack_ax_t = estack_ax(stack, top)->type;
2347 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
2348 PO;
2349 }
2350
2351 OP(BYTECODE_OP_GET_INDEX_U64):
2352 {
2353 struct load_op *insn = (struct load_op *) pc;
2354 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
2355
2356 dbg_printf("op get index u64\n");
2357 ret = dynamic_get_index(ctx, probe_ctx, bytecode, index->index, estack_ax(stack, top));
2358 if (ret)
2359 goto end;
2360 estack_ax_v = estack_ax(stack, top)->u.v;
2361 estack_ax_t = estack_ax(stack, top)->type;
2362 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
2363 PO;
2364 }
2365
2366 OP(BYTECODE_OP_LOAD_FIELD):
2367 {
2368 dbg_printf("op load field\n");
2369 ret = dynamic_load_field(estack_ax(stack, top));
2370 if (ret)
2371 goto end;
2372 estack_ax_v = estack_ax(stack, top)->u.v;
2373 estack_ax_t = estack_ax(stack, top)->type;
2374 next_pc += sizeof(struct load_op);
2375 PO;
2376 }
2377
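/*
 * The LOAD_FIELD_{S,U}{8,16,32,64} ops dereference the pointer
 * prepared by the traversal above and widen the value to a 64-bit
 * register, tagged REG_S64 or REG_U64 according to signedness.  The
 * generic LOAD_FIELD op handles fields whose type is only resolved
 * at runtime, via dynamic_load_field().
 */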
2378 OP(BYTECODE_OP_LOAD_FIELD_S8):
2379 {
2380 dbg_printf("op load field s8\n");
2381
2382 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
2383 estack_ax_t = REG_S64;
2384 next_pc += sizeof(struct load_op);
2385 PO;
2386 }
2387 OP(BYTECODE_OP_LOAD_FIELD_S16):
2388 {
2389 dbg_printf("op load field s16\n");
2390
2391 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
2392 estack_ax_t = REG_S64;
2393 next_pc += sizeof(struct load_op);
2394 PO;
2395 }
2396 OP(BYTECODE_OP_LOAD_FIELD_S32):
2397 {
2398 dbg_printf("op load field s32\n");
2399
2400 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
2401 estack_ax_t = REG_S64;
2402 next_pc += sizeof(struct load_op);
2403 PO;
2404 }
2405 OP(BYTECODE_OP_LOAD_FIELD_S64):
2406 {
2407 dbg_printf("op load field s64\n");
2408
2409 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
2410 estack_ax_t = REG_S64;
2411 next_pc += sizeof(struct load_op);
2412 PO;
2413 }
2414 OP(BYTECODE_OP_LOAD_FIELD_U8):
2415 {
2416 dbg_printf("op load field u8\n");
2417
2418 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
2419 estack_ax_t = REG_U64;
2420 next_pc += sizeof(struct load_op);
2421 PO;
2422 }
2423 OP(BYTECODE_OP_LOAD_FIELD_U16):
2424 {
2425 dbg_printf("op load field u16\n");
2426
2427 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
2428 estack_ax_t = REG_U64;
2429 next_pc += sizeof(struct load_op);
2430 PO;
2431 }
2432 OP(BYTECODE_OP_LOAD_FIELD_U32):
2433 {
2434 dbg_printf("op load field u32\n");
2435
2436 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
2437 estack_ax_t = REG_U64;
2438 next_pc += sizeof(struct load_op);
2439 PO;
2440 }
2441 OP(BYTECODE_OP_LOAD_FIELD_U64):
2442 {
2443 dbg_printf("op load field u64\n");
2444
2445 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
2446 estack_ax_t = REG_U64;
2447 next_pc += sizeof(struct load_op);
2448 PO;
2449 }
2450 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
2451 {
2452 dbg_printf("op load field double\n");
2453
2454 memcpy(&estack_ax(stack, top)->u.d,
2455 estack_ax(stack, top)->u.ptr.ptr,
2456 sizeof(struct literal_double));
2457 estack_ax(stack, top)->type = REG_DOUBLE;
2458 next_pc += sizeof(struct load_op);
2459 PO;
2460 }
2461
2462 OP(BYTECODE_OP_LOAD_FIELD_STRING):
2463 {
2464 const char *str;
2465
2466 dbg_printf("op load field string\n");
2467 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
2468 estack_ax(stack, top)->u.s.str = str;
2469 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2470 dbg_printf("Interpreter warning: loading a NULL string.\n");
2471 ret = -EINVAL;
2472 goto end;
2473 }
2474 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2475 estack_ax(stack, top)->u.s.literal_type =
2476 ESTACK_STRING_LITERAL_TYPE_NONE;
2477 estack_ax(stack, top)->type = REG_STRING;
2478 next_pc += sizeof(struct load_op);
2479 PO;
2480 }
2481
2482 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
2483 {
2484 const char *ptr;
2485
2486 dbg_printf("op load field string sequence\n");
2487 ptr = estack_ax(stack, top)->u.ptr.ptr;
2488 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
2489 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
2490 estack_ax(stack, top)->type = REG_STRING;
2491 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2492 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2493 ret = -EINVAL;
2494 goto end;
2495 }
2496 estack_ax(stack, top)->u.s.literal_type =
2497 ESTACK_STRING_LITERAL_TYPE_NONE;
2498 next_pc += sizeof(struct load_op);
2499 PO;
2500 }
2501
2502 END_OP
2503 end:
2504 /* No need to prepare output if an error occurred. */
2505 if (ret)
2506 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR;
2507
2508 /* Prepare output. */
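/*
 * For filter bytecode, the interpreter's return value (retval) is
 * translated into ACCEPT/REJECT in the caller-provided filter
 * context; for capture bytecode, the top-of-stack value is converted
 * into a struct lttng_interpreter_output for the caller instead.
 */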
2509 switch (ust_bytecode->type) {
2510 case LTTNG_UST_BYTECODE_TYPE_FILTER:
2511 {
2512 struct lttng_ust_bytecode_filter_ctx *filter_ctx =
2513 (struct lttng_ust_bytecode_filter_ctx *) caller_ctx;
2514 if (retval)
2515 filter_ctx->result = LTTNG_UST_BYTECODE_FILTER_ACCEPT;
2516 else
2517 filter_ctx->result = LTTNG_UST_BYTECODE_FILTER_REJECT;
2518 break;
2519 }
2520 case LTTNG_UST_BYTECODE_TYPE_CAPTURE:
2521 ret = lttng_bytecode_interpret_format_output(estack_ax(stack, top),
2522 (struct lttng_interpreter_output *) caller_ctx);
2523 break;
2524 default:
2525 ret = -EINVAL;
2526 break;
2527 }
2528 if (ret)
2529 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR;
2530 else
2531 return LTTNG_UST_BYTECODE_INTERPRETER_OK;
2532 }
2533
2534 /*
2535 * Return LTTNG_UST_EVENT_FILTER_ACCEPT or LTTNG_UST_EVENT_FILTER_REJECT.
2536 */
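/*
 * Each filter bytecode attached to the event is evaluated in turn and
 * the event is accepted as soon as one run both succeeds and reports
 * LTTNG_UST_BYTECODE_FILTER_ACCEPT, i.e. multiple filters on the same
 * event are OR-ed together; an interpreter error counts as a reject
 * for that bytecode.
 */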
2537 int lttng_ust_interpret_event_filter(const struct lttng_ust_event_common *event,
2538 const char *interpreter_stack_data,
2539 struct lttng_ust_probe_ctx *probe_ctx,
2540 void *event_filter_ctx __attribute__((unused)))
2541 {
2542 struct lttng_ust_bytecode_runtime *filter_bc_runtime;
2543 struct cds_list_head *filter_bytecode_runtime_head = &event->priv->filter_bytecode_runtime_head;
2544 struct lttng_ust_bytecode_filter_ctx bytecode_filter_ctx;
2545 bool filter_record = false;
2546
2547 cds_list_for_each_entry_rcu(filter_bc_runtime, filter_bytecode_runtime_head, node) {
2548 if (caa_likely(filter_bc_runtime->interpreter_func(filter_bc_runtime,
2549 interpreter_stack_data, probe_ctx, &bytecode_filter_ctx) == LTTNG_UST_BYTECODE_INTERPRETER_OK)) {
2550 if (caa_unlikely(bytecode_filter_ctx.result == LTTNG_UST_BYTECODE_FILTER_ACCEPT)) {
2551 filter_record = true;
2552 break;
2553 }
2554 }
2555 }
2556 if (filter_record)
2557 return LTTNG_UST_EVENT_FILTER_ACCEPT;
2558 else
2559 return LTTNG_UST_EVENT_FILTER_REJECT;
2560 }
2561
2562 #undef START_OP
2563 #undef OP
2564 #undef PO
2565 #undef END_OP