bytecode: Add `OBJECT_TYPE_{UN,}SIGNED_ENUM` type
src/lttng-bytecode-interpreter.c (lttng-modules.git)
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-bytecode-interpreter.c
4 *
5 * LTTng modules bytecode interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
14
15 #include <lttng/lttng-bytecode.h>
16 #include <lttng/string-utils.h>
17
18 /*
19 * get_char must be called with the page fault handler disabled when it may
20 * be asked to read from user-space memory.
21 */
22 static
23 char get_char(struct estack_entry *reg, size_t offset)
24 {
25 if (unlikely(offset >= reg->u.s.seq_len))
26 return '\0';
27 if (reg->u.s.user) {
28 char c;
29
30 /* Handle invalid access as end of string. */
31 if (unlikely(!lttng_access_ok(VERIFY_READ,
32 reg->u.s.user_str + offset,
33 sizeof(c))))
34 return '\0';
35 /* Handle fault (nonzero return value) as end of string. */
36 if (unlikely(__copy_from_user_inatomic(&c,
37 reg->u.s.user_str + offset,
38 sizeof(c))))
39 return '\0';
40 return c;
41 } else {
42 return reg->u.s.str[offset];
43 }
44 }
45
46 /*
47 * -1: wildcard found.
48 * -2: unknown escape char.
49 * 0: normal char.
50 */
51 static
52 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
53 {
54 switch (*c) {
55 case '\\':
56 (*offset)++;
57 *c = get_char(reg, *offset);
58 switch (*c) {
59 case '\\':
60 case '*':
61 return 0;
62 default:
63 return -2;
64 }
65 case '*':
66 return -1;
67 default:
68 return 0;
69 }
70 }
71
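/*
 * Adapter matching the character access callback signature expected by
 * strutils_star_glob_match_char_cb(): "data" is the estack entry holding
 * the string.
 */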
72 static
73 char get_char_at_cb(size_t at, void *data)
74 {
75 return get_char(data, at);
76 }
77
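/*
 * Match the star-glob pattern register against the candidate string register
 * on the stack top, reading user-space strings with page faults disabled.
 * Returns 0 on match and nonzero otherwise (strcmp-like convention), so the
 * EQ/NE comparators can test the result against 0.
 */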
78 static
79 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
80 {
81 bool has_user = false;
82 int result;
83 struct estack_entry *pattern_reg;
84 struct estack_entry *candidate_reg;
85
86 /* Disable the page fault handler when reading from userspace. */
87 if (estack_bx(stack, top)->u.s.user
88 || estack_ax(stack, top)->u.s.user) {
89 has_user = true;
90 pagefault_disable();
91 }
92
93 /* Find out which side is the pattern vs. the candidate. */
94 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
95 pattern_reg = estack_ax(stack, top);
96 candidate_reg = estack_bx(stack, top);
97 } else {
98 pattern_reg = estack_bx(stack, top);
99 candidate_reg = estack_ax(stack, top);
100 }
101
102 /* Perform the match operation. */
103 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
104 pattern_reg, get_char_at_cb, candidate_reg);
105 if (has_user)
106 pagefault_enable();
107
108 return result;
109 }
110
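/*
 * Compare the two string registers at the stack top with strcmp-like
 * semantics. Plain string literals may contain '*' wildcards and '\' escapes
 * (handled by parse_char()); user-space strings are read with page faults
 * disabled, and a faulting access is treated as end of string.
 */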
111 static
112 int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
113 {
114 size_t offset_bx = 0, offset_ax = 0;
115 int diff, has_user = 0;
116
117 if (estack_bx(stack, top)->u.s.user
118 || estack_ax(stack, top)->u.s.user) {
119 has_user = 1;
120 pagefault_disable();
121 }
122
123 for (;;) {
124 int ret;
125 int escaped_r0 = 0;
126 char char_bx, char_ax;
127
128 char_bx = get_char(estack_bx(stack, top), offset_bx);
129 char_ax = get_char(estack_ax(stack, top), offset_ax);
130
131 if (unlikely(char_bx == '\0')) {
132 if (char_ax == '\0') {
133 diff = 0;
134 break;
135 } else {
136 if (estack_ax(stack, top)->u.s.literal_type ==
137 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
138 ret = parse_char(estack_ax(stack, top),
139 &char_ax, &offset_ax);
140 if (ret == -1) {
141 diff = 0;
142 break;
143 }
144 }
145 diff = -1;
146 break;
147 }
148 }
149 if (unlikely(char_ax == '\0')) {
150 if (estack_bx(stack, top)->u.s.literal_type ==
151 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
152 ret = parse_char(estack_bx(stack, top),
153 &char_bx, &offset_bx);
154 if (ret == -1) {
155 diff = 0;
156 break;
157 }
158 }
159 diff = 1;
160 break;
161 }
162 if (estack_bx(stack, top)->u.s.literal_type ==
163 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
164 ret = parse_char(estack_bx(stack, top),
165 &char_bx, &offset_bx);
166 if (ret == -1) {
167 diff = 0;
168 break;
169 } else if (ret == -2) {
170 escaped_r0 = 1;
171 }
172 /* else compare both char */
173 }
174 if (estack_ax(stack, top)->u.s.literal_type ==
175 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
176 ret = parse_char(estack_ax(stack, top),
177 &char_ax, &offset_ax);
178 if (ret == -1) {
179 diff = 0;
180 break;
181 } else if (ret == -2) {
182 if (!escaped_r0) {
183 diff = -1;
184 break;
185 }
186 } else {
187 if (escaped_r0) {
188 diff = 1;
189 break;
190 }
191 }
192 } else {
193 if (escaped_r0) {
194 diff = 1;
195 break;
196 }
197 }
198 diff = char_bx - char_ax;
199 if (diff != 0)
200 break;
201 offset_bx++;
202 offset_ax++;
203 }
204 if (has_user)
205 pagefault_enable();
206
207 return diff;
208 }
209
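/*
 * Interpreter stub that unconditionally discards the event.
 */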
210 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data,
211 struct lttng_probe_ctx *lttng_probe_ctx,
212 const char *filter_stack_data)
213 {
214 return LTTNG_INTERPRETER_DISCARD;
215 }
216
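/*
 * The interpreter loop is written in terms of the START_OP/OP/PO/END_OP
 * macros so that the same opcode handlers can be compiled either as a switch
 * statement (INTERPRETER_USE_SWITCH) or as a computed-goto dispatch table
 * when the compiler supports taking the address of labels.
 */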
217 #ifdef INTERPRETER_USE_SWITCH
218
219 /*
220 * Fallback for compilers that do not support taking the address of labels.
221 */
222
223 #define START_OP \
224 start_pc = &bytecode->code[0]; \
225 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
226 pc = next_pc) { \
227 dbg_printk("LTTng: Executing op %s (%u)\n", \
228 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
229 (unsigned int) *(bytecode_opcode_t *) pc); \
230 switch (*(bytecode_opcode_t *) pc) {
231
232 #define OP(name) case name
233
234 #define PO break
235
236 #define END_OP } \
237 }
238
239 #else
240
241 /*
242 * Dispatch-table based interpreter.
243 */
244
245 #define START_OP \
246 start_pc = &bytecode->code[0]; \
247 pc = next_pc = start_pc; \
248 if (unlikely(pc - start_pc >= bytecode->len)) \
249 goto end; \
250 goto *dispatch[*(bytecode_opcode_t *) pc];
251
252 #define OP(name) \
253 LABEL_##name
254
255 #define PO \
256 pc = next_pc; \
257 goto *dispatch[*(bytecode_opcode_t *) pc];
258
259 #define END_OP
260
261 #endif
262
263 #define IS_INTEGER_REGISTER(reg_type) \
264 (reg_type == REG_S64 || reg_type == REG_U64)
265
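/*
 * Resolve context field "idx" into a load_ptr object: fetch its value
 * through the context field get_value() callback and map the field type to
 * the matching OBJECT_TYPE_* (integers, enumerations, strings and string
 * arrays/sequences). Structures, variants and non-string compound types are
 * rejected.
 */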
266 static int context_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
267 struct load_ptr *ptr,
268 uint32_t idx)
269 {
270
271 struct lttng_ctx_field *ctx_field;
272 struct lttng_event_field *field;
273 union lttng_ctx_value v;
274
275 ctx_field = &lttng_static_ctx->fields[idx];
276 field = &ctx_field->event_field;
277 ptr->type = LOAD_OBJECT;
278 /* field is only used for types nested within variants. */
279 ptr->field = NULL;
280
281 switch (field->type.atype) {
282 case atype_integer:
283 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
284 if (field->type.u.integer.signedness) {
285 ptr->object_type = OBJECT_TYPE_S64;
286 ptr->u.s64 = v.s64;
287 ptr->ptr = &ptr->u.s64;
288 } else {
289 ptr->object_type = OBJECT_TYPE_U64;
290 ptr->u.u64 = v.s64; /* Cast. */
291 ptr->ptr = &ptr->u.u64;
292 }
293 break;
294 case atype_enum_nestable:
295 {
296 const struct lttng_integer_type *itype =
297 &field->type.u.enum_nestable.container_type->u.integer;
298
299 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
300 if (itype->signedness) {
301 ptr->object_type = OBJECT_TYPE_SIGNED_ENUM;
302 ptr->u.s64 = v.s64;
303 ptr->ptr = &ptr->u.s64;
304 } else {
305 ptr->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
306 ptr->u.u64 = v.s64; /* Cast. */
307 ptr->ptr = &ptr->u.u64;
308 }
309 break;
310 }
311 case atype_array_nestable:
312 if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
313 printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
314 return -EINVAL;
315 }
316 if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
317 printk(KERN_WARNING "LTTng: bytecode: Only string arrays are supported for contexts.\n");
318 return -EINVAL;
319 }
320 ptr->object_type = OBJECT_TYPE_STRING;
321 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
322 ptr->ptr = v.str;
323 break;
324 case atype_sequence_nestable:
325 if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
326 printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
327 return -EINVAL;
328 }
329 if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
330 printk(KERN_WARNING "LTTng: bytecode: Only string sequences are supported for contexts.\n");
331 return -EINVAL;
332 }
333 ptr->object_type = OBJECT_TYPE_STRING;
334 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
335 ptr->ptr = v.str;
336 break;
337 case atype_string:
338 ptr->object_type = OBJECT_TYPE_STRING;
339 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
340 ptr->ptr = v.str;
341 break;
342 case atype_struct_nestable:
343 printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
344 return -EINVAL;
345 case atype_variant_nestable:
346 printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
347 return -EINVAL;
348 default:
349 printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d\n", (int) field->type.atype);
350 return -EINVAL;
351 }
352 return 0;
353 }
354
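/*
 * Apply a get-index operation to the object on the stack top: index into an
 * array or sequence element (with a bounds check for sequences), resolve a
 * context field for the context roots, or offset into the event payload. On
 * success the stack top becomes a REG_PTR referring to the selected element.
 */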
355 static int dynamic_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
356 struct bytecode_runtime *runtime,
357 uint64_t index, struct estack_entry *stack_top)
358 {
359 int ret;
360 const struct bytecode_get_index_data *gid;
361
362 gid = (const struct bytecode_get_index_data *) &runtime->data[index];
363 switch (stack_top->u.ptr.type) {
364 case LOAD_OBJECT:
365 switch (stack_top->u.ptr.object_type) {
366 case OBJECT_TYPE_ARRAY:
367 {
368 const char *ptr;
369
370 WARN_ON_ONCE(gid->offset >= gid->array_len);
371 /* Skip count (unsigned long) */
372 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
373 ptr = ptr + gid->offset;
374 stack_top->u.ptr.ptr = ptr;
375 stack_top->u.ptr.object_type = gid->elem.type;
376 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
377 BUG_ON(stack_top->u.ptr.field->type.atype != atype_array_nestable);
378 stack_top->u.ptr.field = NULL;
379 break;
380 }
381 case OBJECT_TYPE_SEQUENCE:
382 {
383 const char *ptr;
384 size_t ptr_seq_len;
385
386 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
387 ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
388 if (gid->offset >= gid->elem.len * ptr_seq_len) {
389 ret = -EINVAL;
390 goto end;
391 }
392 ptr = ptr + gid->offset;
393 stack_top->u.ptr.ptr = ptr;
394 stack_top->u.ptr.object_type = gid->elem.type;
395 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
396 BUG_ON(stack_top->u.ptr.field->type.atype != atype_sequence_nestable);
397 stack_top->u.ptr.field = NULL;
398 break;
399 }
400 case OBJECT_TYPE_STRUCT:
401 printk(KERN_WARNING "LTTng: bytecode: Nested structures are not supported yet.\n");
402 ret = -EINVAL;
403 goto end;
404 case OBJECT_TYPE_VARIANT:
405 default:
406 printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d\n",
407 (int) stack_top->u.ptr.object_type);
408 ret = -EINVAL;
409 goto end;
410 }
411 break;
412 case LOAD_ROOT_CONTEXT:
413 case LOAD_ROOT_APP_CONTEXT: /* Fall-through */
414 {
415 ret = context_get_index(lttng_probe_ctx,
416 &stack_top->u.ptr,
417 gid->ctx_index);
418 if (ret) {
419 goto end;
420 }
421 break;
422 }
423 case LOAD_ROOT_PAYLOAD:
424 stack_top->u.ptr.ptr += gid->offset;
425 if (gid->elem.type == OBJECT_TYPE_STRING)
426 stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
427 stack_top->u.ptr.object_type = gid->elem.type;
428 stack_top->u.ptr.type = LOAD_OBJECT;
429 stack_top->u.ptr.field = gid->field;
430 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
431 break;
432 }
433
434 stack_top->type = REG_PTR;
435
436 return 0;
437
438 end:
439 return ret;
440 }
441
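/*
 * Load the value referenced by the pointer object on the stack top and turn
 * the entry into a typed register: integers and enumerations become
 * REG_S64/REG_U64 (byte-swapped when rev_bo is set), strings and string
 * sequences become REG_STRING. Compound and dynamic objects cannot be loaded
 * directly.
 */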
442 static int dynamic_load_field(struct estack_entry *stack_top)
443 {
444 int ret;
445
446 switch (stack_top->u.ptr.type) {
447 case LOAD_OBJECT:
448 break;
449 case LOAD_ROOT_CONTEXT:
450 case LOAD_ROOT_APP_CONTEXT:
451 case LOAD_ROOT_PAYLOAD:
452 default:
453 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
454 ret = -EINVAL;
455 goto end;
456 }
457 switch (stack_top->u.ptr.object_type) {
458 case OBJECT_TYPE_S8:
459 dbg_printk("op load field s8\n");
460 stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
461 stack_top->type = REG_S64;
462 break;
463 case OBJECT_TYPE_S16:
464 {
465 int16_t tmp;
466
467 dbg_printk("op load field s16\n");
468 tmp = *(int16_t *) stack_top->u.ptr.ptr;
469 if (stack_top->u.ptr.rev_bo)
470 __swab16s(&tmp);
471 stack_top->u.v = tmp;
472 stack_top->type = REG_S64;
473 break;
474 }
475 case OBJECT_TYPE_S32:
476 {
477 int32_t tmp;
478
479 dbg_printk("op load field s32\n");
480 tmp = *(int32_t *) stack_top->u.ptr.ptr;
481 if (stack_top->u.ptr.rev_bo)
482 __swab32s(&tmp);
483 stack_top->u.v = tmp;
484 stack_top->type = REG_S64;
485 break;
486 }
487 case OBJECT_TYPE_S64:
488 {
489 int64_t tmp;
490
491 dbg_printk("op load field s64\n");
492 tmp = *(int64_t *) stack_top->u.ptr.ptr;
493 if (stack_top->u.ptr.rev_bo)
494 __swab64s(&tmp);
495 stack_top->u.v = tmp;
496 stack_top->type = REG_S64;
497 break;
498 }
499 case OBJECT_TYPE_SIGNED_ENUM:
500 {
501 int64_t tmp;
502
503 dbg_printk("op load field signed enumeration\n");
504 tmp = *(int64_t *) stack_top->u.ptr.ptr;
505 if (stack_top->u.ptr.rev_bo)
506 __swab64s(&tmp);
507 stack_top->u.v = tmp;
508 stack_top->type = REG_S64;
509 break;
510 }
511 case OBJECT_TYPE_U8:
512 dbg_printk("op load field u8\n");
513 stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
514 stack_top->type = REG_U64;
515 break;
516 case OBJECT_TYPE_U16:
517 {
518 uint16_t tmp;
519
520 dbg_printk("op load field u16\n");
521 tmp = *(uint16_t *) stack_top->u.ptr.ptr;
522 if (stack_top->u.ptr.rev_bo)
523 __swab16s(&tmp);
524 stack_top->u.v = tmp;
525 stack_top->type = REG_U64;
526 break;
527 }
528 case OBJECT_TYPE_U32:
529 {
530 uint32_t tmp;
531
532 dbg_printk("op load field u32\n");
533 tmp = *(uint32_t *) stack_top->u.ptr.ptr;
534 if (stack_top->u.ptr.rev_bo)
535 __swab32s(&tmp);
536 stack_top->u.v = tmp;
537 stack_top->type = REG_U64;
538 break;
539 }
540 case OBJECT_TYPE_U64:
541 {
542 uint64_t tmp;
543
544 dbg_printk("op load field u64\n");
545 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
546 if (stack_top->u.ptr.rev_bo)
547 __swab64s(&tmp);
548 stack_top->u.v = tmp;
549 stack_top->type = REG_U64;
550 break;
551 }
552 case OBJECT_TYPE_UNSIGNED_ENUM:
553 {
554 uint64_t tmp;
555
556 dbg_printk("op load field unsigned enumeration\n");
557 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
558 if (stack_top->u.ptr.rev_bo)
559 __swab64s(&tmp);
560 stack_top->u.v = tmp;
561 stack_top->type = REG_U64;
562 break;
563 }
564 case OBJECT_TYPE_STRING:
565 {
566 const char *str;
567
568 dbg_printk("op load field string\n");
569 str = (const char *) stack_top->u.ptr.ptr;
570 stack_top->u.s.str = str;
571 if (unlikely(!stack_top->u.s.str)) {
572 dbg_printk("Bytecode warning: loading a NULL string.\n");
573 ret = -EINVAL;
574 goto end;
575 }
576 stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
577 stack_top->u.s.literal_type =
578 ESTACK_STRING_LITERAL_TYPE_NONE;
579 stack_top->type = REG_STRING;
580 break;
581 }
582 case OBJECT_TYPE_STRING_SEQUENCE:
583 {
584 const char *ptr;
585
586 dbg_printk("op load field string sequence\n");
587 ptr = stack_top->u.ptr.ptr;
588 stack_top->u.s.seq_len = *(unsigned long *) ptr;
589 stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
590 if (unlikely(!stack_top->u.s.str)) {
591 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
592 ret = -EINVAL;
593 goto end;
594 }
595 stack_top->u.s.literal_type =
596 ESTACK_STRING_LITERAL_TYPE_NONE;
597 stack_top->type = REG_STRING;
598 break;
599 }
600 case OBJECT_TYPE_DYNAMIC:
601 /*
602 * Dynamic types in context are resolved
603 * by the context get-index operation.
604 */
605 ret = -EINVAL;
606 goto end;
607 case OBJECT_TYPE_DOUBLE:
608 ret = -EINVAL;
609 goto end;
610 case OBJECT_TYPE_SEQUENCE:
611 case OBJECT_TYPE_ARRAY:
612 case OBJECT_TYPE_STRUCT:
613 case OBJECT_TYPE_VARIANT:
614 printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
615 ret = -EINVAL;
616 goto end;
617 }
618 return 0;
619
620 end:
621 return ret;
622 }
623
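/*
 * Translate the final accumulator register into an lttng_interpreter_output
 * record, used when the caller asks for the interpreted value rather than
 * just a record/discard decision. Pointer objects are loaded first and the
 * conversion is retried; integers, enumerations, strings, sequences and
 * arrays are supported.
 */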
624 static
625 int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
626 struct lttng_interpreter_output *output)
627 {
628 int ret;
629
630 again:
631 switch (ax->type) {
632 case REG_S64:
633 output->type = LTTNG_INTERPRETER_TYPE_S64;
634 output->u.s = ax->u.v;
635 break;
636 case REG_U64:
637 output->type = LTTNG_INTERPRETER_TYPE_U64;
638 output->u.u = (uint64_t) ax->u.v;
639 break;
640 case REG_STRING:
641 output->type = LTTNG_INTERPRETER_TYPE_STRING;
642 output->u.str.str = ax->u.s.str;
643 output->u.str.len = ax->u.s.seq_len;
644 break;
645 case REG_PTR:
646 switch (ax->u.ptr.object_type) {
647 case OBJECT_TYPE_S8:
648 case OBJECT_TYPE_S16:
649 case OBJECT_TYPE_S32:
650 case OBJECT_TYPE_S64:
651 case OBJECT_TYPE_U8:
652 case OBJECT_TYPE_U16:
653 case OBJECT_TYPE_U32:
654 case OBJECT_TYPE_U64:
655 case OBJECT_TYPE_DOUBLE:
656 case OBJECT_TYPE_STRING:
657 case OBJECT_TYPE_STRING_SEQUENCE:
658 ret = dynamic_load_field(ax);
659 if (ret)
660 return ret;
661 /* Retry after loading ptr into stack top. */
662 goto again;
663 case OBJECT_TYPE_SEQUENCE:
664 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
665 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
666 output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
667 output->u.sequence.nested_type = ax->u.ptr.field->type.u.sequence_nestable.elem_type;
668 break;
669 case OBJECT_TYPE_ARRAY:
670 /* Skip count (unsigned long) */
671 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
672 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
673 output->u.sequence.nr_elem = ax->u.ptr.field->type.u.array_nestable.length;
674 output->u.sequence.nested_type = ax->u.ptr.field->type.u.array_nestable.elem_type;
675 break;
676 case OBJECT_TYPE_SIGNED_ENUM:
677 ret = dynamic_load_field(ax);
678 if (ret)
679 return ret;
680 output->type = LTTNG_INTERPRETER_TYPE_SIGNED_ENUM;
681 output->u.s = ax->u.v;
682 break;
683 case OBJECT_TYPE_UNSIGNED_ENUM:
684 ret = dynamic_load_field(ax);
685 if (ret)
686 return ret;
687 output->type = LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM;
688 output->u.u = ax->u.v;
689 break;
690 case OBJECT_TYPE_STRUCT:
691 case OBJECT_TYPE_VARIANT:
692 default:
693 return -EINVAL;
694 }
695
696 break;
697 case REG_STAR_GLOB_STRING:
698 case REG_TYPE_UNKNOWN:
699 default:
700 return -EINVAL;
701 }
702
703 return LTTNG_INTERPRETER_RECORD_FLAG;
704 }
705
706 /*
707 * Return 0 (discard), or raise the 0x1 flag (log event).
708 * Currently, other flags are kept for future extensions and have no
709 * effect.
710 */
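/*
 * The interpreter is a stack machine: "stack" is the evaluation stack, with
 * the value and type of its top entry cached in the ax/ax_t registers (and
 * the next entry in bx/bx_t) through the estack_* macros. Opcodes are
 * dispatched through the table below unless INTERPRETER_USE_SWITCH is
 * defined.
 */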
711 static
712 uint64_t bytecode_interpret(void *interpreter_data,
713 struct lttng_probe_ctx *lttng_probe_ctx,
714 const char *interpreter_stack_data,
715 struct lttng_interpreter_output *output)
716 {
717 struct bytecode_runtime *bytecode = interpreter_data;
718 void *pc, *next_pc, *start_pc;
719 int ret = -EINVAL;
720 uint64_t retval = 0;
721 struct estack _stack;
722 struct estack *stack = &_stack;
723 register int64_t ax = 0, bx = 0;
724 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
725 register int top = INTERPRETER_STACK_EMPTY;
726 #ifndef INTERPRETER_USE_SWITCH
727 static void *dispatch[NR_BYTECODE_OPS] = {
728 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
729
730 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
731
732 /* binary */
733 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
734 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
735 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
736 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
737 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
738 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
739 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
740 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
741 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
742 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
743
744 /* binary comparators */
745 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
746 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
747 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
748 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
749 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
750 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
751
752 /* string binary comparator */
753 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
754 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
755 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
756 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
757 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
758 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
759
760 /* globbing pattern binary comparator */
761 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
762 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
763
764 /* s64 binary comparator */
765 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
766 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
767 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
768 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
769 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
770 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
771
772 /* double binary comparator */
773 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
774 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
775 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
776 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
777 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
778 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
779
780 /* Mixed S64-double binary comparators */
781 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
782 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
783 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
784 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
785 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
786 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
787
788 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
789 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
790 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
791 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
792 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
793 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
794
795 /* unary */
796 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
797 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
798 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
799 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
800 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
801 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
802 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
803 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
804 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
805
806 /* logical */
807 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
808 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
809
810 /* load field ref */
811 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
812 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
813 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
814 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
815 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
816
817 /* load from immediate operand */
818 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
819 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
820 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
821 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
822
823 /* cast */
824 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
825 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
826 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
827
828 /* get context ref */
829 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
830 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
831 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
832 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
833
834 /* load userspace field ref */
835 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING,
836 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE,
837
838 /* Instructions for recursive traversal through composed types. */
839 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
840 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
841 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
842
843 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
844 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
845 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
846 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
847
848 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
849 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
850 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
851 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
852 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
853 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
854 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
855 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
856 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
857 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
858 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
859 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
860
861 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
862
863 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
864 };
865 #endif /* #ifndef INTERPRETER_USE_SWITCH */
866
867 START_OP
868
869 OP(BYTECODE_OP_UNKNOWN):
870 OP(BYTECODE_OP_LOAD_FIELD_REF):
871 OP(BYTECODE_OP_GET_CONTEXT_REF):
872 #ifdef INTERPRETER_USE_SWITCH
873 default:
874 #endif /* INTERPRETER_USE_SWITCH */
875 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
876 (unsigned int) *(bytecode_opcode_t *) pc);
877 ret = -EINVAL;
878 goto end;
879
880 OP(BYTECODE_OP_RETURN):
881 OP(BYTECODE_OP_RETURN_S64):
882 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
883 switch (estack_ax_t) {
884 case REG_S64:
885 case REG_U64:
886 retval = !!estack_ax_v;
887 break;
888 case REG_DOUBLE:
889 case REG_STRING:
890 case REG_PTR:
891 if (!output) {
892 ret = -EINVAL;
893 goto end;
894 }
895 retval = 0;
896 break;
897 case REG_STAR_GLOB_STRING:
898 case REG_TYPE_UNKNOWN:
899 ret = -EINVAL;
900 goto end;
901 }
902 ret = 0;
903 goto end;
904
905 /* binary */
906 OP(BYTECODE_OP_MUL):
907 OP(BYTECODE_OP_DIV):
908 OP(BYTECODE_OP_MOD):
909 OP(BYTECODE_OP_PLUS):
910 OP(BYTECODE_OP_MINUS):
911 printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
912 (unsigned int) *(bytecode_opcode_t *) pc);
913 ret = -EINVAL;
914 goto end;
915
916 OP(BYTECODE_OP_EQ):
917 OP(BYTECODE_OP_NE):
918 OP(BYTECODE_OP_GT):
919 OP(BYTECODE_OP_LT):
920 OP(BYTECODE_OP_GE):
921 OP(BYTECODE_OP_LE):
922 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
923 (unsigned int) *(bytecode_opcode_t *) pc);
924 ret = -EINVAL;
925 goto end;
926
927 OP(BYTECODE_OP_EQ_STRING):
928 {
929 int res;
930
931 res = (stack_strcmp(stack, top, "==") == 0);
932 estack_pop(stack, top, ax, bx, ax_t, bx_t);
933 estack_ax_v = res;
934 estack_ax_t = REG_S64;
935 next_pc += sizeof(struct binary_op);
936 PO;
937 }
938 OP(BYTECODE_OP_NE_STRING):
939 {
940 int res;
941
942 res = (stack_strcmp(stack, top, "!=") != 0);
943 estack_pop(stack, top, ax, bx, ax_t, bx_t);
944 estack_ax_v = res;
945 estack_ax_t = REG_S64;
946 next_pc += sizeof(struct binary_op);
947 PO;
948 }
949 OP(BYTECODE_OP_GT_STRING):
950 {
951 int res;
952
953 res = (stack_strcmp(stack, top, ">") > 0);
954 estack_pop(stack, top, ax, bx, ax_t, bx_t);
955 estack_ax_v = res;
956 estack_ax_t = REG_S64;
957 next_pc += sizeof(struct binary_op);
958 PO;
959 }
960 OP(BYTECODE_OP_LT_STRING):
961 {
962 int res;
963
964 res = (stack_strcmp(stack, top, "<") < 0);
965 estack_pop(stack, top, ax, bx, ax_t, bx_t);
966 estack_ax_v = res;
967 estack_ax_t = REG_S64;
968 next_pc += sizeof(struct binary_op);
969 PO;
970 }
971 OP(BYTECODE_OP_GE_STRING):
972 {
973 int res;
974
975 res = (stack_strcmp(stack, top, ">=") >= 0);
976 estack_pop(stack, top, ax, bx, ax_t, bx_t);
977 estack_ax_v = res;
978 estack_ax_t = REG_S64;
979 next_pc += sizeof(struct binary_op);
980 PO;
981 }
982 OP(BYTECODE_OP_LE_STRING):
983 {
984 int res;
985
986 res = (stack_strcmp(stack, top, "<=") <= 0);
987 estack_pop(stack, top, ax, bx, ax_t, bx_t);
988 estack_ax_v = res;
989 estack_ax_t = REG_S64;
990 next_pc += sizeof(struct binary_op);
991 PO;
992 }
993
994 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
995 {
996 int res;
997
998 res = (stack_star_glob_match(stack, top, "==") == 0);
999 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1000 estack_ax_v = res;
1001 estack_ax_t = REG_S64;
1002 next_pc += sizeof(struct binary_op);
1003 PO;
1004 }
1005 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
1006 {
1007 int res;
1008
1009 res = (stack_star_glob_match(stack, top, "!=") != 0);
1010 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1011 estack_ax_v = res;
1012 estack_ax_t = REG_S64;
1013 next_pc += sizeof(struct binary_op);
1014 PO;
1015 }
1016
1017 OP(BYTECODE_OP_EQ_S64):
1018 {
1019 int res;
1020
1021 res = (estack_bx_v == estack_ax_v);
1022 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1023 estack_ax_v = res;
1024 estack_ax_t = REG_S64;
1025 next_pc += sizeof(struct binary_op);
1026 PO;
1027 }
1028 OP(BYTECODE_OP_NE_S64):
1029 {
1030 int res;
1031
1032 res = (estack_bx_v != estack_ax_v);
1033 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1034 estack_ax_v = res;
1035 estack_ax_t = REG_S64;
1036 next_pc += sizeof(struct binary_op);
1037 PO;
1038 }
1039 OP(BYTECODE_OP_GT_S64):
1040 {
1041 int res;
1042
1043 res = (estack_bx_v > estack_ax_v);
1044 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1045 estack_ax_v = res;
1046 estack_ax_t = REG_S64;
1047 next_pc += sizeof(struct binary_op);
1048 PO;
1049 }
1050 OP(BYTECODE_OP_LT_S64):
1051 {
1052 int res;
1053
1054 res = (estack_bx_v < estack_ax_v);
1055 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1056 estack_ax_v = res;
1057 estack_ax_t = REG_S64;
1058 next_pc += sizeof(struct binary_op);
1059 PO;
1060 }
1061 OP(BYTECODE_OP_GE_S64):
1062 {
1063 int res;
1064
1065 res = (estack_bx_v >= estack_ax_v);
1066 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1067 estack_ax_v = res;
1068 estack_ax_t = REG_S64;
1069 next_pc += sizeof(struct binary_op);
1070 PO;
1071 }
1072 OP(BYTECODE_OP_LE_S64):
1073 {
1074 int res;
1075
1076 res = (estack_bx_v <= estack_ax_v);
1077 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1078 estack_ax_v = res;
1079 estack_ax_t = REG_S64;
1080 next_pc += sizeof(struct binary_op);
1081 PO;
1082 }
1083
1084 OP(BYTECODE_OP_EQ_DOUBLE):
1085 OP(BYTECODE_OP_NE_DOUBLE):
1086 OP(BYTECODE_OP_GT_DOUBLE):
1087 OP(BYTECODE_OP_LT_DOUBLE):
1088 OP(BYTECODE_OP_GE_DOUBLE):
1089 OP(BYTECODE_OP_LE_DOUBLE):
1090 {
1091 BUG_ON(1);
1092 PO;
1093 }
1094
1095 /* Mixed S64-double binary comparators */
1096 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1097 OP(BYTECODE_OP_NE_DOUBLE_S64):
1098 OP(BYTECODE_OP_GT_DOUBLE_S64):
1099 OP(BYTECODE_OP_LT_DOUBLE_S64):
1100 OP(BYTECODE_OP_GE_DOUBLE_S64):
1101 OP(BYTECODE_OP_LE_DOUBLE_S64):
1102 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1103 OP(BYTECODE_OP_NE_S64_DOUBLE):
1104 OP(BYTECODE_OP_GT_S64_DOUBLE):
1105 OP(BYTECODE_OP_LT_S64_DOUBLE):
1106 OP(BYTECODE_OP_GE_S64_DOUBLE):
1107 OP(BYTECODE_OP_LE_S64_DOUBLE):
1108 {
1109 BUG_ON(1);
1110 PO;
1111 }
1112 OP(BYTECODE_OP_BIT_RSHIFT):
1113 {
1114 int64_t res;
1115
1116 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1117 ret = -EINVAL;
1118 goto end;
1119 }
1120
1121 /* Catch undefined behavior. */
1122 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1123 ret = -EINVAL;
1124 goto end;
1125 }
1126 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1127 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1128 estack_ax_v = res;
1129 estack_ax_t = REG_U64;
1130 next_pc += sizeof(struct binary_op);
1131 PO;
1132 }
1133 OP(BYTECODE_OP_BIT_LSHIFT):
1134 {
1135 int64_t res;
1136
1137 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1138 ret = -EINVAL;
1139 goto end;
1140 }
1141
1142 /* Catch undefined behavior. */
1143 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1144 ret = -EINVAL;
1145 goto end;
1146 }
1147 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1148 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1149 estack_ax_v = res;
1150 estack_ax_t = REG_U64;
1151 next_pc += sizeof(struct binary_op);
1152 PO;
1153 }
1154 OP(BYTECODE_OP_BIT_AND):
1155 {
1156 int64_t res;
1157
1158 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1159 ret = -EINVAL;
1160 goto end;
1161 }
1162
1163 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1164 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1165 estack_ax_v = res;
1166 estack_ax_t = REG_U64;
1167 next_pc += sizeof(struct binary_op);
1168 PO;
1169 }
1170 OP(BYTECODE_OP_BIT_OR):
1171 {
1172 int64_t res;
1173
1174 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1175 ret = -EINVAL;
1176 goto end;
1177 }
1178
1179 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1180 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1181 estack_ax_v = res;
1182 estack_ax_t = REG_U64;
1183 next_pc += sizeof(struct binary_op);
1184 PO;
1185 }
1186 OP(BYTECODE_OP_BIT_XOR):
1187 {
1188 int64_t res;
1189
1190 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1191 ret = -EINVAL;
1192 goto end;
1193 }
1194
1195 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1196 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1197 estack_ax_v = res;
1198 estack_ax_t = REG_U64;
1199 next_pc += sizeof(struct binary_op);
1200 PO;
1201 }
1202
1203 /* unary */
1204 OP(BYTECODE_OP_UNARY_PLUS):
1205 OP(BYTECODE_OP_UNARY_MINUS):
1206 OP(BYTECODE_OP_UNARY_NOT):
1207 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1208 (unsigned int) *(bytecode_opcode_t *) pc);
1209 ret = -EINVAL;
1210 goto end;
1211
1212
1213 OP(BYTECODE_OP_UNARY_BIT_NOT):
1214 {
1215 estack_ax_v = ~(uint64_t) estack_ax_v;
1216 estack_ax_t = REG_S64;
1217 next_pc += sizeof(struct unary_op);
1218 PO;
1219 }
1220
1221 OP(BYTECODE_OP_UNARY_PLUS_S64):
1222 {
1223 next_pc += sizeof(struct unary_op);
1224 PO;
1225 }
1226 OP(BYTECODE_OP_UNARY_MINUS_S64):
1227 {
1228 estack_ax_v = -estack_ax_v;
1229 estack_ax_t = REG_S64;
1230 next_pc += sizeof(struct unary_op);
1231 PO;
1232 }
1233 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1234 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1235 {
1236 BUG_ON(1);
1237 PO;
1238 }
1239 OP(BYTECODE_OP_UNARY_NOT_S64):
1240 {
1241 estack_ax_v = !estack_ax_v;
1242 estack_ax_t = REG_S64;
1243 next_pc += sizeof(struct unary_op);
1244 PO;
1245 }
1246 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1247 {
1248 BUG_ON(1);
1249 PO;
1250 }
1251
1252 /* logical */
1253 OP(BYTECODE_OP_AND):
1254 {
1255 struct logical_op *insn = (struct logical_op *) pc;
1256
1257 /* If AX is 0, skip and evaluate to 0 */
1258 if (unlikely(estack_ax_v == 0)) {
1259 dbg_printk("Jumping to bytecode offset %u\n",
1260 (unsigned int) insn->skip_offset);
1261 next_pc = start_pc + insn->skip_offset;
1262 } else {
1263 /* Pop 1 when jump not taken */
1264 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1265 next_pc += sizeof(struct logical_op);
1266 }
1267 PO;
1268 }
1269 OP(BYTECODE_OP_OR):
1270 {
1271 struct logical_op *insn = (struct logical_op *) pc;
1272
1273 /* If AX is nonzero, skip and evaluate to 1 */
1274
1275 if (unlikely(estack_ax_v != 0)) {
1276 estack_ax_v = 1;
1277 dbg_printk("Jumping to bytecode offset %u\n",
1278 (unsigned int) insn->skip_offset);
1279 next_pc = start_pc + insn->skip_offset;
1280 } else {
1281 /* Pop 1 when jump not taken */
1282 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1283 next_pc += sizeof(struct logical_op);
1284 }
1285 PO;
1286 }
1287
1288
1289 /* load field ref */
1290 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1291 {
1292 struct load_op *insn = (struct load_op *) pc;
1293 struct field_ref *ref = (struct field_ref *) insn->data;
1294
1295 dbg_printk("load field ref offset %u type string\n",
1296 ref->offset);
1297 estack_push(stack, top, ax, bx, ax_t, bx_t);
1298 estack_ax(stack, top)->u.s.str =
1299 *(const char * const *) &interpreter_stack_data[ref->offset];
1300 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1301 dbg_printk("Bytecode warning: loading a NULL string.\n");
1302 ret = -EINVAL;
1303 goto end;
1304 }
1305 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1306 estack_ax(stack, top)->u.s.literal_type =
1307 ESTACK_STRING_LITERAL_TYPE_NONE;
1308 estack_ax(stack, top)->u.s.user = 0;
1309 estack_ax(stack, top)->type = REG_STRING;
1310 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1311 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1312 PO;
1313 }
1314
1315 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
1316 {
1317 struct load_op *insn = (struct load_op *) pc;
1318 struct field_ref *ref = (struct field_ref *) insn->data;
1319
1320 dbg_printk("load field ref offset %u type sequence\n",
1321 ref->offset);
1322 estack_push(stack, top, ax, bx, ax_t, bx_t);
1323 estack_ax(stack, top)->u.s.seq_len =
1324 *(unsigned long *) &interpreter_stack_data[ref->offset];
1325 estack_ax(stack, top)->u.s.str =
1326 *(const char **) (&interpreter_stack_data[ref->offset
1327 + sizeof(unsigned long)]);
1328 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1329 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1330 ret = -EINVAL;
1331 goto end;
1332 }
1333 estack_ax(stack, top)->u.s.literal_type =
1334 ESTACK_STRING_LITERAL_TYPE_NONE;
1335 estack_ax(stack, top)->u.s.user = 0;
1336 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1337 PO;
1338 }
1339
1340 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
1341 {
1342 struct load_op *insn = (struct load_op *) pc;
1343 struct field_ref *ref = (struct field_ref *) insn->data;
1344
1345 dbg_printk("load field ref offset %u type s64\n",
1346 ref->offset);
1347 estack_push(stack, top, ax, bx, ax_t, bx_t);
1348 estack_ax_v =
1349 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
1350 estack_ax_t = REG_S64;
1351 dbg_printk("ref load s64 %lld\n",
1352 (long long) estack_ax_v);
1353 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1354 PO;
1355 }
1356
1357 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
1358 {
1359 BUG_ON(1);
1360 PO;
1361 }
1362
1363 /* load from immediate operand */
1364 OP(BYTECODE_OP_LOAD_STRING):
1365 {
1366 struct load_op *insn = (struct load_op *) pc;
1367
1368 dbg_printk("load string %s\n", insn->data);
1369 estack_push(stack, top, ax, bx, ax_t, bx_t);
1370 estack_ax(stack, top)->u.s.str = insn->data;
1371 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1372 estack_ax(stack, top)->u.s.literal_type =
1373 ESTACK_STRING_LITERAL_TYPE_PLAIN;
1374 estack_ax(stack, top)->u.s.user = 0;
1375 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1376 PO;
1377 }
1378
1379 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
1380 {
1381 struct load_op *insn = (struct load_op *) pc;
1382
1383 dbg_printk("load globbing pattern %s\n", insn->data);
1384 estack_push(stack, top, ax, bx, ax_t, bx_t);
1385 estack_ax(stack, top)->u.s.str = insn->data;
1386 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1387 estack_ax(stack, top)->u.s.literal_type =
1388 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
1389 estack_ax(stack, top)->u.s.user = 0;
1390 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1391 PO;
1392 }
1393
1394 OP(BYTECODE_OP_LOAD_S64):
1395 {
1396 struct load_op *insn = (struct load_op *) pc;
1397
1398 estack_push(stack, top, ax, bx, ax_t, bx_t);
1399 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
1400 estack_ax_t = REG_S64;
1401 dbg_printk("load s64 %lld\n",
1402 (long long) estack_ax_v);
1403 next_pc += sizeof(struct load_op)
1404 + sizeof(struct literal_numeric);
1405 PO;
1406 }
1407
1408 OP(BYTECODE_OP_LOAD_DOUBLE):
1409 {
1410 BUG_ON(1);
1411 PO;
1412 }
1413
1414 /* cast */
1415 OP(BYTECODE_OP_CAST_TO_S64):
1416 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1417 (unsigned int) *(bytecode_opcode_t *) pc);
1418 ret = -EINVAL;
1419 goto end;
1420
1421 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
1422 {
1423 BUG_ON(1);
1424 PO;
1425 }
1426
1427 OP(BYTECODE_OP_CAST_NOP):
1428 {
1429 next_pc += sizeof(struct cast_op);
1430 PO;
1431 }
1432
1433 /* get context ref */
1434 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
1435 {
1436 struct load_op *insn = (struct load_op *) pc;
1437 struct field_ref *ref = (struct field_ref *) insn->data;
1438 struct lttng_ctx_field *ctx_field;
1439 union lttng_ctx_value v;
1440
1441 dbg_printk("get context ref offset %u type string\n",
1442 ref->offset);
1443 ctx_field = &lttng_static_ctx->fields[ref->offset];
1444 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1445 estack_push(stack, top, ax, bx, ax_t, bx_t);
1446 estack_ax(stack, top)->u.s.str = v.str;
1447 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1448 dbg_printk("Bytecode warning: loading a NULL string.\n");
1449 ret = -EINVAL;
1450 goto end;
1451 }
1452 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1453 estack_ax(stack, top)->u.s.literal_type =
1454 ESTACK_STRING_LITERAL_TYPE_NONE;
1455 estack_ax(stack, top)->u.s.user = 0;
1456 estack_ax(stack, top)->type = REG_STRING;
1457 dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
1458 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1459 PO;
1460 }
1461
1462 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
1463 {
1464 struct load_op *insn = (struct load_op *) pc;
1465 struct field_ref *ref = (struct field_ref *) insn->data;
1466 struct lttng_ctx_field *ctx_field;
1467 union lttng_ctx_value v;
1468
1469 dbg_printk("get context ref offset %u type s64\n",
1470 ref->offset);
1471 ctx_field = &lttng_static_ctx->fields[ref->offset];
1472 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1473 estack_push(stack, top, ax, bx, ax_t, bx_t);
1474 estack_ax_v = v.s64;
1475 estack_ax_t = REG_S64;
1476 dbg_printk("ref get context s64 %lld\n",
1477 (long long) estack_ax_v);
1478 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1479 PO;
1480 }
1481
1482 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
1483 {
1484 BUG_ON(1);
1485 PO;
1486 }
1487
1488 /* load userspace field ref */
1489 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING):
1490 {
1491 struct load_op *insn = (struct load_op *) pc;
1492 struct field_ref *ref = (struct field_ref *) insn->data;
1493
1494 dbg_printk("load field ref offset %u type user string\n",
1495 ref->offset);
1496 estack_push(stack, top, ax, bx, ax_t, bx_t);
1497 estack_ax(stack, top)->u.s.user_str =
1498 *(const char * const *) &interpreter_stack_data[ref->offset];
1499 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1500 dbg_printk("Bytecode warning: loading a NULL string.\n");
1501 ret = -EINVAL;
1502 goto end;
1503 }
1504 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1505 estack_ax(stack, top)->u.s.literal_type =
1506 ESTACK_STRING_LITERAL_TYPE_NONE;
1507 estack_ax(stack, top)->u.s.user = 1;
1508 estack_ax(stack, top)->type = REG_STRING;
1509 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1510 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1511 PO;
1512 }
1513
1514 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1515 {
1516 struct load_op *insn = (struct load_op *) pc;
1517 struct field_ref *ref = (struct field_ref *) insn->data;
1518
1519 dbg_printk("load field ref offset %u type user sequence\n",
1520 ref->offset);
1521 estack_push(stack, top, ax, bx, ax_t, bx_t);
1522 estack_ax(stack, top)->u.s.seq_len =
1523 *(unsigned long *) &interpreter_stack_data[ref->offset];
1524 estack_ax(stack, top)->u.s.user_str =
1525 *(const char **) (&interpreter_stack_data[ref->offset
1526 + sizeof(unsigned long)]);
1527 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1528 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1529 ret = -EINVAL;
1530 goto end;
1531 }
1532 estack_ax(stack, top)->u.s.literal_type =
1533 ESTACK_STRING_LITERAL_TYPE_NONE;
1534 estack_ax(stack, top)->u.s.user = 1;
1535 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1536 PO;
1537 }
1538
1539 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
1540 {
1541 dbg_printk("op get context root\n");
1542 estack_push(stack, top, ax, bx, ax_t, bx_t);
1543 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
1544 /* "field" only needed for variants. */
1545 estack_ax(stack, top)->u.ptr.field = NULL;
1546 estack_ax(stack, top)->type = REG_PTR;
1547 next_pc += sizeof(struct load_op);
1548 PO;
1549 }
1550
1551 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
1552 {
1553 BUG_ON(1);
1554 PO;
1555 }
1556
1557 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
1558 {
1559 dbg_printk("op get app payload root\n");
1560 estack_push(stack, top, ax, bx, ax_t, bx_t);
1561 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
1562 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
1563 /* "field" only needed for variants. */
1564 estack_ax(stack, top)->u.ptr.field = NULL;
1565 estack_ax(stack, top)->type = REG_PTR;
1566 next_pc += sizeof(struct load_op);
1567 PO;
1568 }
1569
1570 OP(BYTECODE_OP_GET_SYMBOL):
1571 {
1572 dbg_printk("op get symbol\n");
1573 switch (estack_ax(stack, top)->u.ptr.type) {
1574 case LOAD_OBJECT:
1575 printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
1576 ret = -EINVAL;
1577 goto end;
1578 case LOAD_ROOT_CONTEXT:
1579 case LOAD_ROOT_APP_CONTEXT:
1580 case LOAD_ROOT_PAYLOAD:
1581 /*
1582 * Symbol lookup is performed by the
1583 * specialization pass.
1584 */
1585 ret = -EINVAL;
1586 goto end;
1587 }
1588 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1589 PO;
1590 }
1591
1592 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
1593 {
1594 /*
1595 * Used for the first variant encountered in a
1596 * traversal. Variants are not implemented yet.
1597 */
1598 ret = -EINVAL;
1599 goto end;
1600 }
1601
1602 OP(BYTECODE_OP_GET_INDEX_U16):
1603 {
1604 struct load_op *insn = (struct load_op *) pc;
1605 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1606
1607 dbg_printk("op get index u16\n");
1608 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1609 if (ret)
1610 goto end;
1611 estack_ax_v = estack_ax(stack, top)->u.v;
1612 estack_ax_t = estack_ax(stack, top)->type;
1613 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1614 PO;
1615 }
1616
1617 OP(BYTECODE_OP_GET_INDEX_U64):
1618 {
1619 struct load_op *insn = (struct load_op *) pc;
1620 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1621
1622 dbg_printk("op get index u64\n");
1623 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1624 if (ret)
1625 goto end;
1626 estack_ax_v = estack_ax(stack, top)->u.v;
1627 estack_ax_t = estack_ax(stack, top)->type;
1628 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1629 PO;
1630 }
1631
1632 OP(BYTECODE_OP_LOAD_FIELD):
1633 {
1634 dbg_printk("op load field\n");
1635 ret = dynamic_load_field(estack_ax(stack, top));
1636 if (ret)
1637 goto end;
1638 estack_ax_v = estack_ax(stack, top)->u.v;
1639 estack_ax_t = estack_ax(stack, top)->type;
1640 next_pc += sizeof(struct load_op);
1641 PO;
1642 }
1643
1644 OP(BYTECODE_OP_LOAD_FIELD_S8):
1645 {
1646 dbg_printk("op load field s8\n");
1647
1648 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1649 estack_ax_t = REG_S64;
1650 next_pc += sizeof(struct load_op);
1651 PO;
1652 }
1653 OP(BYTECODE_OP_LOAD_FIELD_S16):
1654 {
1655 dbg_printk("op load field s16\n");
1656
1657 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1658 estack_ax_t = REG_S64;
1659 next_pc += sizeof(struct load_op);
1660 PO;
1661 }
1662 OP(BYTECODE_OP_LOAD_FIELD_S32):
1663 {
1664 dbg_printk("op load field s32\n");
1665
1666 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1667 estack_ax_t = REG_S64;
1668 next_pc += sizeof(struct load_op);
1669 PO;
1670 }
1671 OP(BYTECODE_OP_LOAD_FIELD_S64):
1672 {
1673 dbg_printk("op load field s64\n");
1674
1675 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1676 estack_ax_t = REG_S64;
1677 next_pc += sizeof(struct load_op);
1678 PO;
1679 }
1680 OP(BYTECODE_OP_LOAD_FIELD_U8):
1681 {
1682 dbg_printk("op load field u8\n");
1683
1684 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1685 estack_ax_t = REG_S64;
1686 next_pc += sizeof(struct load_op);
1687 PO;
1688 }
1689 OP(BYTECODE_OP_LOAD_FIELD_U16):
1690 {
1691 dbg_printk("op load field u16\n");
1692
1693 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1694 estack_ax_t = REG_S64;
1695 next_pc += sizeof(struct load_op);
1696 PO;
1697 }
1698 OP(BYTECODE_OP_LOAD_FIELD_U32):
1699 {
1700 dbg_printk("op load field u32\n");
1701
1702 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1703 estack_ax_t = REG_S64;
1704 next_pc += sizeof(struct load_op);
1705 PO;
1706 }
1707 OP(BYTECODE_OP_LOAD_FIELD_U64):
1708 {
1709 dbg_printk("op load field u64\n");
1710
1711 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1712 estack_ax_t = REG_S64;
1713 next_pc += sizeof(struct load_op);
1714 PO;
1715 }
1716 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
1717 {
1718 ret = -EINVAL;
1719 goto end;
1720 }
1721
1722 OP(BYTECODE_OP_LOAD_FIELD_STRING):
1723 {
1724 const char *str;
1725
1726 dbg_printk("op load field string\n");
1727 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
1728 estack_ax(stack, top)->u.s.str = str;
1729 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1730 dbg_printk("Bytecode warning: loading a NULL string.\n");
1731 ret = -EINVAL;
1732 goto end;
1733 }
1734 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1735 estack_ax(stack, top)->u.s.literal_type =
1736 ESTACK_STRING_LITERAL_TYPE_NONE;
1737 estack_ax(stack, top)->type = REG_STRING;
1738 next_pc += sizeof(struct load_op);
1739 PO;
1740 }
1741
1742 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
1743 {
1744 const char *ptr;
1745
1746 dbg_printk("op load field string sequence\n");
1747 ptr = estack_ax(stack, top)->u.ptr.ptr;
1748 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
1749 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
1750 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1751 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1752 ret = -EINVAL;
1753 goto end;
1754 }
1755 estack_ax(stack, top)->u.s.literal_type =
1756 ESTACK_STRING_LITERAL_TYPE_NONE;
1757 estack_ax(stack, top)->type = REG_STRING;
1758 next_pc += sizeof(struct load_op);
1759 PO;
1760 }
1761
1762 END_OP
1763 end:
1764 /* Return LTTNG_INTERPRETER_DISCARD on error. */
1765 if (ret)
1766 return LTTNG_INTERPRETER_DISCARD;
1767
1768 if (output) {
1769 return lttng_bytecode_interpret_format_output(
1770 estack_ax(stack, top), output);
1771 }
1772
1773 return retval;
1774 }
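/*
 * Ask objtool to skip stack validation for this function (see
 * wrapper/objtool.h); the computed-goto dispatch table is hard for it to
 * follow.
 */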
1775 LTTNG_STACK_FRAME_NON_STANDARD(bytecode_interpret);
1776
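/*
 * Filter entry point: interpret the bytecode without requesting an output
 * value. Returns 0 (discard) or the 0x1 record flag, as described above.
 */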
1777 uint64_t lttng_bytecode_filter_interpret(void *filter_data,
1778 struct lttng_probe_ctx *lttng_probe_ctx,
1779 const char *filter_stack_data)
1780 {
1781 return bytecode_interpret(filter_data, lttng_probe_ctx,
1782 filter_stack_data, NULL);
1783 }
1784
1785 #undef START_OP
1786 #undef OP
1787 #undef PO
1788 #undef END_OP