Implement capturing payload on event notifier
[lttng-modules.git] / src / lttng-bytecode-interpreter.c
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-bytecode-interpreter.c
4 *
5 * LTTng modules bytecode interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
14
15 #include <lttng/lttng-bytecode.h>
16 #include <lttng/string-utils.h>
17
18 /*
19 * get_char should be called with page fault handler disabled if it is expected
20 * to handle user-space read.
21 */
22 static
23 char get_char(struct estack_entry *reg, size_t offset)
24 {
25 if (unlikely(offset >= reg->u.s.seq_len))
26 return '\0';
27 if (reg->u.s.user) {
28 char c;
29
30 /* Handle invalid access as end of string. */
31 if (unlikely(!lttng_access_ok(VERIFY_READ,
32 reg->u.s.user_str + offset,
33 sizeof(c))))
34 return '\0';
35 /* Handle fault (nonzero return value) as end of string. */
36 if (unlikely(__copy_from_user_inatomic(&c,
37 reg->u.s.user_str + offset,
38 sizeof(c))))
39 return '\0';
40 return c;
41 } else {
42 return reg->u.s.str[offset];
43 }
44 }
45
46 /*
47 * -1: wildcard found.
48 * -2: unknown escape char.
49 * 0: normal char.
50 */
51 static
52 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
53 {
54 switch (*c) {
55 case '\\':
56 (*offset)++;
57 *c = get_char(reg, *offset);
58 switch (*c) {
59 case '\\':
60 case '*':
61 return 0;
62 default:
63 return -2;
64 }
65 case '*':
66 return -1;
67 default:
68 return 0;
69 }
70 }
71
72 static
73 char get_char_at_cb(size_t at, void *data)
74 {
75 return get_char(data, at);
76 }
77
78 static
79 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
80 {
81 bool has_user = false;
82 int result;
83 struct estack_entry *pattern_reg;
84 struct estack_entry *candidate_reg;
85
86 /* Disable the page fault handler when reading from userspace. */
87 if (estack_bx(stack, top)->u.s.user
88 || estack_ax(stack, top)->u.s.user) {
89 has_user = true;
90 pagefault_disable();
91 }
92
93 /* Find out which side is the pattern vs. the candidate. */
94 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
95 pattern_reg = estack_ax(stack, top);
96 candidate_reg = estack_bx(stack, top);
97 } else {
98 pattern_reg = estack_bx(stack, top);
99 candidate_reg = estack_ax(stack, top);
100 }
101
102 /* Perform the match operation. */
103 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
104 pattern_reg, get_char_at_cb, candidate_reg);
105 if (has_user)
106 pagefault_enable();
107
108 return result;
109 }
110
/*
 * strcmp-style comparison of the two string registers at the top of
 * the interpreter stack (bx relative to ax): returns < 0, 0, or > 0.
 *
 * Registers holding PLAIN string literals get special treatment for
 * the '*' wildcard and '\' escapes via parse_char(): a wildcard on
 * either side makes the remainder of the strings compare equal.
 *
 * @cmp_type only names the operator being interpreted; it does not
 * affect the comparison itself.
 */
static
int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
{
	size_t offset_bx = 0, offset_ax = 0;
	int diff, has_user = 0;

	/* Disable the page fault handler when reading from userspace. */
	if (estack_bx(stack, top)->u.s.user
			|| estack_ax(stack, top)->u.s.user) {
		has_user = 1;
		pagefault_disable();
	}

	for (;;) {
		int ret;
		int escaped_r0 = 0;	/* bx char was an unknown escape. */
		char char_bx, char_ax;

		char_bx = get_char(estack_bx(stack, top), offset_bx);
		char_ax = get_char(estack_ax(stack, top), offset_ax);

		if (unlikely(char_bx == '\0')) {
			if (char_ax == '\0') {
				/* Both strings ended at once: equal. */
				diff = 0;
				break;
			} else {
				if (estack_ax(stack, top)->u.s.literal_type ==
						ESTACK_STRING_LITERAL_TYPE_PLAIN) {
					ret = parse_char(estack_ax(stack, top),
						&char_ax, &offset_ax);
					/* Trailing wildcard matches end of bx. */
					if (ret == -1) {
						diff = 0;
						break;
					}
				}
				diff = -1;
				break;
			}
		}
		if (unlikely(char_ax == '\0')) {
			if (estack_bx(stack, top)->u.s.literal_type ==
					ESTACK_STRING_LITERAL_TYPE_PLAIN) {
				ret = parse_char(estack_bx(stack, top),
					&char_bx, &offset_bx);
				/* Trailing wildcard matches end of ax. */
				if (ret == -1) {
					diff = 0;
					break;
				}
			}
			diff = 1;
			break;
		}
		if (estack_bx(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_bx(stack, top),
				&char_bx, &offset_bx);
			if (ret == -1) {
				diff = 0;
				break;
			} else if (ret == -2) {
				/* Remember unknown escape for the ax side. */
				escaped_r0 = 1;
			}
			/* else compare both char */
		}
		if (estack_ax(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_ax(stack, top),
				&char_ax, &offset_ax);
			if (ret == -1) {
				diff = 0;
				break;
			} else if (ret == -2) {
				/* Unknown escape on ax only: bx sorts first. */
				if (!escaped_r0) {
					diff = -1;
					break;
				}
			} else {
				/* Unknown escape on bx only: ax sorts first. */
				if (escaped_r0) {
					diff = 1;
					break;
				}
			}
		} else {
			if (escaped_r0) {
				diff = 1;
				break;
			}
		}
		diff = char_bx - char_ax;
		if (diff != 0)
			break;
		offset_bx++;
		offset_ax++;
	}
	if (has_user)
		pagefault_enable();

	return diff;
}
209
/*
 * Filter interpreter stub which unconditionally discards the event.
 * NOTE(review): presumably installed when no valid filter bytecode is
 * linked — confirm at the call sites that select the interpreter.
 */
uint64_t lttng_bytecode_filter_interpret_false(void *filter_data,
		struct lttng_probe_ctx *lttng_probe_ctx,
		const char *filter_stack_data)
{
	return LTTNG_INTERPRETER_DISCARD;
}
216
/*
 * Capture interpreter stub which unconditionally discards (captures
 * nothing). NOTE(review): presumably installed when no valid capture
 * bytecode is linked — confirm at the call sites that select the
 * interpreter.
 */
uint64_t lttng_bytecode_capture_interpret_false(void *filter_data,
		struct lttng_probe_ctx *lttng_probe_ctx,
		const char *capture_stack_data,
		struct lttng_interpreter_output *output)
{
	return LTTNG_INTERPRETER_DISCARD;
}
224
#ifdef INTERPRETER_USE_SWITCH

/*
 * Fallback for compilers that do not support taking address of labels.
 *
 * Fix: read the instruction stream from bytecode->code[] like the
 * dispatch-table variant below does; bytecode->data[] is the runtime
 * side-data area (see dynamic_get_index()), not the opcode stream.
 */

#define START_OP							\
	start_pc = &bytecode->code[0];					\
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;	\
			pc = next_pc) {					\
		dbg_printk("LTTng: Executing op %s (%u)\n",		\
			lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
			(unsigned int) *(bytecode_opcode_t *) pc);	\
		switch (*(bytecode_opcode_t *) pc) {

#define OP(name)	case name

#define PO		break

#define END_OP		}						\
	}

#else

/*
 * Dispatch-table based interpreter, using the GCC/Clang labels-as-values
 * extension (computed goto).
 */

#define START_OP							\
	start_pc = &bytecode->code[0];					\
	pc = next_pc = start_pc;					\
	if (unlikely(pc - start_pc >= bytecode->len))			\
		goto end;						\
	goto *dispatch[*(bytecode_opcode_t *) pc];

#define OP(name)							\
LABEL_##name

#define PO								\
		pc = next_pc;						\
		goto *dispatch[*(bytecode_opcode_t *) pc];

#define END_OP

#endif

/* True for the two integer register types (signed and unsigned 64-bit). */
#define IS_INTEGER_REGISTER(reg_type) \
	(reg_type == REG_S64 || reg_type == REG_U64)
273
274 static int context_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
275 struct load_ptr *ptr,
276 uint32_t idx)
277 {
278
279 struct lttng_ctx_field *ctx_field;
280 struct lttng_event_field *field;
281 union lttng_ctx_value v;
282
283 ctx_field = &lttng_static_ctx->fields[idx];
284 field = &ctx_field->event_field;
285 ptr->type = LOAD_OBJECT;
286 /* field is only used for types nested within variants. */
287 ptr->field = NULL;
288
289 switch (field->type.atype) {
290 case atype_integer:
291 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
292 if (field->type.u.integer.signedness) {
293 ptr->object_type = OBJECT_TYPE_S64;
294 ptr->u.s64 = v.s64;
295 ptr->ptr = &ptr->u.s64;
296 } else {
297 ptr->object_type = OBJECT_TYPE_U64;
298 ptr->u.u64 = v.s64; /* Cast. */
299 ptr->ptr = &ptr->u.u64;
300 }
301 break;
302 case atype_enum_nestable:
303 {
304 const struct lttng_integer_type *itype =
305 &field->type.u.enum_nestable.container_type->u.integer;
306
307 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
308 if (itype->signedness) {
309 ptr->object_type = OBJECT_TYPE_SIGNED_ENUM;
310 ptr->u.s64 = v.s64;
311 ptr->ptr = &ptr->u.s64;
312 } else {
313 ptr->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
314 ptr->u.u64 = v.s64; /* Cast. */
315 ptr->ptr = &ptr->u.u64;
316 }
317 break;
318 }
319 case atype_array_nestable:
320 if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
321 printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
322 return -EINVAL;
323 }
324 if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
325 printk(KERN_WARNING "LTTng: bytecode: Only string arrays are supported for contexts.\n");
326 return -EINVAL;
327 }
328 ptr->object_type = OBJECT_TYPE_STRING;
329 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
330 ptr->ptr = v.str;
331 break;
332 case atype_sequence_nestable:
333 if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
334 printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
335 return -EINVAL;
336 }
337 if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
338 printk(KERN_WARNING "LTTng: bytecode: Only string sequences are supported for contexts.\n");
339 return -EINVAL;
340 }
341 ptr->object_type = OBJECT_TYPE_STRING;
342 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
343 ptr->ptr = v.str;
344 break;
345 case atype_string:
346 ptr->object_type = OBJECT_TYPE_STRING;
347 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
348 ptr->ptr = v.str;
349 break;
350 case atype_struct_nestable:
351 printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
352 return -EINVAL;
353 case atype_variant_nestable:
354 printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
355 return -EINVAL;
356 default:
357 printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d", (int) field->type.atype);
358 return -EINVAL;
359 }
360 return 0;
361 }
362
/*
 * Apply a "get index" operation to the pointer register at the top of
 * the stack, using the struct bytecode_get_index_data stored at byte
 * offset @index within the runtime data area.
 *
 * For array/sequence objects, advance the data pointer to the selected
 * element; for context roots, resolve the context field through
 * context_get_index(); for the payload root, offset into the event
 * payload. On success the register is left typed as REG_PTR.
 *
 * Returns 0 on success, negative error value on error.
 */
static int dynamic_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
		struct bytecode_runtime *runtime,
		uint64_t index, struct estack_entry *stack_top)
{
	int ret;
	const struct bytecode_get_index_data *gid;

	gid = (const struct bytecode_get_index_data *) &runtime->data[index];
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		switch (stack_top->u.ptr.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const char *ptr;

			/* Offset was validated against the static array length at link time. */
			WARN_ON_ONCE(gid->offset >= gid->array_len);
			/* Skip count (unsigned long) */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			/* Check the field type before clearing it: elements carry no field. */
			BUG_ON(stack_top->u.ptr.field->type.atype != atype_array_nestable);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const char *ptr;
			size_t ptr_seq_len;

			/* Sequence layout: unsigned long count, then data pointer. */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
			/* Bound-check the byte offset against the runtime length. */
			if (gid->offset >= gid->elem.len * ptr_seq_len) {
				ret = -EINVAL;
				goto end;
			}
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			/* Check the field type before clearing it: elements carry no field. */
			BUG_ON(stack_top->u.ptr.field->type.atype != atype_sequence_nestable);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			printk(KERN_WARNING "LTTng: bytecode: Nested structures are not supported yet.\n");
			ret = -EINVAL;
			goto end;
		case OBJECT_TYPE_VARIANT:
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d",
				(int) stack_top->u.ptr.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:	/* Fall-through */
	{
		ret = context_get_index(lttng_probe_ctx,
				&stack_top->u.ptr,
				gid->ctx_index);
		if (ret) {
			goto end;
		}
		break;
	}
	case LOAD_ROOT_PAYLOAD:
		stack_top->u.ptr.ptr += gid->offset;
		/* Strings are stored by reference in the payload: dereference. */
		if (gid->elem.type == OBJECT_TYPE_STRING)
			stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
		stack_top->u.ptr.object_type = gid->elem.type;
		stack_top->u.ptr.type = LOAD_OBJECT;
		stack_top->u.ptr.field = gid->field;
		stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
		break;
	}

	stack_top->type = REG_PTR;

	return 0;

end:
	return ret;
}
449
/*
 * Materialize the object referenced by the REG_PTR register @stack_top
 * into a value register: integers and enumerations become REG_S64 or
 * REG_U64 (byte-swapped first when rev_bo is set), strings and string
 * sequences become REG_STRING.
 *
 * Only LOAD_OBJECT pointers can be loaded; roots need a field lookup
 * first. Compound types (array/sequence/struct/variant), dynamic and
 * double objects are rejected.
 *
 * Returns 0 on success, negative error value on error.
 */
static int dynamic_load_field(struct estack_entry *stack_top)
{
	int ret;

	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->u.ptr.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printk("op load field s8\n");
		stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_S64;
		break;
	case OBJECT_TYPE_S16:
	{
		int16_t tmp;

		dbg_printk("op load field s16\n");
		tmp = *(int16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S32:
	{
		int32_t tmp;

		dbg_printk("op load field s32\n");
		tmp = *(int32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S64:
	{
		int64_t tmp;

		dbg_printk("op load field s64\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_SIGNED_ENUM:
	{
		int64_t tmp;

		dbg_printk("op load field signed enumeration\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_U8:
		dbg_printk("op load field u8\n");
		stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_U64;
		break;
	case OBJECT_TYPE_U16:
	{
		uint16_t tmp;

		dbg_printk("op load field u16\n");
		tmp = *(uint16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U32:
	{
		uint32_t tmp;

		dbg_printk("op load field u32\n");
		tmp = *(uint32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U64:
	{
		uint64_t tmp;

		dbg_printk("op load field u64\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_UNSIGNED_ENUM:
	{
		uint64_t tmp;

		dbg_printk("op load field unsigned enumeration\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_STRING:
	{
		const char *str;

		dbg_printk("op load field string\n");
		str = (const char *) stack_top->u.ptr.ptr;
		stack_top->u.s.str = str;
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Bytecode warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		/* NUL-terminated string: no length bound. */
		stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_STRING_SEQUENCE:
	{
		const char *ptr;

		dbg_printk("op load field string sequence\n");
		/* Sequence layout: unsigned long length, then data pointer. */
		ptr = stack_top->u.ptr.ptr;
		stack_top->u.s.seq_len = *(unsigned long *) ptr;
		stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Bytecode warning: loading a NULL sequence.\n");
			ret = -EINVAL;
			goto end;
		}
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_DYNAMIC:
		/*
		 * Dynamic types in context are looked up
		 * by context get index.
		 */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_DOUBLE:
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
631
/*
 * Translate the top-of-stack register @ax into the interpreter output
 * structure @output consumed by the capture path.
 *
 * REG_PTR registers holding scalar or string objects are first
 * materialized with dynamic_load_field() and re-dispatched; sequences
 * and arrays are emitted as LTTNG_INTERPRETER_TYPE_SEQUENCE with their
 * element type taken from the field description.
 *
 * Returns LTTNG_INTERPRETER_RECORD_FLAG on success, negative error
 * value on error.
 */
static
int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
		struct lttng_interpreter_output *output)
{
	int ret;

again:
	switch (ax->type) {
	case REG_S64:
		output->type = LTTNG_INTERPRETER_TYPE_S64;
		output->u.s = ax->u.v;
		break;
	case REG_U64:
		output->type = LTTNG_INTERPRETER_TYPE_U64;
		output->u.u = (uint64_t) ax->u.v;
		break;
	case REG_STRING:
		output->type = LTTNG_INTERPRETER_TYPE_STRING;
		output->u.str.str = ax->u.s.str;
		output->u.str.len = ax->u.s.seq_len;
		break;
	case REG_PTR:
		switch (ax->u.ptr.object_type) {
		case OBJECT_TYPE_S8:
		case OBJECT_TYPE_S16:
		case OBJECT_TYPE_S32:
		case OBJECT_TYPE_S64:
		case OBJECT_TYPE_U8:
		case OBJECT_TYPE_U16:
		case OBJECT_TYPE_U32:
		case OBJECT_TYPE_U64:
		case OBJECT_TYPE_DOUBLE:
		case OBJECT_TYPE_STRING:
		case OBJECT_TYPE_STRING_SEQUENCE:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			/* Retry after loading ptr into stack top. */
			goto again;
		case OBJECT_TYPE_SEQUENCE:
			/* Sequence layout: unsigned long count, then data pointer. */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
			output->u.sequence.nested_type = ax->u.ptr.field->type.u.sequence_nestable.elem_type;
			break;
		case OBJECT_TYPE_ARRAY:
			/* Skip count (unsigned long) */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = ax->u.ptr.field->type.u.array_nestable.length;
			output->u.sequence.nested_type = ax->u.ptr.field->type.u.array_nestable.elem_type;
			break;
		case OBJECT_TYPE_SIGNED_ENUM:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			output->type = LTTNG_INTERPRETER_TYPE_SIGNED_ENUM;
			output->u.s = ax->u.v;
			break;
		case OBJECT_TYPE_UNSIGNED_ENUM:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			output->type = LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM;
			output->u.u = ax->u.v;
			break;
		case OBJECT_TYPE_STRUCT:
		case OBJECT_TYPE_VARIANT:
		default:
			return -EINVAL;
		}

		break;
	case REG_STAR_GLOB_STRING:
	case REG_TYPE_UNKNOWN:
	default:
		return -EINVAL;
	}

	return LTTNG_INTERPRETER_RECORD_FLAG;
}
713
714 /*
715 * Return 0 (discard), or raise the 0x1 flag (log event).
716 * Currently, other flags are kept for future extensions and have no
717 * effect.
718 */
719 static
720 uint64_t bytecode_interpret(void *interpreter_data,
721 struct lttng_probe_ctx *lttng_probe_ctx,
722 const char *interpreter_stack_data,
723 struct lttng_interpreter_output *output)
724 {
725 struct bytecode_runtime *bytecode = interpreter_data;
726 void *pc, *next_pc, *start_pc;
727 int ret = -EINVAL;
728 uint64_t retval = 0;
729 struct estack _stack;
730 struct estack *stack = &_stack;
731 register int64_t ax = 0, bx = 0;
732 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
733 register int top = INTERPRETER_STACK_EMPTY;
734 #ifndef INTERPRETER_USE_SWITCH
735 static void *dispatch[NR_BYTECODE_OPS] = {
736 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
737
738 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
739
740 /* binary */
741 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
742 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
743 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
744 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
745 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
746 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
747 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
748 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
749 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
750 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
751
752 /* binary comparators */
753 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
754 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
755 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
756 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
757 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
758 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
759
760 /* string binary comparator */
761 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
762 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
763 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
764 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
765 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
766 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
767
768 /* globbing pattern binary comparator */
769 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
770 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
771
772 /* s64 binary comparator */
773 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
774 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
775 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
776 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
777 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
778 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
779
780 /* double binary comparator */
781 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
782 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
783 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
784 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
785 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
786 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
787
788 /* Mixed S64-double binary comparators */
789 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
790 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
791 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
792 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
793 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
794 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
795
796 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
797 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
798 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
799 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
800 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
801 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
802
803 /* unary */
804 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
805 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
806 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
807 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
808 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
809 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
810 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
811 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
812 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
813
814 /* logical */
815 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
816 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
817
818 /* load field ref */
819 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
820 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
821 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
822 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
823 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
824
825 /* load from immediate operand */
826 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
827 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
828 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
829 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
830
831 /* cast */
832 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
833 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
834 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
835
836 /* get context ref */
837 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
838 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
839 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
840 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
841
842 /* load userspace field ref */
843 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING,
844 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE,
845
846 /* Instructions for recursive traversal through composed types. */
847 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
848 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
849 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
850
851 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
852 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
853 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
854 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
855
856 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
857 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
858 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
859 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
860 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
861 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
862 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
863 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
864 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
865 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
866 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
867 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
868
869 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
870
871 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
872 };
873 #endif /* #ifndef INTERPRETER_USE_SWITCH */
874
875 START_OP
876
877 OP(BYTECODE_OP_UNKNOWN):
878 OP(BYTECODE_OP_LOAD_FIELD_REF):
879 OP(BYTECODE_OP_GET_CONTEXT_REF):
880 #ifdef INTERPRETER_USE_SWITCH
881 default:
882 #endif /* INTERPRETER_USE_SWITCH */
883 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
884 (unsigned int) *(bytecode_opcode_t *) pc);
885 ret = -EINVAL;
886 goto end;
887
888 OP(BYTECODE_OP_RETURN):
889 OP(BYTECODE_OP_RETURN_S64):
890 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
891 switch (estack_ax_t) {
892 case REG_S64:
893 case REG_U64:
894 retval = !!estack_ax_v;
895 break;
896 case REG_DOUBLE:
897 case REG_STRING:
898 case REG_PTR:
899 if (!output) {
900 ret = -EINVAL;
901 goto end;
902 }
903 retval = 0;
904 break;
905 case REG_STAR_GLOB_STRING:
906 case REG_TYPE_UNKNOWN:
907 ret = -EINVAL;
908 goto end;
909 }
910 ret = 0;
911 goto end;
912
913 /* binary */
914 OP(BYTECODE_OP_MUL):
915 OP(BYTECODE_OP_DIV):
916 OP(BYTECODE_OP_MOD):
917 OP(BYTECODE_OP_PLUS):
918 OP(BYTECODE_OP_MINUS):
919 printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
920 (unsigned int) *(bytecode_opcode_t *) pc);
921 ret = -EINVAL;
922 goto end;
923
924 OP(BYTECODE_OP_EQ):
925 OP(BYTECODE_OP_NE):
926 OP(BYTECODE_OP_GT):
927 OP(BYTECODE_OP_LT):
928 OP(BYTECODE_OP_GE):
929 OP(BYTECODE_OP_LE):
930 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
931 (unsigned int) *(bytecode_opcode_t *) pc);
932 ret = -EINVAL;
933 goto end;
934
935 OP(BYTECODE_OP_EQ_STRING):
936 {
937 int res;
938
939 res = (stack_strcmp(stack, top, "==") == 0);
940 estack_pop(stack, top, ax, bx, ax_t, bx_t);
941 estack_ax_v = res;
942 estack_ax_t = REG_S64;
943 next_pc += sizeof(struct binary_op);
944 PO;
945 }
946 OP(BYTECODE_OP_NE_STRING):
947 {
948 int res;
949
950 res = (stack_strcmp(stack, top, "!=") != 0);
951 estack_pop(stack, top, ax, bx, ax_t, bx_t);
952 estack_ax_v = res;
953 estack_ax_t = REG_S64;
954 next_pc += sizeof(struct binary_op);
955 PO;
956 }
957 OP(BYTECODE_OP_GT_STRING):
958 {
959 int res;
960
961 res = (stack_strcmp(stack, top, ">") > 0);
962 estack_pop(stack, top, ax, bx, ax_t, bx_t);
963 estack_ax_v = res;
964 estack_ax_t = REG_S64;
965 next_pc += sizeof(struct binary_op);
966 PO;
967 }
968 OP(BYTECODE_OP_LT_STRING):
969 {
970 int res;
971
972 res = (stack_strcmp(stack, top, "<") < 0);
973 estack_pop(stack, top, ax, bx, ax_t, bx_t);
974 estack_ax_v = res;
975 estack_ax_t = REG_S64;
976 next_pc += sizeof(struct binary_op);
977 PO;
978 }
979 OP(BYTECODE_OP_GE_STRING):
980 {
981 int res;
982
983 res = (stack_strcmp(stack, top, ">=") >= 0);
984 estack_pop(stack, top, ax, bx, ax_t, bx_t);
985 estack_ax_v = res;
986 estack_ax_t = REG_S64;
987 next_pc += sizeof(struct binary_op);
988 PO;
989 }
990 OP(BYTECODE_OP_LE_STRING):
991 {
992 int res;
993
994 res = (stack_strcmp(stack, top, "<=") <= 0);
995 estack_pop(stack, top, ax, bx, ax_t, bx_t);
996 estack_ax_v = res;
997 estack_ax_t = REG_S64;
998 next_pc += sizeof(struct binary_op);
999 PO;
1000 }
1001
1002 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
1003 {
1004 int res;
1005
1006 res = (stack_star_glob_match(stack, top, "==") == 0);
1007 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1008 estack_ax_v = res;
1009 estack_ax_t = REG_S64;
1010 next_pc += sizeof(struct binary_op);
1011 PO;
1012 }
1013 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
1014 {
1015 int res;
1016
1017 res = (stack_star_glob_match(stack, top, "!=") != 0);
1018 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1019 estack_ax_v = res;
1020 estack_ax_t = REG_S64;
1021 next_pc += sizeof(struct binary_op);
1022 PO;
1023 }
1024
1025 OP(BYTECODE_OP_EQ_S64):
1026 {
1027 int res;
1028
1029 res = (estack_bx_v == estack_ax_v);
1030 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1031 estack_ax_v = res;
1032 estack_ax_t = REG_S64;
1033 next_pc += sizeof(struct binary_op);
1034 PO;
1035 }
1036 OP(BYTECODE_OP_NE_S64):
1037 {
1038 int res;
1039
1040 res = (estack_bx_v != estack_ax_v);
1041 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1042 estack_ax_v = res;
1043 estack_ax_t = REG_S64;
1044 next_pc += sizeof(struct binary_op);
1045 PO;
1046 }
1047 OP(BYTECODE_OP_GT_S64):
1048 {
1049 int res;
1050
1051 res = (estack_bx_v > estack_ax_v);
1052 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1053 estack_ax_v = res;
1054 estack_ax_t = REG_S64;
1055 next_pc += sizeof(struct binary_op);
1056 PO;
1057 }
1058 OP(BYTECODE_OP_LT_S64):
1059 {
1060 int res;
1061
1062 res = (estack_bx_v < estack_ax_v);
1063 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1064 estack_ax_v = res;
1065 estack_ax_t = REG_S64;
1066 next_pc += sizeof(struct binary_op);
1067 PO;
1068 }
1069 OP(BYTECODE_OP_GE_S64):
1070 {
1071 int res;
1072
1073 res = (estack_bx_v >= estack_ax_v);
1074 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1075 estack_ax_v = res;
1076 estack_ax_t = REG_S64;
1077 next_pc += sizeof(struct binary_op);
1078 PO;
1079 }
1080 OP(BYTECODE_OP_LE_S64):
1081 {
1082 int res;
1083
1084 res = (estack_bx_v <= estack_ax_v);
1085 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1086 estack_ax_v = res;
1087 estack_ax_t = REG_S64;
1088 next_pc += sizeof(struct binary_op);
1089 PO;
1090 }
1091
1092 OP(BYTECODE_OP_EQ_DOUBLE):
1093 OP(BYTECODE_OP_NE_DOUBLE):
1094 OP(BYTECODE_OP_GT_DOUBLE):
1095 OP(BYTECODE_OP_LT_DOUBLE):
1096 OP(BYTECODE_OP_GE_DOUBLE):
1097 OP(BYTECODE_OP_LE_DOUBLE):
1098 {
1099 BUG_ON(1);
1100 PO;
1101 }
1102
1103 /* Mixed S64-double binary comparators */
1104 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1105 OP(BYTECODE_OP_NE_DOUBLE_S64):
1106 OP(BYTECODE_OP_GT_DOUBLE_S64):
1107 OP(BYTECODE_OP_LT_DOUBLE_S64):
1108 OP(BYTECODE_OP_GE_DOUBLE_S64):
1109 OP(BYTECODE_OP_LE_DOUBLE_S64):
1110 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1111 OP(BYTECODE_OP_NE_S64_DOUBLE):
1112 OP(BYTECODE_OP_GT_S64_DOUBLE):
1113 OP(BYTECODE_OP_LT_S64_DOUBLE):
1114 OP(BYTECODE_OP_GE_S64_DOUBLE):
1115 OP(BYTECODE_OP_LE_S64_DOUBLE):
1116 {
1117 BUG_ON(1);
1118 PO;
1119 }
1120 OP(BYTECODE_OP_BIT_RSHIFT):
1121 {
1122 int64_t res;
1123
1124 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1125 ret = -EINVAL;
1126 goto end;
1127 }
1128
1129 /* Catch undefined behavior. */
1130 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1131 ret = -EINVAL;
1132 goto end;
1133 }
1134 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1135 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1136 estack_ax_v = res;
1137 estack_ax_t = REG_U64;
1138 next_pc += sizeof(struct binary_op);
1139 PO;
1140 }
1141 OP(BYTECODE_OP_BIT_LSHIFT):
1142 {
1143 int64_t res;
1144
1145 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1146 ret = -EINVAL;
1147 goto end;
1148 }
1149
1150 /* Catch undefined behavior. */
1151 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1152 ret = -EINVAL;
1153 goto end;
1154 }
1155 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1156 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1157 estack_ax_v = res;
1158 estack_ax_t = REG_U64;
1159 next_pc += sizeof(struct binary_op);
1160 PO;
1161 }
1162 OP(BYTECODE_OP_BIT_AND):
1163 {
1164 int64_t res;
1165
1166 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1167 ret = -EINVAL;
1168 goto end;
1169 }
1170
1171 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1172 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1173 estack_ax_v = res;
1174 estack_ax_t = REG_U64;
1175 next_pc += sizeof(struct binary_op);
1176 PO;
1177 }
1178 OP(BYTECODE_OP_BIT_OR):
1179 {
1180 int64_t res;
1181
1182 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1183 ret = -EINVAL;
1184 goto end;
1185 }
1186
1187 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1188 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1189 estack_ax_v = res;
1190 estack_ax_t = REG_U64;
1191 next_pc += sizeof(struct binary_op);
1192 PO;
1193 }
1194 OP(BYTECODE_OP_BIT_XOR):
1195 {
1196 int64_t res;
1197
1198 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1199 ret = -EINVAL;
1200 goto end;
1201 }
1202
1203 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1204 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1205 estack_ax_v = res;
1206 estack_ax_t = REG_U64;
1207 next_pc += sizeof(struct binary_op);
1208 PO;
1209 }
1210
1211 /* unary */
1212 OP(BYTECODE_OP_UNARY_PLUS):
1213 OP(BYTECODE_OP_UNARY_MINUS):
1214 OP(BYTECODE_OP_UNARY_NOT):
1215 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1216 (unsigned int) *(bytecode_opcode_t *) pc);
1217 ret = -EINVAL;
1218 goto end;
1219
1220
1221 OP(BYTECODE_OP_UNARY_BIT_NOT):
1222 {
1223 estack_ax_v = ~(uint64_t) estack_ax_v;
1224 estack_ax_t = REG_S64;
1225 next_pc += sizeof(struct unary_op);
1226 PO;
1227 }
1228
1229 OP(BYTECODE_OP_UNARY_PLUS_S64):
1230 {
1231 next_pc += sizeof(struct unary_op);
1232 PO;
1233 }
1234 OP(BYTECODE_OP_UNARY_MINUS_S64):
1235 {
1236 estack_ax_v = -estack_ax_v;
1237 estack_ax_t = REG_S64;
1238 next_pc += sizeof(struct unary_op);
1239 PO;
1240 }
1241 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1242 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1243 {
1244 BUG_ON(1);
1245 PO;
1246 }
1247 OP(BYTECODE_OP_UNARY_NOT_S64):
1248 {
1249 estack_ax_v = !estack_ax_v;
1250 estack_ax_t = REG_S64;
1251 next_pc += sizeof(struct unary_op);
1252 PO;
1253 }
1254 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1255 {
1256 BUG_ON(1);
1257 PO;
1258 }
1259
1260 /* logical */
1261 OP(BYTECODE_OP_AND):
1262 {
1263 struct logical_op *insn = (struct logical_op *) pc;
1264
1265 /* If AX is 0, skip and evaluate to 0 */
1266 if (unlikely(estack_ax_v == 0)) {
1267 dbg_printk("Jumping to bytecode offset %u\n",
1268 (unsigned int) insn->skip_offset);
1269 next_pc = start_pc + insn->skip_offset;
1270 } else {
1271 /* Pop 1 when jump not taken */
1272 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1273 next_pc += sizeof(struct logical_op);
1274 }
1275 PO;
1276 }
1277 OP(BYTECODE_OP_OR):
1278 {
1279 struct logical_op *insn = (struct logical_op *) pc;
1280
1281 /* If AX is nonzero, skip and evaluate to 1 */
1282
1283 if (unlikely(estack_ax_v != 0)) {
1284 estack_ax_v = 1;
1285 dbg_printk("Jumping to bytecode offset %u\n",
1286 (unsigned int) insn->skip_offset);
1287 next_pc = start_pc + insn->skip_offset;
1288 } else {
1289 /* Pop 1 when jump not taken */
1290 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1291 next_pc += sizeof(struct logical_op);
1292 }
1293 PO;
1294 }
1295
1296
1297 /* load field ref */
1298 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1299 {
1300 struct load_op *insn = (struct load_op *) pc;
1301 struct field_ref *ref = (struct field_ref *) insn->data;
1302
1303 dbg_printk("load field ref offset %u type string\n",
1304 ref->offset);
1305 estack_push(stack, top, ax, bx, ax_t, bx_t);
1306 estack_ax(stack, top)->u.s.str =
1307 *(const char * const *) &interpreter_stack_data[ref->offset];
1308 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1309 dbg_printk("Bytecode warning: loading a NULL string.\n");
1310 ret = -EINVAL;
1311 goto end;
1312 }
1313 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1314 estack_ax(stack, top)->u.s.literal_type =
1315 ESTACK_STRING_LITERAL_TYPE_NONE;
1316 estack_ax(stack, top)->u.s.user = 0;
1317 estack_ax(stack, top)->type = REG_STRING;
1318 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1319 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1320 PO;
1321 }
1322
1323 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
1324 {
1325 struct load_op *insn = (struct load_op *) pc;
1326 struct field_ref *ref = (struct field_ref *) insn->data;
1327
1328 dbg_printk("load field ref offset %u type sequence\n",
1329 ref->offset);
1330 estack_push(stack, top, ax, bx, ax_t, bx_t);
1331 estack_ax(stack, top)->u.s.seq_len =
1332 *(unsigned long *) &interpreter_stack_data[ref->offset];
1333 estack_ax(stack, top)->u.s.str =
1334 *(const char **) (&interpreter_stack_data[ref->offset
1335 + sizeof(unsigned long)]);
1336 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1337 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1338 ret = -EINVAL;
1339 goto end;
1340 }
1341 estack_ax(stack, top)->u.s.literal_type =
1342 ESTACK_STRING_LITERAL_TYPE_NONE;
1343 estack_ax(stack, top)->u.s.user = 0;
1344 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1345 PO;
1346 }
1347
1348 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
1349 {
1350 struct load_op *insn = (struct load_op *) pc;
1351 struct field_ref *ref = (struct field_ref *) insn->data;
1352
1353 dbg_printk("load field ref offset %u type s64\n",
1354 ref->offset);
1355 estack_push(stack, top, ax, bx, ax_t, bx_t);
1356 estack_ax_v =
1357 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
1358 estack_ax_t = REG_S64;
1359 dbg_printk("ref load s64 %lld\n",
1360 (long long) estack_ax_v);
1361 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1362 PO;
1363 }
1364
1365 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
1366 {
1367 BUG_ON(1);
1368 PO;
1369 }
1370
1371 /* load from immediate operand */
1372 OP(BYTECODE_OP_LOAD_STRING):
1373 {
1374 struct load_op *insn = (struct load_op *) pc;
1375
1376 dbg_printk("load string %s\n", insn->data);
1377 estack_push(stack, top, ax, bx, ax_t, bx_t);
1378 estack_ax(stack, top)->u.s.str = insn->data;
1379 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1380 estack_ax(stack, top)->u.s.literal_type =
1381 ESTACK_STRING_LITERAL_TYPE_PLAIN;
1382 estack_ax(stack, top)->u.s.user = 0;
1383 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1384 PO;
1385 }
1386
1387 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
1388 {
1389 struct load_op *insn = (struct load_op *) pc;
1390
1391 dbg_printk("load globbing pattern %s\n", insn->data);
1392 estack_push(stack, top, ax, bx, ax_t, bx_t);
1393 estack_ax(stack, top)->u.s.str = insn->data;
1394 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1395 estack_ax(stack, top)->u.s.literal_type =
1396 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
1397 estack_ax(stack, top)->u.s.user = 0;
1398 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1399 PO;
1400 }
1401
1402 OP(BYTECODE_OP_LOAD_S64):
1403 {
1404 struct load_op *insn = (struct load_op *) pc;
1405
1406 estack_push(stack, top, ax, bx, ax_t, bx_t);
1407 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
1408 estack_ax_t = REG_S64;
1409 dbg_printk("load s64 %lld\n",
1410 (long long) estack_ax_v);
1411 next_pc += sizeof(struct load_op)
1412 + sizeof(struct literal_numeric);
1413 PO;
1414 }
1415
1416 OP(BYTECODE_OP_LOAD_DOUBLE):
1417 {
1418 BUG_ON(1);
1419 PO;
1420 }
1421
1422 /* cast */
1423 OP(BYTECODE_OP_CAST_TO_S64):
1424 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1425 (unsigned int) *(bytecode_opcode_t *) pc);
1426 ret = -EINVAL;
1427 goto end;
1428
1429 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
1430 {
1431 BUG_ON(1);
1432 PO;
1433 }
1434
1435 OP(BYTECODE_OP_CAST_NOP):
1436 {
1437 next_pc += sizeof(struct cast_op);
1438 PO;
1439 }
1440
1441 /* get context ref */
1442 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
1443 {
1444 struct load_op *insn = (struct load_op *) pc;
1445 struct field_ref *ref = (struct field_ref *) insn->data;
1446 struct lttng_ctx_field *ctx_field;
1447 union lttng_ctx_value v;
1448
1449 dbg_printk("get context ref offset %u type string\n",
1450 ref->offset);
1451 ctx_field = &lttng_static_ctx->fields[ref->offset];
1452 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1453 estack_push(stack, top, ax, bx, ax_t, bx_t);
1454 estack_ax(stack, top)->u.s.str = v.str;
1455 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1456 dbg_printk("Bytecode warning: loading a NULL string.\n");
1457 ret = -EINVAL;
1458 goto end;
1459 }
1460 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1461 estack_ax(stack, top)->u.s.literal_type =
1462 ESTACK_STRING_LITERAL_TYPE_NONE;
1463 estack_ax(stack, top)->u.s.user = 0;
1464 estack_ax(stack, top)->type = REG_STRING;
1465 dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
1466 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1467 PO;
1468 }
1469
1470 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
1471 {
1472 struct load_op *insn = (struct load_op *) pc;
1473 struct field_ref *ref = (struct field_ref *) insn->data;
1474 struct lttng_ctx_field *ctx_field;
1475 union lttng_ctx_value v;
1476
1477 dbg_printk("get context ref offset %u type s64\n",
1478 ref->offset);
1479 ctx_field = &lttng_static_ctx->fields[ref->offset];
1480 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1481 estack_push(stack, top, ax, bx, ax_t, bx_t);
1482 estack_ax_v = v.s64;
1483 estack_ax_t = REG_S64;
1484 dbg_printk("ref get context s64 %lld\n",
1485 (long long) estack_ax_v);
1486 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1487 PO;
1488 }
1489
1490 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
1491 {
1492 BUG_ON(1);
1493 PO;
1494 }
1495
1496 /* load userspace field ref */
1497 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING):
1498 {
1499 struct load_op *insn = (struct load_op *) pc;
1500 struct field_ref *ref = (struct field_ref *) insn->data;
1501
1502 dbg_printk("load field ref offset %u type user string\n",
1503 ref->offset);
1504 estack_push(stack, top, ax, bx, ax_t, bx_t);
1505 estack_ax(stack, top)->u.s.user_str =
1506 *(const char * const *) &interpreter_stack_data[ref->offset];
1507 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1508 dbg_printk("Bytecode warning: loading a NULL string.\n");
1509 ret = -EINVAL;
1510 goto end;
1511 }
1512 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1513 estack_ax(stack, top)->u.s.literal_type =
1514 ESTACK_STRING_LITERAL_TYPE_NONE;
1515 estack_ax(stack, top)->u.s.user = 1;
1516 estack_ax(stack, top)->type = REG_STRING;
1517 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1518 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1519 PO;
1520 }
1521
1522 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1523 {
1524 struct load_op *insn = (struct load_op *) pc;
1525 struct field_ref *ref = (struct field_ref *) insn->data;
1526
1527 dbg_printk("load field ref offset %u type user sequence\n",
1528 ref->offset);
1529 estack_push(stack, top, ax, bx, ax_t, bx_t);
1530 estack_ax(stack, top)->u.s.seq_len =
1531 *(unsigned long *) &interpreter_stack_data[ref->offset];
1532 estack_ax(stack, top)->u.s.user_str =
1533 *(const char **) (&interpreter_stack_data[ref->offset
1534 + sizeof(unsigned long)]);
1535 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1536 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1537 ret = -EINVAL;
1538 goto end;
1539 }
1540 estack_ax(stack, top)->u.s.literal_type =
1541 ESTACK_STRING_LITERAL_TYPE_NONE;
1542 estack_ax(stack, top)->u.s.user = 1;
1543 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1544 PO;
1545 }
1546
1547 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
1548 {
1549 dbg_printk("op get context root\n");
1550 estack_push(stack, top, ax, bx, ax_t, bx_t);
1551 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
1552 /* "field" only needed for variants. */
1553 estack_ax(stack, top)->u.ptr.field = NULL;
1554 estack_ax(stack, top)->type = REG_PTR;
1555 next_pc += sizeof(struct load_op);
1556 PO;
1557 }
1558
1559 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
1560 {
1561 BUG_ON(1);
1562 PO;
1563 }
1564
1565 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
1566 {
1567 dbg_printk("op get app payload root\n");
1568 estack_push(stack, top, ax, bx, ax_t, bx_t);
1569 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
1570 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
1571 /* "field" only needed for variants. */
1572 estack_ax(stack, top)->u.ptr.field = NULL;
1573 estack_ax(stack, top)->type = REG_PTR;
1574 next_pc += sizeof(struct load_op);
1575 PO;
1576 }
1577
1578 OP(BYTECODE_OP_GET_SYMBOL):
1579 {
1580 dbg_printk("op get symbol\n");
1581 switch (estack_ax(stack, top)->u.ptr.type) {
1582 case LOAD_OBJECT:
1583 printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
1584 ret = -EINVAL;
1585 goto end;
1586 case LOAD_ROOT_CONTEXT:
1587 case LOAD_ROOT_APP_CONTEXT:
1588 case LOAD_ROOT_PAYLOAD:
1589 /*
1590 * symbol lookup is performed by
1591 * specialization.
1592 */
1593 ret = -EINVAL;
1594 goto end;
1595 }
1596 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1597 PO;
1598 }
1599
1600 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
1601 {
1602 /*
1603 * Used for first variant encountered in a
1604 * traversal. Variants are not implemented yet.
1605 */
1606 ret = -EINVAL;
1607 goto end;
1608 }
1609
1610 OP(BYTECODE_OP_GET_INDEX_U16):
1611 {
1612 struct load_op *insn = (struct load_op *) pc;
1613 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1614
1615 dbg_printk("op get index u16\n");
1616 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1617 if (ret)
1618 goto end;
1619 estack_ax_v = estack_ax(stack, top)->u.v;
1620 estack_ax_t = estack_ax(stack, top)->type;
1621 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1622 PO;
1623 }
1624
1625 OP(BYTECODE_OP_GET_INDEX_U64):
1626 {
1627 struct load_op *insn = (struct load_op *) pc;
1628 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1629
1630 dbg_printk("op get index u64\n");
1631 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1632 if (ret)
1633 goto end;
1634 estack_ax_v = estack_ax(stack, top)->u.v;
1635 estack_ax_t = estack_ax(stack, top)->type;
1636 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1637 PO;
1638 }
1639
1640 OP(BYTECODE_OP_LOAD_FIELD):
1641 {
1642 dbg_printk("op load field\n");
1643 ret = dynamic_load_field(estack_ax(stack, top));
1644 if (ret)
1645 goto end;
1646 estack_ax_v = estack_ax(stack, top)->u.v;
1647 estack_ax_t = estack_ax(stack, top)->type;
1648 next_pc += sizeof(struct load_op);
1649 PO;
1650 }
1651
1652 OP(BYTECODE_OP_LOAD_FIELD_S8):
1653 {
1654 dbg_printk("op load field s8\n");
1655
1656 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1657 estack_ax_t = REG_S64;
1658 next_pc += sizeof(struct load_op);
1659 PO;
1660 }
1661 OP(BYTECODE_OP_LOAD_FIELD_S16):
1662 {
1663 dbg_printk("op load field s16\n");
1664
1665 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1666 estack_ax_t = REG_S64;
1667 next_pc += sizeof(struct load_op);
1668 PO;
1669 }
1670 OP(BYTECODE_OP_LOAD_FIELD_S32):
1671 {
1672 dbg_printk("op load field s32\n");
1673
1674 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1675 estack_ax_t = REG_S64;
1676 next_pc += sizeof(struct load_op);
1677 PO;
1678 }
1679 OP(BYTECODE_OP_LOAD_FIELD_S64):
1680 {
1681 dbg_printk("op load field s64\n");
1682
1683 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1684 estack_ax_t = REG_S64;
1685 next_pc += sizeof(struct load_op);
1686 PO;
1687 }
1688 OP(BYTECODE_OP_LOAD_FIELD_U8):
1689 {
1690 dbg_printk("op load field u8\n");
1691
1692 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1693 estack_ax_t = REG_S64;
1694 next_pc += sizeof(struct load_op);
1695 PO;
1696 }
1697 OP(BYTECODE_OP_LOAD_FIELD_U16):
1698 {
1699 dbg_printk("op load field u16\n");
1700
1701 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1702 estack_ax_t = REG_S64;
1703 next_pc += sizeof(struct load_op);
1704 PO;
1705 }
1706 OP(BYTECODE_OP_LOAD_FIELD_U32):
1707 {
1708 dbg_printk("op load field u32\n");
1709
1710 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1711 estack_ax_t = REG_S64;
1712 next_pc += sizeof(struct load_op);
1713 PO;
1714 }
1715 OP(BYTECODE_OP_LOAD_FIELD_U64):
1716 {
1717 dbg_printk("op load field u64\n");
1718
1719 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1720 estack_ax_t = REG_S64;
1721 next_pc += sizeof(struct load_op);
1722 PO;
1723 }
1724 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
1725 {
1726 ret = -EINVAL;
1727 goto end;
1728 }
1729
1730 OP(BYTECODE_OP_LOAD_FIELD_STRING):
1731 {
1732 const char *str;
1733
1734 dbg_printk("op load field string\n");
1735 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
1736 estack_ax(stack, top)->u.s.str = str;
1737 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1738 dbg_printk("Bytecode warning: loading a NULL string.\n");
1739 ret = -EINVAL;
1740 goto end;
1741 }
1742 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1743 estack_ax(stack, top)->u.s.literal_type =
1744 ESTACK_STRING_LITERAL_TYPE_NONE;
1745 estack_ax(stack, top)->type = REG_STRING;
1746 next_pc += sizeof(struct load_op);
1747 PO;
1748 }
1749
1750 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
1751 {
1752 const char *ptr;
1753
1754 dbg_printk("op load field string sequence\n");
1755 ptr = estack_ax(stack, top)->u.ptr.ptr;
1756 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
1757 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
1758 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1759 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1760 ret = -EINVAL;
1761 goto end;
1762 }
1763 estack_ax(stack, top)->u.s.literal_type =
1764 ESTACK_STRING_LITERAL_TYPE_NONE;
1765 estack_ax(stack, top)->type = REG_STRING;
1766 next_pc += sizeof(struct load_op);
1767 PO;
1768 }
1769
1770 END_OP
1771 end:
1772 /* Return _DISCARD on error. */
1773 if (ret)
1774 return LTTNG_INTERPRETER_DISCARD;
1775
1776 if (output) {
1777 return lttng_bytecode_interpret_format_output(
1778 estack_ax(stack, top), output);
1779 }
1780
1781 return retval;
1782 }
1783 LTTNG_STACK_FRAME_NON_STANDARD(bytecode_interpret);
1784
1785 uint64_t lttng_bytecode_filter_interpret(void *filter_data,
1786 struct lttng_probe_ctx *lttng_probe_ctx,
1787 const char *filter_stack_data)
1788 {
1789 return bytecode_interpret(filter_data, lttng_probe_ctx,
1790 filter_stack_data, NULL);
1791 }
1792
1793 uint64_t lttng_bytecode_capture_interpret(void *capture_data,
1794 struct lttng_probe_ctx *lttng_probe_ctx,
1795 const char *capture_stack_data,
1796 struct lttng_interpreter_output *output)
1797 {
1798 return bytecode_interpret(capture_data, lttng_probe_ctx,
1799 capture_stack_data, output);
1800 }
1801
1802 #undef START_OP
1803 #undef OP
1804 #undef PO
1805 #undef END_OP
This page took 0.101502 seconds and 4 git commands to generate.