1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-bytecode-interpreter.c
4 *
5 * LTTng modules bytecode interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
14
15 #include <lttng/lttng-bytecode.h>
16 #include <lttng/string-utils.h>
17
18 /*
19  * get_char should be called with the page fault handler disabled if it is
20  * expected to handle a user-space read.
21 */
22 static
23 char get_char(const struct estack_entry *reg, size_t offset)
24 {
25 if (unlikely(offset >= reg->u.s.seq_len))
26 return '\0';
27 if (reg->u.s.user) {
28 char c;
29
30 /* Handle invalid access as end of string. */
31 if (unlikely(!lttng_access_ok(VERIFY_READ,
32 reg->u.s.user_str + offset,
33 sizeof(c))))
34 return '\0';
35 /* Handle fault (nonzero return value) as end of string. */
36 if (unlikely(__copy_from_user_inatomic(&c,
37 reg->u.s.user_str + offset,
38 sizeof(c))))
39 return '\0';
40 return c;
41 } else {
42 return reg->u.s.str[offset];
43 }
44 }
45
46 /*
47 * -1: wildcard found.
48 * -2: unknown escape char.
49 * 0: normal char.
50 */
51 static
52 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
53 {
54 switch (*c) {
55 case '\\':
56 (*offset)++;
57 *c = get_char(reg, *offset);
58 switch (*c) {
59 case '\\':
60 case '*':
61 return 0;
62 default:
63 return -2;
64 }
65 case '*':
66 return -1;
67 default:
68 return 0;
69 }
70 }
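/*
 * Illustration of the escape handling above, derived from the switch cases:
 * in a plain string literal such as "ab\*c", parse_char() consumes the '\\',
 * advances the offset and returns 0 with *c set to '*', so the star is then
 * compared as a literal character.  A bare '*' makes parse_char() return -1,
 * which the callers below treat as "match the remainder of the string".
 */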
71
72 static
73 char get_char_at_cb(size_t at, void *data)
74 {
75 return get_char(data, at);
76 }
77
78 static
79 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
80 {
81 bool has_user = false;
82 int result;
83 struct estack_entry *pattern_reg;
84 struct estack_entry *candidate_reg;
85
86 /* Disable the page fault handler when reading from userspace. */
87 if (estack_bx(stack, top)->u.s.user
88 || estack_ax(stack, top)->u.s.user) {
89 has_user = true;
90 pagefault_disable();
91 }
92
93 /* Find out which side is the pattern vs. the candidate. */
94 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
95 pattern_reg = estack_ax(stack, top);
96 candidate_reg = estack_bx(stack, top);
97 } else {
98 pattern_reg = estack_bx(stack, top);
99 candidate_reg = estack_ax(stack, top);
100 }
101
102 /* Perform the match operation. */
103 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
104 pattern_reg, get_char_at_cb, candidate_reg);
105 if (has_user)
106 pagefault_enable();
107
108 return result;
109 }
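/*
 * Note that negating the match callback result above gives
 * stack_star_glob_match() strcmp-like semantics: it returns 0 when the
 * globbing pattern matches the candidate string and nonzero otherwise,
 * which is what the "==" and "!=" star-glob operators test against.
 */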
110
111 static
112 int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
113 {
114 size_t offset_bx = 0, offset_ax = 0;
115 int diff, has_user = 0;
116
117 if (estack_bx(stack, top)->u.s.user
118 || estack_ax(stack, top)->u.s.user) {
119 has_user = 1;
120 pagefault_disable();
121 }
122
123 for (;;) {
124 int ret;
125 int escaped_r0 = 0;
126 char char_bx, char_ax;
127
128 char_bx = get_char(estack_bx(stack, top), offset_bx);
129 char_ax = get_char(estack_ax(stack, top), offset_ax);
130
131 if (unlikely(char_bx == '\0')) {
132 if (char_ax == '\0') {
133 diff = 0;
134 break;
135 } else {
136 if (estack_ax(stack, top)->u.s.literal_type ==
137 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
138 ret = parse_char(estack_ax(stack, top),
139 &char_ax, &offset_ax);
140 if (ret == -1) {
141 diff = 0;
142 break;
143 }
144 }
145 diff = -1;
146 break;
147 }
148 }
149 if (unlikely(char_ax == '\0')) {
150 if (estack_bx(stack, top)->u.s.literal_type ==
151 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
152 ret = parse_char(estack_bx(stack, top),
153 &char_bx, &offset_bx);
154 if (ret == -1) {
155 diff = 0;
156 break;
157 }
158 }
159 diff = 1;
160 break;
161 }
162 if (estack_bx(stack, top)->u.s.literal_type ==
163 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
164 ret = parse_char(estack_bx(stack, top),
165 &char_bx, &offset_bx);
166 if (ret == -1) {
167 diff = 0;
168 break;
169 } else if (ret == -2) {
170 escaped_r0 = 1;
171 }
172 			/* else compare both chars */
173 }
174 if (estack_ax(stack, top)->u.s.literal_type ==
175 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
176 ret = parse_char(estack_ax(stack, top),
177 &char_ax, &offset_ax);
178 if (ret == -1) {
179 diff = 0;
180 break;
181 } else if (ret == -2) {
182 if (!escaped_r0) {
183 diff = -1;
184 break;
185 }
186 } else {
187 if (escaped_r0) {
188 diff = 1;
189 break;
190 }
191 }
192 } else {
193 if (escaped_r0) {
194 diff = 1;
195 break;
196 }
197 }
198 diff = char_bx - char_ax;
199 if (diff != 0)
200 break;
201 offset_bx++;
202 offset_ax++;
203 }
204 if (has_user)
205 pagefault_enable();
206
207 return diff;
208 }
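/*
 * stack_strcmp() follows the strcmp() convention: negative when the bx
 * (left) operand sorts before the ax (right) operand, 0 on equality,
 * positive otherwise.  An unescaped '*' in a plain literal ends the
 * comparison as a match; e.g. comparing the literal "net*" against the
 * event string "netif_rx" yields 0.
 */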
209
210 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data,
211 struct lttng_probe_ctx *lttng_probe_ctx,
212 const char *filter_stack_data)
213 {
214 return LTTNG_INTERPRETER_DISCARD;
215 }
216
217 uint64_t lttng_bytecode_capture_interpret_false(void *filter_data,
218 struct lttng_probe_ctx *lttng_probe_ctx,
219 const char *capture_stack_data,
220 struct lttng_interpreter_output *output)
221 {
222 return LTTNG_INTERPRETER_DISCARD;
223 }
224
225 #ifdef INTERPRETER_USE_SWITCH
226
227 /*
228 * Fallback for compilers that do not support taking address of labels.
229 */
230
231 #define START_OP \
232 	start_pc = &bytecode->code[0]; \
233 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
234 pc = next_pc) { \
235 dbg_printk("LTTng: Executing op %s (%u)\n", \
236 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
237 (unsigned int) *(bytecode_opcode_t *) pc); \
238 switch (*(bytecode_opcode_t *) pc) {
239
240 #define OP(name) case name
241
242 #define PO break
243
244 #define END_OP } \
245 }
246
247 #else
248
249 /*
250 * Dispatch-table based interpreter.
251 */
252
253 #define START_OP \
254 start_pc = &bytecode->code[0]; \
255 pc = next_pc = start_pc; \
256 if (unlikely(pc - start_pc >= bytecode->len)) \
257 goto end; \
258 goto *dispatch[*(bytecode_opcode_t *) pc];
259
260 #define OP(name) \
261 LABEL_##name
262
263 #define PO \
264 pc = next_pc; \
265 goto *dispatch[*(bytecode_opcode_t *) pc];
266
267 #define END_OP
268
269 #endif
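/*
 * Illustration of how the two dispatch modes expand a handler.  With the
 * macros above, a handler written as
 *
 *	OP(BYTECODE_OP_RETURN):
 *		...
 *		PO;
 *
 * becomes "case BYTECODE_OP_RETURN: ... break;" inside the switch of the
 * fallback interpreter, and "LABEL_BYTECODE_OP_RETURN: ... pc = next_pc;
 * goto *dispatch[*(bytecode_opcode_t *) pc];" when computed gotos are
 * available (the default).
 */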
270
271 #define IS_INTEGER_REGISTER(reg_type) \
272 (reg_type == REG_S64 || reg_type == REG_U64)
273
274 static int context_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
275 struct load_ptr *ptr,
276 uint32_t idx)
277 {
278
279 struct lttng_ctx_field *ctx_field;
280 struct lttng_event_field *field;
281 union lttng_ctx_value v;
282
283 ctx_field = &lttng_static_ctx->fields[idx];
284 field = &ctx_field->event_field;
285 ptr->type = LOAD_OBJECT;
286 /* field is only used for types nested within variants. */
287 ptr->field = NULL;
288
289 switch (field->type.type) {
290 case lttng_kernel_type_integer:
291 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
292 if (field->type.u.integer.signedness) {
293 ptr->object_type = OBJECT_TYPE_S64;
294 ptr->u.s64 = v.s64;
295 ptr->ptr = &ptr->u.s64;
296 } else {
297 ptr->object_type = OBJECT_TYPE_U64;
298 ptr->u.u64 = v.s64; /* Cast. */
299 ptr->ptr = &ptr->u.u64;
300 }
301 break;
302 case lttng_kernel_type_enum_nestable:
303 {
304 const struct lttng_integer_type *itype =
305 &field->type.u.enum_nestable.container_type->u.integer;
306
307 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
308 if (itype->signedness) {
309 ptr->object_type = OBJECT_TYPE_SIGNED_ENUM;
310 ptr->u.s64 = v.s64;
311 ptr->ptr = &ptr->u.s64;
312 } else {
313 ptr->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
314 ptr->u.u64 = v.s64; /* Cast. */
315 ptr->ptr = &ptr->u.u64;
316 }
317 break;
318 }
319 case lttng_kernel_type_array_nestable:
320 if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
321 printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
322 return -EINVAL;
323 }
324 if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
325 printk(KERN_WARNING "LTTng: bytecode: Only string arrays are supported for contexts.\n");
326 return -EINVAL;
327 }
328 ptr->object_type = OBJECT_TYPE_STRING;
329 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
330 ptr->ptr = v.str;
331 break;
332 case lttng_kernel_type_sequence_nestable:
333 if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
334 printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
335 return -EINVAL;
336 }
337 if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
338 printk(KERN_WARNING "LTTng: bytecode: Only string sequences are supported for contexts.\n");
339 return -EINVAL;
340 }
341 ptr->object_type = OBJECT_TYPE_STRING;
342 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
343 ptr->ptr = v.str;
344 break;
345 case lttng_kernel_type_string:
346 ptr->object_type = OBJECT_TYPE_STRING;
347 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
348 ptr->ptr = v.str;
349 break;
350 case lttng_kernel_type_struct_nestable:
351 printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
352 return -EINVAL;
353 case lttng_kernel_type_variant_nestable:
354 printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
355 return -EINVAL;
356 default:
357 		printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d\n", (int) field->type.type);
358 return -EINVAL;
359 }
360 return 0;
361 }
362
363 static int dynamic_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
364 struct bytecode_runtime *runtime,
365 uint64_t index, struct estack_entry *stack_top)
366 {
367 int ret;
368 const struct bytecode_get_index_data *gid;
369
370 gid = (const struct bytecode_get_index_data *) &runtime->data[index];
371 switch (stack_top->u.ptr.type) {
372 case LOAD_OBJECT:
373 switch (stack_top->u.ptr.object_type) {
374 case OBJECT_TYPE_ARRAY:
375 {
376 const char *ptr;
377
378 WARN_ON_ONCE(gid->offset >= gid->array_len);
379 /* Skip count (unsigned long) */
380 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
381 ptr = ptr + gid->offset;
382 stack_top->u.ptr.ptr = ptr;
383 stack_top->u.ptr.object_type = gid->elem.type;
384 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
385 BUG_ON(stack_top->u.ptr.field->type.type != lttng_kernel_type_array_nestable);
386 stack_top->u.ptr.field = NULL;
387 break;
388 }
389 case OBJECT_TYPE_SEQUENCE:
390 {
391 const char *ptr;
392 size_t ptr_seq_len;
393
394 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
395 ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
396 if (gid->offset >= gid->elem.len * ptr_seq_len) {
397 ret = -EINVAL;
398 goto end;
399 }
400 ptr = ptr + gid->offset;
401 stack_top->u.ptr.ptr = ptr;
402 stack_top->u.ptr.object_type = gid->elem.type;
403 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
404 BUG_ON(stack_top->u.ptr.field->type.type != lttng_kernel_type_sequence_nestable);
405 stack_top->u.ptr.field = NULL;
406 break;
407 }
408 case OBJECT_TYPE_STRUCT:
409 printk(KERN_WARNING "LTTng: bytecode: Nested structures are not supported yet.\n");
410 ret = -EINVAL;
411 goto end;
412 case OBJECT_TYPE_VARIANT:
413 default:
414 			printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d\n",
415 (int) stack_top->u.ptr.object_type);
416 ret = -EINVAL;
417 goto end;
418 }
419 break;
420 case LOAD_ROOT_CONTEXT:
421 case LOAD_ROOT_APP_CONTEXT: /* Fall-through */
422 {
423 ret = context_get_index(lttng_probe_ctx,
424 &stack_top->u.ptr,
425 gid->ctx_index);
426 if (ret) {
427 goto end;
428 }
429 break;
430 }
431 case LOAD_ROOT_PAYLOAD:
432 stack_top->u.ptr.ptr += gid->offset;
433 if (gid->elem.type == OBJECT_TYPE_STRING)
434 stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
435 stack_top->u.ptr.object_type = gid->elem.type;
436 stack_top->u.ptr.type = LOAD_OBJECT;
437 stack_top->u.ptr.field = gid->field;
438 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
439 break;
440 }
441
442 stack_top->type = REG_PTR;
443
444 return 0;
445
446 end:
447 return ret;
448 }
449
450 static int dynamic_load_field(struct estack_entry *stack_top)
451 {
452 int ret;
453
454 switch (stack_top->u.ptr.type) {
455 case LOAD_OBJECT:
456 break;
457 case LOAD_ROOT_CONTEXT:
458 case LOAD_ROOT_APP_CONTEXT:
459 case LOAD_ROOT_PAYLOAD:
460 default:
461 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
462 ret = -EINVAL;
463 goto end;
464 }
465 switch (stack_top->u.ptr.object_type) {
466 case OBJECT_TYPE_S8:
467 dbg_printk("op load field s8\n");
468 stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
469 stack_top->type = REG_S64;
470 break;
471 case OBJECT_TYPE_S16:
472 {
473 int16_t tmp;
474
475 dbg_printk("op load field s16\n");
476 tmp = *(int16_t *) stack_top->u.ptr.ptr;
477 if (stack_top->u.ptr.rev_bo)
478 __swab16s(&tmp);
479 stack_top->u.v = tmp;
480 stack_top->type = REG_S64;
481 break;
482 }
483 case OBJECT_TYPE_S32:
484 {
485 int32_t tmp;
486
487 dbg_printk("op load field s32\n");
488 tmp = *(int32_t *) stack_top->u.ptr.ptr;
489 if (stack_top->u.ptr.rev_bo)
490 __swab32s(&tmp);
491 stack_top->u.v = tmp;
492 stack_top->type = REG_S64;
493 break;
494 }
495 case OBJECT_TYPE_S64:
496 {
497 int64_t tmp;
498
499 dbg_printk("op load field s64\n");
500 tmp = *(int64_t *) stack_top->u.ptr.ptr;
501 if (stack_top->u.ptr.rev_bo)
502 __swab64s(&tmp);
503 stack_top->u.v = tmp;
504 stack_top->type = REG_S64;
505 break;
506 }
507 case OBJECT_TYPE_SIGNED_ENUM:
508 {
509 int64_t tmp;
510
511 dbg_printk("op load field signed enumeration\n");
512 tmp = *(int64_t *) stack_top->u.ptr.ptr;
513 if (stack_top->u.ptr.rev_bo)
514 __swab64s(&tmp);
515 stack_top->u.v = tmp;
516 stack_top->type = REG_S64;
517 break;
518 }
519 case OBJECT_TYPE_U8:
520 dbg_printk("op load field u8\n");
521 stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
522 stack_top->type = REG_U64;
523 break;
524 case OBJECT_TYPE_U16:
525 {
526 uint16_t tmp;
527
528 dbg_printk("op load field u16\n");
529 tmp = *(uint16_t *) stack_top->u.ptr.ptr;
530 if (stack_top->u.ptr.rev_bo)
531 __swab16s(&tmp);
532 stack_top->u.v = tmp;
533 stack_top->type = REG_U64;
534 break;
535 }
536 case OBJECT_TYPE_U32:
537 {
538 uint32_t tmp;
539
540 dbg_printk("op load field u32\n");
541 tmp = *(uint32_t *) stack_top->u.ptr.ptr;
542 if (stack_top->u.ptr.rev_bo)
543 __swab32s(&tmp);
544 stack_top->u.v = tmp;
545 stack_top->type = REG_U64;
546 break;
547 }
548 case OBJECT_TYPE_U64:
549 {
550 uint64_t tmp;
551
552 dbg_printk("op load field u64\n");
553 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
554 if (stack_top->u.ptr.rev_bo)
555 __swab64s(&tmp);
556 stack_top->u.v = tmp;
557 stack_top->type = REG_U64;
558 break;
559 }
560 case OBJECT_TYPE_UNSIGNED_ENUM:
561 {
562 uint64_t tmp;
563
564 dbg_printk("op load field unsigned enumeration\n");
565 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
566 if (stack_top->u.ptr.rev_bo)
567 __swab64s(&tmp);
568 stack_top->u.v = tmp;
569 stack_top->type = REG_U64;
570 break;
571 }
572 case OBJECT_TYPE_STRING:
573 {
574 const char *str;
575
576 dbg_printk("op load field string\n");
577 str = (const char *) stack_top->u.ptr.ptr;
578 stack_top->u.s.str = str;
579 if (unlikely(!stack_top->u.s.str)) {
580 dbg_printk("Bytecode warning: loading a NULL string.\n");
581 ret = -EINVAL;
582 goto end;
583 }
584 stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
585 stack_top->u.s.literal_type =
586 ESTACK_STRING_LITERAL_TYPE_NONE;
587 stack_top->type = REG_STRING;
588 break;
589 }
590 case OBJECT_TYPE_STRING_SEQUENCE:
591 {
592 const char *ptr;
593
594 dbg_printk("op load field string sequence\n");
595 ptr = stack_top->u.ptr.ptr;
596 stack_top->u.s.seq_len = *(unsigned long *) ptr;
597 stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
598 if (unlikely(!stack_top->u.s.str)) {
599 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
600 ret = -EINVAL;
601 goto end;
602 }
603 stack_top->u.s.literal_type =
604 ESTACK_STRING_LITERAL_TYPE_NONE;
605 stack_top->type = REG_STRING;
606 break;
607 }
608 case OBJECT_TYPE_DYNAMIC:
609 /*
610 * Dynamic types in context are looked up
611 		 * by the context get-index operation.
612 */
613 ret = -EINVAL;
614 goto end;
615 case OBJECT_TYPE_DOUBLE:
616 ret = -EINVAL;
617 goto end;
618 case OBJECT_TYPE_SEQUENCE:
619 case OBJECT_TYPE_ARRAY:
620 case OBJECT_TYPE_STRUCT:
621 case OBJECT_TYPE_VARIANT:
622 printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
623 ret = -EINVAL;
624 goto end;
625 }
626 return 0;
627
628 end:
629 return ret;
630 }
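/*
 * The rev_bo flag handled above means the field's byte order is reversed
 * with respect to the host: for instance a big-endian u32 payload field
 * read on a little-endian kernel is byte-swapped with __swab32s() before
 * being widened into the 64-bit interpreter register.
 */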
631
632 static
633 int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
634 struct lttng_interpreter_output *output)
635 {
636 int ret;
637
638 again:
639 switch (ax->type) {
640 case REG_S64:
641 output->type = LTTNG_INTERPRETER_TYPE_S64;
642 output->u.s = ax->u.v;
643 break;
644 case REG_U64:
645 output->type = LTTNG_INTERPRETER_TYPE_U64;
646 output->u.u = (uint64_t) ax->u.v;
647 break;
648 case REG_STRING:
649 output->type = LTTNG_INTERPRETER_TYPE_STRING;
650 output->u.str.str = ax->u.s.str;
651 output->u.str.len = ax->u.s.seq_len;
652 break;
653 case REG_PTR:
654 switch (ax->u.ptr.object_type) {
655 case OBJECT_TYPE_S8:
656 case OBJECT_TYPE_S16:
657 case OBJECT_TYPE_S32:
658 case OBJECT_TYPE_S64:
659 case OBJECT_TYPE_U8:
660 case OBJECT_TYPE_U16:
661 case OBJECT_TYPE_U32:
662 case OBJECT_TYPE_U64:
663 case OBJECT_TYPE_DOUBLE:
664 case OBJECT_TYPE_STRING:
665 case OBJECT_TYPE_STRING_SEQUENCE:
666 ret = dynamic_load_field(ax);
667 if (ret)
668 return ret;
669 /* Retry after loading ptr into stack top. */
670 goto again;
671 case OBJECT_TYPE_SEQUENCE:
672 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
673 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
674 output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
675 output->u.sequence.nested_type = ax->u.ptr.field->type.u.sequence_nestable.elem_type;
676 break;
677 case OBJECT_TYPE_ARRAY:
678 /* Skip count (unsigned long) */
679 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
680 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
681 output->u.sequence.nr_elem = ax->u.ptr.field->type.u.array_nestable.length;
682 output->u.sequence.nested_type = ax->u.ptr.field->type.u.array_nestable.elem_type;
683 break;
684 case OBJECT_TYPE_SIGNED_ENUM:
685 ret = dynamic_load_field(ax);
686 if (ret)
687 return ret;
688 output->type = LTTNG_INTERPRETER_TYPE_SIGNED_ENUM;
689 output->u.s = ax->u.v;
690 break;
691 case OBJECT_TYPE_UNSIGNED_ENUM:
692 ret = dynamic_load_field(ax);
693 if (ret)
694 return ret;
695 output->type = LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM;
696 output->u.u = ax->u.v;
697 break;
698 case OBJECT_TYPE_STRUCT:
699 case OBJECT_TYPE_VARIANT:
700 default:
701 return -EINVAL;
702 }
703
704 break;
705 case REG_STAR_GLOB_STRING:
706 case REG_TYPE_UNKNOWN:
707 default:
708 return -EINVAL;
709 }
710
711 return LTTNG_INTERPRETER_RECORD_FLAG;
712 }
713
714 #ifdef DEBUG
715
716 #define DBG_USER_STR_CUTOFF 32
717
718 /*
719 * In debug mode, print user string (truncated, if necessary).
720 */
721 static inline
722 void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
723 {
724 size_t pos = 0;
725 char last_char;
726 char user_str[DBG_USER_STR_CUTOFF];
727
728 pagefault_disable();
729 do {
730 last_char = get_char(user_str_reg, pos);
731 user_str[pos] = last_char;
732 pos++;
733 } while (last_char != '\0' && pos < sizeof(user_str));
734 pagefault_enable();
735
736 user_str[sizeof(user_str) - 1] = '\0';
737 dbg_printk("load field ref user string: '%s%s'\n", user_str,
738 last_char != '\0' ? "[...]" : "");
739 }
740 #else
741 static inline
742 void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
743 {
744 }
745 #endif
746
747 /*
748 * Return 0 (discard), or raise the 0x1 flag (log event).
749 * Currently, other flags are kept for future extensions and have no
750 * effect.
751 */
752 static
753 uint64_t bytecode_interpret(void *interpreter_data,
754 struct lttng_probe_ctx *lttng_probe_ctx,
755 const char *interpreter_stack_data,
756 struct lttng_interpreter_output *output)
757 {
758 struct bytecode_runtime *bytecode = interpreter_data;
759 void *pc, *next_pc, *start_pc;
760 int ret = -EINVAL;
761 uint64_t retval = 0;
762 struct estack _stack;
763 struct estack *stack = &_stack;
764 register int64_t ax = 0, bx = 0;
765 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
766 register int top = INTERPRETER_STACK_EMPTY;
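	/*
	 * The estack_ax_v/estack_ax_t and estack_bx_v/estack_bx_t accessors
	 * used throughout the handlers are macros (defined in
	 * lttng-bytecode.h) that alias the ax/bx register variables above:
	 * the value and type of the two top-of-stack entries are kept in
	 * registers, and estack_push()/estack_pop() shuffle them to and from
	 * the backing _stack array.
	 */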
767 #ifndef INTERPRETER_USE_SWITCH
768 static void *dispatch[NR_BYTECODE_OPS] = {
769 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
770
771 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
772
773 /* binary */
774 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
775 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
776 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
777 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
778 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
779 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
780 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
781 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
782 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
783 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
784
785 /* binary comparators */
786 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
787 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
788 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
789 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
790 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
791 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
792
793 /* string binary comparator */
794 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
795 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
796 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
797 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
798 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
799 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
800
801 /* globbing pattern binary comparator */
802 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
803 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
804
805 /* s64 binary comparator */
806 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
807 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
808 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
809 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
810 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
811 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
812
813 /* double binary comparator */
814 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
815 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
816 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
817 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
818 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
819 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
820
821 /* Mixed S64-double binary comparators */
822 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
823 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
824 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
825 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
826 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
827 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
828
829 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
830 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
831 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
832 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
833 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
834 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
835
836 /* unary */
837 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
838 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
839 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
840 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
841 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
842 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
843 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
844 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
845 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
846
847 /* logical */
848 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
849 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
850
851 /* load field ref */
852 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
853 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
854 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
855 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
856 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
857
858 /* load from immediate operand */
859 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
860 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
861 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
862 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
863
864 /* cast */
865 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
866 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
867 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
868
869 /* get context ref */
870 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
871 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
872 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
873 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
874
875 /* load userspace field ref */
876 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING,
877 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE,
878
879 /* Instructions for recursive traversal through composed types. */
880 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
881 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
882 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
883
884 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
885 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
886 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
887 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
888
889 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
890 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
891 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
892 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
893 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
894 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
895 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
896 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
897 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
898 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
899 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
900 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
901
902 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
903
904 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
905 };
906 #endif /* #ifndef INTERPRETER_USE_SWITCH */
907
908 START_OP
909
910 OP(BYTECODE_OP_UNKNOWN):
911 OP(BYTECODE_OP_LOAD_FIELD_REF):
912 OP(BYTECODE_OP_GET_CONTEXT_REF):
913 #ifdef INTERPRETER_USE_SWITCH
914 default:
915 #endif /* INTERPRETER_USE_SWITCH */
916 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
917 (unsigned int) *(bytecode_opcode_t *) pc);
918 ret = -EINVAL;
919 goto end;
920
921 OP(BYTECODE_OP_RETURN):
922 OP(BYTECODE_OP_RETURN_S64):
923 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
924 switch (estack_ax_t) {
925 case REG_S64:
926 case REG_U64:
927 retval = !!estack_ax_v;
928 break;
929 case REG_DOUBLE:
930 case REG_STRING:
931 case REG_PTR:
932 if (!output) {
933 ret = -EINVAL;
934 goto end;
935 }
936 retval = 0;
937 break;
938 case REG_STAR_GLOB_STRING:
939 case REG_TYPE_UNKNOWN:
940 ret = -EINVAL;
941 goto end;
942 }
943 ret = 0;
944 goto end;
945
946 /* binary */
947 OP(BYTECODE_OP_MUL):
948 OP(BYTECODE_OP_DIV):
949 OP(BYTECODE_OP_MOD):
950 OP(BYTECODE_OP_PLUS):
951 OP(BYTECODE_OP_MINUS):
952 printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
953 (unsigned int) *(bytecode_opcode_t *) pc);
954 ret = -EINVAL;
955 goto end;
956
957 OP(BYTECODE_OP_EQ):
958 OP(BYTECODE_OP_NE):
959 OP(BYTECODE_OP_GT):
960 OP(BYTECODE_OP_LT):
961 OP(BYTECODE_OP_GE):
962 OP(BYTECODE_OP_LE):
963 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
964 (unsigned int) *(bytecode_opcode_t *) pc);
965 ret = -EINVAL;
966 goto end;
967
968 OP(BYTECODE_OP_EQ_STRING):
969 {
970 int res;
971
972 res = (stack_strcmp(stack, top, "==") == 0);
973 estack_pop(stack, top, ax, bx, ax_t, bx_t);
974 estack_ax_v = res;
975 estack_ax_t = REG_S64;
976 next_pc += sizeof(struct binary_op);
977 PO;
978 }
979 OP(BYTECODE_OP_NE_STRING):
980 {
981 int res;
982
983 res = (stack_strcmp(stack, top, "!=") != 0);
984 estack_pop(stack, top, ax, bx, ax_t, bx_t);
985 estack_ax_v = res;
986 estack_ax_t = REG_S64;
987 next_pc += sizeof(struct binary_op);
988 PO;
989 }
990 OP(BYTECODE_OP_GT_STRING):
991 {
992 int res;
993
994 res = (stack_strcmp(stack, top, ">") > 0);
995 estack_pop(stack, top, ax, bx, ax_t, bx_t);
996 estack_ax_v = res;
997 estack_ax_t = REG_S64;
998 next_pc += sizeof(struct binary_op);
999 PO;
1000 }
1001 OP(BYTECODE_OP_LT_STRING):
1002 {
1003 int res;
1004
1005 res = (stack_strcmp(stack, top, "<") < 0);
1006 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1007 estack_ax_v = res;
1008 estack_ax_t = REG_S64;
1009 next_pc += sizeof(struct binary_op);
1010 PO;
1011 }
1012 OP(BYTECODE_OP_GE_STRING):
1013 {
1014 int res;
1015
1016 res = (stack_strcmp(stack, top, ">=") >= 0);
1017 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1018 estack_ax_v = res;
1019 estack_ax_t = REG_S64;
1020 next_pc += sizeof(struct binary_op);
1021 PO;
1022 }
1023 OP(BYTECODE_OP_LE_STRING):
1024 {
1025 int res;
1026
1027 res = (stack_strcmp(stack, top, "<=") <= 0);
1028 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1029 estack_ax_v = res;
1030 estack_ax_t = REG_S64;
1031 next_pc += sizeof(struct binary_op);
1032 PO;
1033 }
1034
1035 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
1036 {
1037 int res;
1038
1039 res = (stack_star_glob_match(stack, top, "==") == 0);
1040 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1041 estack_ax_v = res;
1042 estack_ax_t = REG_S64;
1043 next_pc += sizeof(struct binary_op);
1044 PO;
1045 }
1046 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
1047 {
1048 int res;
1049
1050 res = (stack_star_glob_match(stack, top, "!=") != 0);
1051 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1052 estack_ax_v = res;
1053 estack_ax_t = REG_S64;
1054 next_pc += sizeof(struct binary_op);
1055 PO;
1056 }
1057
1058 OP(BYTECODE_OP_EQ_S64):
1059 {
1060 int res;
1061
1062 res = (estack_bx_v == estack_ax_v);
1063 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1064 estack_ax_v = res;
1065 estack_ax_t = REG_S64;
1066 next_pc += sizeof(struct binary_op);
1067 PO;
1068 }
1069 OP(BYTECODE_OP_NE_S64):
1070 {
1071 int res;
1072
1073 res = (estack_bx_v != estack_ax_v);
1074 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1075 estack_ax_v = res;
1076 estack_ax_t = REG_S64;
1077 next_pc += sizeof(struct binary_op);
1078 PO;
1079 }
1080 OP(BYTECODE_OP_GT_S64):
1081 {
1082 int res;
1083
1084 res = (estack_bx_v > estack_ax_v);
1085 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1086 estack_ax_v = res;
1087 estack_ax_t = REG_S64;
1088 next_pc += sizeof(struct binary_op);
1089 PO;
1090 }
1091 OP(BYTECODE_OP_LT_S64):
1092 {
1093 int res;
1094
1095 res = (estack_bx_v < estack_ax_v);
1096 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1097 estack_ax_v = res;
1098 estack_ax_t = REG_S64;
1099 next_pc += sizeof(struct binary_op);
1100 PO;
1101 }
1102 OP(BYTECODE_OP_GE_S64):
1103 {
1104 int res;
1105
1106 res = (estack_bx_v >= estack_ax_v);
1107 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1108 estack_ax_v = res;
1109 estack_ax_t = REG_S64;
1110 next_pc += sizeof(struct binary_op);
1111 PO;
1112 }
1113 OP(BYTECODE_OP_LE_S64):
1114 {
1115 int res;
1116
1117 res = (estack_bx_v <= estack_ax_v);
1118 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1119 estack_ax_v = res;
1120 estack_ax_t = REG_S64;
1121 next_pc += sizeof(struct binary_op);
1122 PO;
1123 }
1124
1125 OP(BYTECODE_OP_EQ_DOUBLE):
1126 OP(BYTECODE_OP_NE_DOUBLE):
1127 OP(BYTECODE_OP_GT_DOUBLE):
1128 OP(BYTECODE_OP_LT_DOUBLE):
1129 OP(BYTECODE_OP_GE_DOUBLE):
1130 OP(BYTECODE_OP_LE_DOUBLE):
1131 {
1132 BUG_ON(1);
1133 PO;
1134 }
1135
1136 /* Mixed S64-double binary comparators */
1137 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1138 OP(BYTECODE_OP_NE_DOUBLE_S64):
1139 OP(BYTECODE_OP_GT_DOUBLE_S64):
1140 OP(BYTECODE_OP_LT_DOUBLE_S64):
1141 OP(BYTECODE_OP_GE_DOUBLE_S64):
1142 OP(BYTECODE_OP_LE_DOUBLE_S64):
1143 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1144 OP(BYTECODE_OP_NE_S64_DOUBLE):
1145 OP(BYTECODE_OP_GT_S64_DOUBLE):
1146 OP(BYTECODE_OP_LT_S64_DOUBLE):
1147 OP(BYTECODE_OP_GE_S64_DOUBLE):
1148 OP(BYTECODE_OP_LE_S64_DOUBLE):
1149 {
1150 BUG_ON(1);
1151 PO;
1152 }
1153 OP(BYTECODE_OP_BIT_RSHIFT):
1154 {
1155 int64_t res;
1156
1157 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1158 ret = -EINVAL;
1159 goto end;
1160 }
1161
1162 /* Catch undefined behavior. */
1163 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1164 ret = -EINVAL;
1165 goto end;
1166 }
1167 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1168 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1169 estack_ax_v = res;
1170 estack_ax_t = REG_U64;
1171 next_pc += sizeof(struct binary_op);
1172 PO;
1173 }
1174 OP(BYTECODE_OP_BIT_LSHIFT):
1175 {
1176 int64_t res;
1177
1178 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1179 ret = -EINVAL;
1180 goto end;
1181 }
1182
1183 /* Catch undefined behavior. */
1184 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1185 ret = -EINVAL;
1186 goto end;
1187 }
1188 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1189 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1190 estack_ax_v = res;
1191 estack_ax_t = REG_U64;
1192 next_pc += sizeof(struct binary_op);
1193 PO;
1194 }
1195 OP(BYTECODE_OP_BIT_AND):
1196 {
1197 int64_t res;
1198
1199 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1200 ret = -EINVAL;
1201 goto end;
1202 }
1203
1204 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1205 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1206 estack_ax_v = res;
1207 estack_ax_t = REG_U64;
1208 next_pc += sizeof(struct binary_op);
1209 PO;
1210 }
1211 OP(BYTECODE_OP_BIT_OR):
1212 {
1213 int64_t res;
1214
1215 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1216 ret = -EINVAL;
1217 goto end;
1218 }
1219
1220 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1221 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1222 estack_ax_v = res;
1223 estack_ax_t = REG_U64;
1224 next_pc += sizeof(struct binary_op);
1225 PO;
1226 }
1227 OP(BYTECODE_OP_BIT_XOR):
1228 {
1229 int64_t res;
1230
1231 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1232 ret = -EINVAL;
1233 goto end;
1234 }
1235
1236 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1237 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1238 estack_ax_v = res;
1239 estack_ax_t = REG_U64;
1240 next_pc += sizeof(struct binary_op);
1241 PO;
1242 }
1243
1244 /* unary */
1245 OP(BYTECODE_OP_UNARY_PLUS):
1246 OP(BYTECODE_OP_UNARY_MINUS):
1247 OP(BYTECODE_OP_UNARY_NOT):
1248 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1249 (unsigned int) *(bytecode_opcode_t *) pc);
1250 ret = -EINVAL;
1251 goto end;
1252
1253
1254 OP(BYTECODE_OP_UNARY_BIT_NOT):
1255 {
1256 estack_ax_v = ~(uint64_t) estack_ax_v;
1257 estack_ax_t = REG_S64;
1258 next_pc += sizeof(struct unary_op);
1259 PO;
1260 }
1261
1262 OP(BYTECODE_OP_UNARY_PLUS_S64):
1263 {
1264 next_pc += sizeof(struct unary_op);
1265 PO;
1266 }
1267 OP(BYTECODE_OP_UNARY_MINUS_S64):
1268 {
1269 estack_ax_v = -estack_ax_v;
1270 estack_ax_t = REG_S64;
1271 next_pc += sizeof(struct unary_op);
1272 PO;
1273 }
1274 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1275 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1276 {
1277 BUG_ON(1);
1278 PO;
1279 }
1280 OP(BYTECODE_OP_UNARY_NOT_S64):
1281 {
1282 estack_ax_v = !estack_ax_v;
1283 estack_ax_t = REG_S64;
1284 next_pc += sizeof(struct unary_op);
1285 PO;
1286 }
1287 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1288 {
1289 BUG_ON(1);
1290 PO;
1291 }
1292
1293 /* logical */
1294 OP(BYTECODE_OP_AND):
1295 {
1296 struct logical_op *insn = (struct logical_op *) pc;
1297
1298 /* If AX is 0, skip and evaluate to 0 */
1299 if (unlikely(estack_ax_v == 0)) {
1300 dbg_printk("Jumping to bytecode offset %u\n",
1301 (unsigned int) insn->skip_offset);
1302 next_pc = start_pc + insn->skip_offset;
1303 } else {
1304 /* Pop 1 when jump not taken */
1305 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1306 next_pc += sizeof(struct logical_op);
1307 }
1308 PO;
1309 }
1310 OP(BYTECODE_OP_OR):
1311 {
1312 struct logical_op *insn = (struct logical_op *) pc;
1313
1314 /* If AX is nonzero, skip and evaluate to 1 */
1315
1316 if (unlikely(estack_ax_v != 0)) {
1317 estack_ax_v = 1;
1318 dbg_printk("Jumping to bytecode offset %u\n",
1319 (unsigned int) insn->skip_offset);
1320 next_pc = start_pc + insn->skip_offset;
1321 } else {
1322 /* Pop 1 when jump not taken */
1323 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1324 next_pc += sizeof(struct logical_op);
1325 }
1326 PO;
1327 }
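	/*
	 * The skip_offset jumps above implement short-circuit evaluation:
	 * when the left-hand side already determines the result of "&&" or
	 * "||", the interpreter jumps over the bytecode of the right-hand
	 * side instead of popping the stack and evaluating it.
	 */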
1328
1329
1330 /* load field ref */
1331 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1332 {
1333 struct load_op *insn = (struct load_op *) pc;
1334 struct field_ref *ref = (struct field_ref *) insn->data;
1335
1336 dbg_printk("load field ref offset %u type string\n",
1337 ref->offset);
1338 estack_push(stack, top, ax, bx, ax_t, bx_t);
1339 estack_ax(stack, top)->u.s.str =
1340 *(const char * const *) &interpreter_stack_data[ref->offset];
1341 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1342 dbg_printk("Bytecode warning: loading a NULL string.\n");
1343 ret = -EINVAL;
1344 goto end;
1345 }
1346 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1347 estack_ax(stack, top)->u.s.literal_type =
1348 ESTACK_STRING_LITERAL_TYPE_NONE;
1349 estack_ax(stack, top)->u.s.user = 0;
1350 estack_ax(stack, top)->type = REG_STRING;
1351 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1352 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1353 PO;
1354 }
1355
1356 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
1357 {
1358 struct load_op *insn = (struct load_op *) pc;
1359 struct field_ref *ref = (struct field_ref *) insn->data;
1360
1361 dbg_printk("load field ref offset %u type sequence\n",
1362 ref->offset);
1363 estack_push(stack, top, ax, bx, ax_t, bx_t);
1364 estack_ax(stack, top)->u.s.seq_len =
1365 *(unsigned long *) &interpreter_stack_data[ref->offset];
1366 estack_ax(stack, top)->u.s.str =
1367 *(const char **) (&interpreter_stack_data[ref->offset
1368 + sizeof(unsigned long)]);
1369 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1370 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1371 ret = -EINVAL;
1372 goto end;
1373 }
1374 estack_ax(stack, top)->u.s.literal_type =
1375 ESTACK_STRING_LITERAL_TYPE_NONE;
1376 estack_ax(stack, top)->u.s.user = 0;
1377 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1378 PO;
1379 }
1380
1381 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
1382 {
1383 struct load_op *insn = (struct load_op *) pc;
1384 struct field_ref *ref = (struct field_ref *) insn->data;
1385
1386 dbg_printk("load field ref offset %u type s64\n",
1387 ref->offset);
1388 estack_push(stack, top, ax, bx, ax_t, bx_t);
1389 estack_ax_v =
1390 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
1391 estack_ax_t = REG_S64;
1392 dbg_printk("ref load s64 %lld\n",
1393 (long long) estack_ax_v);
1394 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1395 PO;
1396 }
1397
1398 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
1399 {
1400 BUG_ON(1);
1401 PO;
1402 }
1403
1404 /* load from immediate operand */
1405 OP(BYTECODE_OP_LOAD_STRING):
1406 {
1407 struct load_op *insn = (struct load_op *) pc;
1408
1409 dbg_printk("load string %s\n", insn->data);
1410 estack_push(stack, top, ax, bx, ax_t, bx_t);
1411 estack_ax(stack, top)->u.s.str = insn->data;
1412 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1413 estack_ax(stack, top)->u.s.literal_type =
1414 ESTACK_STRING_LITERAL_TYPE_PLAIN;
1415 estack_ax(stack, top)->u.s.user = 0;
1416 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1417 PO;
1418 }
1419
1420 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
1421 {
1422 struct load_op *insn = (struct load_op *) pc;
1423
1424 dbg_printk("load globbing pattern %s\n", insn->data);
1425 estack_push(stack, top, ax, bx, ax_t, bx_t);
1426 estack_ax(stack, top)->u.s.str = insn->data;
1427 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1428 estack_ax(stack, top)->u.s.literal_type =
1429 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
1430 estack_ax(stack, top)->u.s.user = 0;
1431 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1432 PO;
1433 }
1434
1435 OP(BYTECODE_OP_LOAD_S64):
1436 {
1437 struct load_op *insn = (struct load_op *) pc;
1438
1439 estack_push(stack, top, ax, bx, ax_t, bx_t);
1440 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
1441 estack_ax_t = REG_S64;
1442 dbg_printk("load s64 %lld\n",
1443 (long long) estack_ax_v);
1444 next_pc += sizeof(struct load_op)
1445 + sizeof(struct literal_numeric);
1446 PO;
1447 }
1448
1449 OP(BYTECODE_OP_LOAD_DOUBLE):
1450 {
1451 BUG_ON(1);
1452 PO;
1453 }
1454
1455 /* cast */
1456 OP(BYTECODE_OP_CAST_TO_S64):
1457 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1458 (unsigned int) *(bytecode_opcode_t *) pc);
1459 ret = -EINVAL;
1460 goto end;
1461
1462 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
1463 {
1464 BUG_ON(1);
1465 PO;
1466 }
1467
1468 OP(BYTECODE_OP_CAST_NOP):
1469 {
1470 next_pc += sizeof(struct cast_op);
1471 PO;
1472 }
1473
1474 /* get context ref */
1475 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
1476 {
1477 struct load_op *insn = (struct load_op *) pc;
1478 struct field_ref *ref = (struct field_ref *) insn->data;
1479 struct lttng_ctx_field *ctx_field;
1480 union lttng_ctx_value v;
1481
1482 dbg_printk("get context ref offset %u type string\n",
1483 ref->offset);
1484 ctx_field = &lttng_static_ctx->fields[ref->offset];
1485 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1486 estack_push(stack, top, ax, bx, ax_t, bx_t);
1487 estack_ax(stack, top)->u.s.str = v.str;
1488 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1489 dbg_printk("Bytecode warning: loading a NULL string.\n");
1490 ret = -EINVAL;
1491 goto end;
1492 }
1493 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1494 estack_ax(stack, top)->u.s.literal_type =
1495 ESTACK_STRING_LITERAL_TYPE_NONE;
1496 estack_ax(stack, top)->u.s.user = 0;
1497 estack_ax(stack, top)->type = REG_STRING;
1498 dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
1499 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1500 PO;
1501 }
1502
1503 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
1504 {
1505 struct load_op *insn = (struct load_op *) pc;
1506 struct field_ref *ref = (struct field_ref *) insn->data;
1507 struct lttng_ctx_field *ctx_field;
1508 union lttng_ctx_value v;
1509
1510 dbg_printk("get context ref offset %u type s64\n",
1511 ref->offset);
1512 ctx_field = &lttng_static_ctx->fields[ref->offset];
1513 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1514 estack_push(stack, top, ax, bx, ax_t, bx_t);
1515 estack_ax_v = v.s64;
1516 estack_ax_t = REG_S64;
1517 dbg_printk("ref get context s64 %lld\n",
1518 (long long) estack_ax_v);
1519 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1520 PO;
1521 }
1522
1523 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
1524 {
1525 BUG_ON(1);
1526 PO;
1527 }
1528
1529 /* load userspace field ref */
1530 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING):
1531 {
1532 struct load_op *insn = (struct load_op *) pc;
1533 struct field_ref *ref = (struct field_ref *) insn->data;
1534
1535 dbg_printk("load field ref offset %u type user string\n",
1536 ref->offset);
1537 estack_push(stack, top, ax, bx, ax_t, bx_t);
1538 estack_ax(stack, top)->u.s.user_str =
1539 *(const char * const *) &interpreter_stack_data[ref->offset];
1540 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1541 dbg_printk("Bytecode warning: loading a NULL string.\n");
1542 ret = -EINVAL;
1543 goto end;
1544 }
1545 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1546 estack_ax(stack, top)->u.s.literal_type =
1547 ESTACK_STRING_LITERAL_TYPE_NONE;
1548 estack_ax(stack, top)->u.s.user = 1;
1549 estack_ax(stack, top)->type = REG_STRING;
1550 dbg_load_ref_user_str_printk(estack_ax(stack, top));
1551 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1552 PO;
1553 }
1554
1555 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1556 {
1557 struct load_op *insn = (struct load_op *) pc;
1558 struct field_ref *ref = (struct field_ref *) insn->data;
1559
1560 dbg_printk("load field ref offset %u type user sequence\n",
1561 ref->offset);
1562 estack_push(stack, top, ax, bx, ax_t, bx_t);
1563 estack_ax(stack, top)->u.s.seq_len =
1564 *(unsigned long *) &interpreter_stack_data[ref->offset];
1565 estack_ax(stack, top)->u.s.user_str =
1566 *(const char **) (&interpreter_stack_data[ref->offset
1567 + sizeof(unsigned long)]);
1568 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1569 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1570 ret = -EINVAL;
1571 goto end;
1572 }
1573 estack_ax(stack, top)->u.s.literal_type =
1574 ESTACK_STRING_LITERAL_TYPE_NONE;
1575 estack_ax(stack, top)->u.s.user = 1;
1576 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1577 PO;
1578 }
1579
1580 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
1581 {
1582 dbg_printk("op get context root\n");
1583 estack_push(stack, top, ax, bx, ax_t, bx_t);
1584 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
1585 /* "field" only needed for variants. */
1586 estack_ax(stack, top)->u.ptr.field = NULL;
1587 estack_ax(stack, top)->type = REG_PTR;
1588 next_pc += sizeof(struct load_op);
1589 PO;
1590 }
1591
1592 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
1593 {
1594 BUG_ON(1);
1595 PO;
1596 }
1597
1598 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
1599 {
1600 dbg_printk("op get app payload root\n");
1601 estack_push(stack, top, ax, bx, ax_t, bx_t);
1602 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
1603 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
1604 /* "field" only needed for variants. */
1605 estack_ax(stack, top)->u.ptr.field = NULL;
1606 estack_ax(stack, top)->type = REG_PTR;
1607 next_pc += sizeof(struct load_op);
1608 PO;
1609 }
1610
1611 OP(BYTECODE_OP_GET_SYMBOL):
1612 {
1613 dbg_printk("op get symbol\n");
1614 switch (estack_ax(stack, top)->u.ptr.type) {
1615 case LOAD_OBJECT:
1616 printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
1617 ret = -EINVAL;
1618 goto end;
1619 case LOAD_ROOT_CONTEXT:
1620 case LOAD_ROOT_APP_CONTEXT:
1621 case LOAD_ROOT_PAYLOAD:
1622 /*
1623 			 * Symbol lookup is performed by the
1624 			 * specialization pass.
1625 */
1626 ret = -EINVAL;
1627 goto end;
1628 }
1629 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1630 PO;
1631 }
1632
1633 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
1634 {
1635 /*
1636 		 * Used for the first variant encountered in a
1637 * traversal. Variants are not implemented yet.
1638 */
1639 ret = -EINVAL;
1640 goto end;
1641 }
1642
1643 OP(BYTECODE_OP_GET_INDEX_U16):
1644 {
1645 struct load_op *insn = (struct load_op *) pc;
1646 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1647
1648 dbg_printk("op get index u16\n");
1649 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1650 if (ret)
1651 goto end;
1652 estack_ax_v = estack_ax(stack, top)->u.v;
1653 estack_ax_t = estack_ax(stack, top)->type;
1654 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1655 PO;
1656 }
1657
1658 OP(BYTECODE_OP_GET_INDEX_U64):
1659 {
1660 struct load_op *insn = (struct load_op *) pc;
1661 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1662
1663 dbg_printk("op get index u64\n");
1664 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1665 if (ret)
1666 goto end;
1667 estack_ax_v = estack_ax(stack, top)->u.v;
1668 estack_ax_t = estack_ax(stack, top)->type;
1669 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1670 PO;
1671 }
1672
1673 OP(BYTECODE_OP_LOAD_FIELD):
1674 {
1675 dbg_printk("op load field\n");
1676 ret = dynamic_load_field(estack_ax(stack, top));
1677 if (ret)
1678 goto end;
1679 estack_ax_v = estack_ax(stack, top)->u.v;
1680 estack_ax_t = estack_ax(stack, top)->type;
1681 next_pc += sizeof(struct load_op);
1682 PO;
1683 }
1684
1685 OP(BYTECODE_OP_LOAD_FIELD_S8):
1686 {
1687 dbg_printk("op load field s8\n");
1688
1689 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1690 estack_ax_t = REG_S64;
1691 next_pc += sizeof(struct load_op);
1692 PO;
1693 }
1694 OP(BYTECODE_OP_LOAD_FIELD_S16):
1695 {
1696 dbg_printk("op load field s16\n");
1697
1698 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1699 estack_ax_t = REG_S64;
1700 next_pc += sizeof(struct load_op);
1701 PO;
1702 }
1703 OP(BYTECODE_OP_LOAD_FIELD_S32):
1704 {
1705 dbg_printk("op load field s32\n");
1706
1707 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1708 estack_ax_t = REG_S64;
1709 next_pc += sizeof(struct load_op);
1710 PO;
1711 }
1712 OP(BYTECODE_OP_LOAD_FIELD_S64):
1713 {
1714 dbg_printk("op load field s64\n");
1715
1716 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1717 estack_ax_t = REG_S64;
1718 next_pc += sizeof(struct load_op);
1719 PO;
1720 }
1721 OP(BYTECODE_OP_LOAD_FIELD_U8):
1722 {
1723 dbg_printk("op load field u8\n");
1724
1725 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1726 estack_ax_t = REG_S64;
1727 next_pc += sizeof(struct load_op);
1728 PO;
1729 }
1730 OP(BYTECODE_OP_LOAD_FIELD_U16):
1731 {
1732 dbg_printk("op load field u16\n");
1733
1734 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1735 estack_ax_t = REG_S64;
1736 next_pc += sizeof(struct load_op);
1737 PO;
1738 }
1739 OP(BYTECODE_OP_LOAD_FIELD_U32):
1740 {
1741 dbg_printk("op load field u32\n");
1742
1743 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1744 estack_ax_t = REG_S64;
1745 next_pc += sizeof(struct load_op);
1746 PO;
1747 }
1748 OP(BYTECODE_OP_LOAD_FIELD_U64):
1749 {
1750 dbg_printk("op load field u64\n");
1751
1752 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1753 estack_ax_t = REG_S64;
1754 next_pc += sizeof(struct load_op);
1755 PO;
1756 }
1757 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
1758 {
1759 ret = -EINVAL;
1760 goto end;
1761 }
1762
1763 OP(BYTECODE_OP_LOAD_FIELD_STRING):
1764 {
1765 const char *str;
1766
1767 dbg_printk("op load field string\n");
1768 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
1769 estack_ax(stack, top)->u.s.str = str;
1770 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1771 dbg_printk("Bytecode warning: loading a NULL string.\n");
1772 ret = -EINVAL;
1773 goto end;
1774 }
1775 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1776 estack_ax(stack, top)->u.s.literal_type =
1777 ESTACK_STRING_LITERAL_TYPE_NONE;
1778 estack_ax(stack, top)->type = REG_STRING;
1779 next_pc += sizeof(struct load_op);
1780 PO;
1781 }
1782
1783 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
1784 {
1785 const char *ptr;
1786
1787 dbg_printk("op load field string sequence\n");
1788 ptr = estack_ax(stack, top)->u.ptr.ptr;
1789 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
1790 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
1791 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1792 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1793 ret = -EINVAL;
1794 goto end;
1795 }
1796 estack_ax(stack, top)->u.s.literal_type =
1797 ESTACK_STRING_LITERAL_TYPE_NONE;
1798 estack_ax(stack, top)->type = REG_STRING;
1799 next_pc += sizeof(struct load_op);
1800 PO;
1801 }
1802
1803 END_OP
1804 end:
1805 /* Return _DISCARD on error. */
1806 if (ret)
1807 return LTTNG_INTERPRETER_DISCARD;
1808
1809 if (output) {
1810 return lttng_bytecode_interpret_format_output(
1811 estack_ax(stack, top), output);
1812 }
1813
1814 return retval;
1815 }
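/*
 * The annotation below wraps the kernel's STACK_FRAME_NON_STANDARD (see
 * wrapper/objtool.h): it asks objtool to skip stack validation for
 * bytecode_interpret(), presumably because the computed-goto dispatch
 * table confuses its analysis.
 */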
1816 LTTNG_STACK_FRAME_NON_STANDARD(bytecode_interpret);
1817
1818 uint64_t lttng_bytecode_filter_interpret(void *filter_data,
1819 struct lttng_probe_ctx *lttng_probe_ctx,
1820 const char *filter_stack_data)
1821 {
1822 return bytecode_interpret(filter_data, lttng_probe_ctx,
1823 filter_stack_data, NULL);
1824 }
1825
1826 uint64_t lttng_bytecode_capture_interpret(void *capture_data,
1827 struct lttng_probe_ctx *lttng_probe_ctx,
1828 const char *capture_stack_data,
1829 struct lttng_interpreter_output *output)
1830 {
1831 return bytecode_interpret(capture_data, lttng_probe_ctx,
1832 capture_stack_data, output);
1833 }
1834
1835 #undef START_OP
1836 #undef OP
1837 #undef PO
1838 #undef END_OP