Refactoring: type description structures
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-bytecode-interpreter.c
4 *
5 * LTTng modules bytecode interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
14
15 #include <lttng/lttng-bytecode.h>
16 #include <lttng/string-utils.h>
17 #include <lttng/events-internal.h>
18
19 /*
20 * get_char should be called with the page fault handler disabled if it is
21 * expected to handle user-space reads.
22 */
23 static
24 char get_char(const struct estack_entry *reg, size_t offset)
25 {
26 if (unlikely(offset >= reg->u.s.seq_len))
27 return '\0';
28 if (reg->u.s.user) {
29 char c;
30
31 /* Handle invalid access as end of string. */
32 if (unlikely(!lttng_access_ok(VERIFY_READ,
33 reg->u.s.user_str + offset,
34 sizeof(c))))
35 return '\0';
36 /* Handle fault (nonzero return value) as end of string. */
37 if (unlikely(__copy_from_user_inatomic(&c,
38 reg->u.s.user_str + offset,
39 sizeof(c))))
40 return '\0';
41 return c;
42 } else {
43 return reg->u.s.str[offset];
44 }
45 }
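/*
 * Usage sketch (illustrative, not part of the interpreter proper): callers
 * that may read user-space strings are expected to bracket get_char() with
 * the page fault handler disabled, as stack_strcmp() and
 * stack_star_glob_match() below do:
 *
 *	pagefault_disable();
 *	c = get_char(reg, offset);	(yields '\0' on inaccessible memory)
 *	pagefault_enable();
 */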
46
47 /*
48 * -1: wildcard found.
49 * -2: unknown escape char.
50 * 0: normal char.
51 */
52 static
53 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
54 {
55 switch (*c) {
56 case '\\':
57 (*offset)++;
58 *c = get_char(reg, *offset);
59 switch (*c) {
60 case '\\':
61 case '*':
62 return 0;
63 default:
64 return -2;
65 }
66 case '*':
67 return -1;
68 default:
69 return 0;
70 }
71 }
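/*
 * Example of the escape handling above (illustrative): within a plain string
 * literal, a backslash followed by '*' or by another backslash makes
 * parse_char() return 0 with *c set to the literal character, a bare '*' is
 * reported as a wildcard (-1), and a backslash followed by any other
 * character is reported as an unknown escape (-2).
 */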
72
73 static
74 char get_char_at_cb(size_t at, void *data)
75 {
76 return get_char(data, at);
77 }
78
79 static
80 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
81 {
82 bool has_user = false;
83 int result;
84 struct estack_entry *pattern_reg;
85 struct estack_entry *candidate_reg;
86
87 /* Disable the page fault handler when reading from userspace. */
88 if (estack_bx(stack, top)->u.s.user
89 || estack_ax(stack, top)->u.s.user) {
90 has_user = true;
91 pagefault_disable();
92 }
93
94 /* Find out which side is the pattern vs. the candidate. */
95 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
96 pattern_reg = estack_ax(stack, top);
97 candidate_reg = estack_bx(stack, top);
98 } else {
99 pattern_reg = estack_bx(stack, top);
100 candidate_reg = estack_ax(stack, top);
101 }
102
103 /* Perform the match operation. */
104 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
105 pattern_reg, get_char_at_cb, candidate_reg);
106 if (has_user)
107 pagefault_enable();
108
109 return result;
110 }
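/*
 * Note: like stack_strcmp() below, this helper follows strcmp-style
 * semantics: it returns 0 when the globbing pattern matches the candidate
 * string and non-zero otherwise. For example, the pattern "net_*" compared
 * against the candidate "net_dev_xmit" yields 0.
 */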
111
112 static
113 int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
114 {
115 size_t offset_bx = 0, offset_ax = 0;
116 int diff, has_user = 0;
117
118 if (estack_bx(stack, top)->u.s.user
119 || estack_ax(stack, top)->u.s.user) {
120 has_user = 1;
121 pagefault_disable();
122 }
123
124 for (;;) {
125 int ret;
126 int escaped_r0 = 0;
127 char char_bx, char_ax;
128
129 char_bx = get_char(estack_bx(stack, top), offset_bx);
130 char_ax = get_char(estack_ax(stack, top), offset_ax);
131
132 if (unlikely(char_bx == '\0')) {
133 if (char_ax == '\0') {
134 diff = 0;
135 break;
136 } else {
137 if (estack_ax(stack, top)->u.s.literal_type ==
138 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
139 ret = parse_char(estack_ax(stack, top),
140 &char_ax, &offset_ax);
141 if (ret == -1) {
142 diff = 0;
143 break;
144 }
145 }
146 diff = -1;
147 break;
148 }
149 }
150 if (unlikely(char_ax == '\0')) {
151 if (estack_bx(stack, top)->u.s.literal_type ==
152 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
153 ret = parse_char(estack_bx(stack, top),
154 &char_bx, &offset_bx);
155 if (ret == -1) {
156 diff = 0;
157 break;
158 }
159 }
160 diff = 1;
161 break;
162 }
163 if (estack_bx(stack, top)->u.s.literal_type ==
164 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
165 ret = parse_char(estack_bx(stack, top),
166 &char_bx, &offset_bx);
167 if (ret == -1) {
168 diff = 0;
169 break;
170 } else if (ret == -2) {
171 escaped_r0 = 1;
172 }
173 /* else compare both chars */
174 }
175 if (estack_ax(stack, top)->u.s.literal_type ==
176 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
177 ret = parse_char(estack_ax(stack, top),
178 &char_ax, &offset_ax);
179 if (ret == -1) {
180 diff = 0;
181 break;
182 } else if (ret == -2) {
183 if (!escaped_r0) {
184 diff = -1;
185 break;
186 }
187 } else {
188 if (escaped_r0) {
189 diff = 1;
190 break;
191 }
192 }
193 } else {
194 if (escaped_r0) {
195 diff = 1;
196 break;
197 }
198 }
199 diff = char_bx - char_ax;
200 if (diff != 0)
201 break;
202 offset_bx++;
203 offset_ax++;
204 }
205 if (has_user)
206 pagefault_enable();
207
208 return diff;
209 }
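/*
 * Note on the comparison above: the return value is strcmp-like (negative,
 * zero or positive). For plain string literals, a bare '*' acts as a
 * wildcard that makes the remainder of both strings compare equal, while a
 * backslash-escaped '*' or backslash compares as the literal character.
 */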
210
211 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data,
212 struct lttng_probe_ctx *lttng_probe_ctx,
213 const char *filter_stack_data)
214 {
215 return LTTNG_INTERPRETER_DISCARD;
216 }
217
218 uint64_t lttng_bytecode_capture_interpret_false(void *filter_data,
219 struct lttng_probe_ctx *lttng_probe_ctx,
220 const char *capture_stack_data,
221 struct lttng_interpreter_output *output)
222 {
223 return LTTNG_INTERPRETER_DISCARD;
224 }
225
226 #ifdef INTERPRETER_USE_SWITCH
227
228 /*
229 * Fallback for compilers that do not support taking address of labels.
230 */
231
232 #define START_OP \
233 start_pc = &bytecode->data[0]; \
234 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
235 pc = next_pc) { \
236 dbg_printk("LTTng: Executing op %s (%u)\n", \
237 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
238 (unsigned int) *(bytecode_opcode_t *) pc); \
239 switch (*(bytecode_opcode_t *) pc) {
240
241 #define OP(name) case name
242
243 #define PO break
244
245 #define END_OP } \
246 }
247
248 #else
249
250 /*
251 * Dispatch-table based interpreter.
252 */
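/*
 * The dispatch table below relies on the GCC/Clang "labels as values"
 * extension (&&label and goto *expr): the opcode at the current pc indexes a
 * table of label addresses, so moving to the next instruction is a single
 * indirect jump rather than a switch. Minimal sketch of the technique
 * (illustrative, not actual interpreter code):
 *
 *	static void *dispatch[] = { &&do_inc, &&do_ret };
 *
 *	goto *dispatch[*pc];
 * do_inc:
 *	val++;
 *	goto *dispatch[*++pc];
 * do_ret:
 *	return val;
 */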
253
254 #define START_OP \
255 start_pc = &bytecode->code[0]; \
256 pc = next_pc = start_pc; \
257 if (unlikely(pc - start_pc >= bytecode->len)) \
258 goto end; \
259 goto *dispatch[*(bytecode_opcode_t *) pc];
260
261 #define OP(name) \
262 LABEL_##name
263
264 #define PO \
265 pc = next_pc; \
266 goto *dispatch[*(bytecode_opcode_t *) pc];
267
268 #define END_OP
269
270 #endif
271
272 #define IS_INTEGER_REGISTER(reg_type) \
273 (reg_type == REG_S64 || reg_type == REG_U64)
274
275 static int context_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
276 struct load_ptr *ptr,
277 uint32_t idx)
278 {
279
280 struct lttng_kernel_ctx_field *ctx_field;
281 const struct lttng_kernel_event_field *field;
282 union lttng_ctx_value v;
283
284 ctx_field = &lttng_static_ctx->fields[idx];
285 field = ctx_field->event_field;
286 ptr->type = LOAD_OBJECT;
287 /* field is only used for types nested within variants. */
288 ptr->field = NULL;
289
290 switch (field->type->type) {
291 case lttng_kernel_type_integer:
292 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
293 if (lttng_kernel_get_type_integer(field->type)->signedness) {
294 ptr->object_type = OBJECT_TYPE_S64;
295 ptr->u.s64 = v.s64;
296 ptr->ptr = &ptr->u.s64;
297 } else {
298 ptr->object_type = OBJECT_TYPE_U64;
299 ptr->u.u64 = v.s64; /* Cast. */
300 ptr->ptr = &ptr->u.u64;
301 }
302 break;
303 case lttng_kernel_type_enum:
304 {
305 const struct lttng_kernel_type_enum *enum_type = lttng_kernel_get_type_enum(field->type);
306 const struct lttng_kernel_type_integer *integer_type = lttng_kernel_get_type_integer(enum_type->container_type);
307
308 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
309 if (integer_type->signedness) {
310 ptr->object_type = OBJECT_TYPE_SIGNED_ENUM;
311 ptr->u.s64 = v.s64;
312 ptr->ptr = &ptr->u.s64;
313 } else {
314 ptr->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
315 ptr->u.u64 = v.s64; /* Cast. */
316 ptr->ptr = &ptr->u.u64;
317 }
318 break;
319 }
320 case lttng_kernel_type_array:
321 {
322 const struct lttng_kernel_type_array *array_type = lttng_kernel_get_type_array(field->type);
323
324 if (!lttng_kernel_type_is_bytewise_integer(array_type->elem_type)) {
325 printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
326 return -EINVAL;
327 }
328 if (array_type->encoding == lttng_kernel_string_encoding_none) {
329 printk(KERN_WARNING "LTTng: bytecode: Only string arrays are supported for contexts.\n");
330 return -EINVAL;
331 }
332 ptr->object_type = OBJECT_TYPE_STRING;
333 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
334 ptr->ptr = v.str;
335 break;
336 }
337 case lttng_kernel_type_sequence:
338 {
339 const struct lttng_kernel_type_sequence *sequence_type = lttng_kernel_get_type_sequence(field->type);
340
341 if (!lttng_kernel_type_is_bytewise_integer(sequence_type->elem_type)) {
342 printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
343 return -EINVAL;
344 }
345 if (sequence_type->encoding == lttng_kernel_string_encoding_none) {
346 printk(KERN_WARNING "LTTng: bytecode: Only string sequences are supported for contexts.\n");
347 return -EINVAL;
348 }
349 ptr->object_type = OBJECT_TYPE_STRING;
350 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
351 ptr->ptr = v.str;
352 break;
353 }
354 case lttng_kernel_type_string:
355 ptr->object_type = OBJECT_TYPE_STRING;
356 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
357 ptr->ptr = v.str;
358 break;
359 case lttng_kernel_type_struct:
360 printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
361 return -EINVAL;
362 case lttng_kernel_type_variant:
363 printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
364 return -EINVAL;
365 default:
366 printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d\n", (int) field->type->type);
367 return -EINVAL;
368 }
369 return 0;
370 }
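/*
 * Example (illustrative): for a filter expression such as "$ctx.cpu_id == 0",
 * the specialization pass resolves the context name to an index into
 * lttng_static_ctx->fields[]. At run time, context_get_index() invokes that
 * field's get_value() callback and stores the resulting integer into the
 * load_ptr, ready to be turned into a stack value by dynamic_load_field().
 */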
371
372 static int dynamic_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
373 struct bytecode_runtime *runtime,
374 uint64_t index, struct estack_entry *stack_top)
375 {
376 int ret;
377 const struct bytecode_get_index_data *gid;
378
379 gid = (const struct bytecode_get_index_data *) &runtime->data[index];
380 switch (stack_top->u.ptr.type) {
381 case LOAD_OBJECT:
382 switch (stack_top->u.ptr.object_type) {
383 case OBJECT_TYPE_ARRAY:
384 {
385 const char *ptr;
386
387 WARN_ON_ONCE(gid->offset >= gid->array_len);
388 /* Skip count (unsigned long) */
389 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
390 ptr = ptr + gid->offset;
391 stack_top->u.ptr.ptr = ptr;
392 stack_top->u.ptr.object_type = gid->elem.type;
393 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
394 BUG_ON(stack_top->u.ptr.field->type->type != lttng_kernel_type_array);
395 stack_top->u.ptr.field = NULL;
396 break;
397 }
398 case OBJECT_TYPE_SEQUENCE:
399 {
400 const char *ptr;
401 size_t ptr_seq_len;
402
403 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
404 ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
405 if (gid->offset >= gid->elem.len * ptr_seq_len) {
406 ret = -EINVAL;
407 goto end;
408 }
409 ptr = ptr + gid->offset;
410 stack_top->u.ptr.ptr = ptr;
411 stack_top->u.ptr.object_type = gid->elem.type;
412 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
413 BUG_ON(stack_top->u.ptr.field->type->type != lttng_kernel_type_sequence);
414 stack_top->u.ptr.field = NULL;
415 break;
416 }
417 case OBJECT_TYPE_STRUCT:
418 printk(KERN_WARNING "LTTng: bytecode: Nested structures are not supported yet.\n");
419 ret = -EINVAL;
420 goto end;
421 case OBJECT_TYPE_VARIANT:
422 default:
423 printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d\n",
424 (int) stack_top->u.ptr.object_type);
425 ret = -EINVAL;
426 goto end;
427 }
428 break;
429 case LOAD_ROOT_CONTEXT:
430 case LOAD_ROOT_APP_CONTEXT: /* Fall-through */
431 {
432 ret = context_get_index(lttng_probe_ctx,
433 &stack_top->u.ptr,
434 gid->ctx_index);
435 if (ret) {
436 goto end;
437 }
438 break;
439 }
440 case LOAD_ROOT_PAYLOAD:
441 stack_top->u.ptr.ptr += gid->offset;
442 if (gid->elem.type == OBJECT_TYPE_STRING)
443 stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
444 stack_top->u.ptr.object_type = gid->elem.type;
445 stack_top->u.ptr.type = LOAD_OBJECT;
446 stack_top->u.ptr.field = gid->field;
447 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
448 break;
449 }
450
451 stack_top->type = REG_PTR;
452
453 return 0;
454
455 end:
456 return ret;
457 }
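/*
 * Example (illustrative; "myfield" is a hypothetical event field): for an
 * expression indexing an array such as "myfield[2]", the GET_INDEX_U16/U64
 * instruction carries an element offset pre-computed by the specializer;
 * dynamic_get_index() adds it to the array or sequence data pointer and
 * records the element type, leaving a REG_PTR entry that a subsequent
 * BYTECODE_OP_LOAD_FIELD converts into a value.
 */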
458
459 static int dynamic_load_field(struct estack_entry *stack_top)
460 {
461 int ret;
462
463 switch (stack_top->u.ptr.type) {
464 case LOAD_OBJECT:
465 break;
466 case LOAD_ROOT_CONTEXT:
467 case LOAD_ROOT_APP_CONTEXT:
468 case LOAD_ROOT_PAYLOAD:
469 default:
470 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
471 ret = -EINVAL;
472 goto end;
473 }
474 switch (stack_top->u.ptr.object_type) {
475 case OBJECT_TYPE_S8:
476 dbg_printk("op load field s8\n");
477 stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
478 stack_top->type = REG_S64;
479 break;
480 case OBJECT_TYPE_S16:
481 {
482 int16_t tmp;
483
484 dbg_printk("op load field s16\n");
485 tmp = *(int16_t *) stack_top->u.ptr.ptr;
486 if (stack_top->u.ptr.rev_bo)
487 __swab16s(&tmp);
488 stack_top->u.v = tmp;
489 stack_top->type = REG_S64;
490 break;
491 }
492 case OBJECT_TYPE_S32:
493 {
494 int32_t tmp;
495
496 dbg_printk("op load field s32\n");
497 tmp = *(int32_t *) stack_top->u.ptr.ptr;
498 if (stack_top->u.ptr.rev_bo)
499 __swab32s(&tmp);
500 stack_top->u.v = tmp;
501 stack_top->type = REG_S64;
502 break;
503 }
504 case OBJECT_TYPE_S64:
505 {
506 int64_t tmp;
507
508 dbg_printk("op load field s64\n");
509 tmp = *(int64_t *) stack_top->u.ptr.ptr;
510 if (stack_top->u.ptr.rev_bo)
511 __swab64s(&tmp);
512 stack_top->u.v = tmp;
513 stack_top->type = REG_S64;
514 break;
515 }
516 case OBJECT_TYPE_SIGNED_ENUM:
517 {
518 int64_t tmp;
519
520 dbg_printk("op load field signed enumeration\n");
521 tmp = *(int64_t *) stack_top->u.ptr.ptr;
522 if (stack_top->u.ptr.rev_bo)
523 __swab64s(&tmp);
524 stack_top->u.v = tmp;
525 stack_top->type = REG_S64;
526 break;
527 }
528 case OBJECT_TYPE_U8:
529 dbg_printk("op load field u8\n");
530 stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
531 stack_top->type = REG_U64;
532 break;
533 case OBJECT_TYPE_U16:
534 {
535 uint16_t tmp;
536
537 dbg_printk("op load field u16\n");
538 tmp = *(uint16_t *) stack_top->u.ptr.ptr;
539 if (stack_top->u.ptr.rev_bo)
540 __swab16s(&tmp);
541 stack_top->u.v = tmp;
542 stack_top->type = REG_U64;
543 break;
544 }
545 case OBJECT_TYPE_U32:
546 {
547 uint32_t tmp;
548
549 dbg_printk("op load field u32\n");
550 tmp = *(uint32_t *) stack_top->u.ptr.ptr;
551 if (stack_top->u.ptr.rev_bo)
552 __swab32s(&tmp);
553 stack_top->u.v = tmp;
554 stack_top->type = REG_U64;
555 break;
556 }
557 case OBJECT_TYPE_U64:
558 {
559 uint64_t tmp;
560
561 dbg_printk("op load field u64\n");
562 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
563 if (stack_top->u.ptr.rev_bo)
564 __swab64s(&tmp);
565 stack_top->u.v = tmp;
566 stack_top->type = REG_U64;
567 break;
568 }
569 case OBJECT_TYPE_UNSIGNED_ENUM:
570 {
571 uint64_t tmp;
572
573 dbg_printk("op load field unsigned enumeration\n");
574 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
575 if (stack_top->u.ptr.rev_bo)
576 __swab64s(&tmp);
577 stack_top->u.v = tmp;
578 stack_top->type = REG_U64;
579 break;
580 }
581 case OBJECT_TYPE_STRING:
582 {
583 const char *str;
584
585 dbg_printk("op load field string\n");
586 str = (const char *) stack_top->u.ptr.ptr;
587 stack_top->u.s.str = str;
588 if (unlikely(!stack_top->u.s.str)) {
589 dbg_printk("Bytecode warning: loading a NULL string.\n");
590 ret = -EINVAL;
591 goto end;
592 }
593 stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
594 stack_top->u.s.literal_type =
595 ESTACK_STRING_LITERAL_TYPE_NONE;
596 stack_top->type = REG_STRING;
597 break;
598 }
599 case OBJECT_TYPE_STRING_SEQUENCE:
600 {
601 const char *ptr;
602
603 dbg_printk("op load field string sequence\n");
604 ptr = stack_top->u.ptr.ptr;
605 stack_top->u.s.seq_len = *(unsigned long *) ptr;
606 stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
607 if (unlikely(!stack_top->u.s.str)) {
608 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
609 ret = -EINVAL;
610 goto end;
611 }
612 stack_top->u.s.literal_type =
613 ESTACK_STRING_LITERAL_TYPE_NONE;
614 stack_top->type = REG_STRING;
615 break;
616 }
617 case OBJECT_TYPE_DYNAMIC:
618 /*
619 * Dynamic types in context are looked up
620 * by context get index.
621 */
622 ret = -EINVAL;
623 goto end;
624 case OBJECT_TYPE_DOUBLE:
625 ret = -EINVAL;
626 goto end;
627 case OBJECT_TYPE_SEQUENCE:
628 case OBJECT_TYPE_ARRAY:
629 case OBJECT_TYPE_STRUCT:
630 case OBJECT_TYPE_VARIANT:
631 printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
632 ret = -EINVAL;
633 goto end;
634 }
635 return 0;
636
637 end:
638 return ret;
639 }
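/*
 * Note: rev_bo is set during specialization when the field's byte order
 * differs from the host byte order, so the __swab16s()/__swab32s()/__swab64s()
 * calls above hand host-endian values to the comparison operators regardless
 * of how the field was recorded.
 */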
640
641 static
642 int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
643 struct lttng_interpreter_output *output)
644 {
645 int ret;
646
647 again:
648 switch (ax->type) {
649 case REG_S64:
650 output->type = LTTNG_INTERPRETER_TYPE_S64;
651 output->u.s = ax->u.v;
652 break;
653 case REG_U64:
654 output->type = LTTNG_INTERPRETER_TYPE_U64;
655 output->u.u = (uint64_t) ax->u.v;
656 break;
657 case REG_STRING:
658 output->type = LTTNG_INTERPRETER_TYPE_STRING;
659 output->u.str.str = ax->u.s.str;
660 output->u.str.len = ax->u.s.seq_len;
661 break;
662 case REG_PTR:
663 switch (ax->u.ptr.object_type) {
664 case OBJECT_TYPE_S8:
665 case OBJECT_TYPE_S16:
666 case OBJECT_TYPE_S32:
667 case OBJECT_TYPE_S64:
668 case OBJECT_TYPE_U8:
669 case OBJECT_TYPE_U16:
670 case OBJECT_TYPE_U32:
671 case OBJECT_TYPE_U64:
672 case OBJECT_TYPE_DOUBLE:
673 case OBJECT_TYPE_STRING:
674 case OBJECT_TYPE_STRING_SEQUENCE:
675 ret = dynamic_load_field(ax);
676 if (ret)
677 return ret;
678 /* Retry after loading ptr into stack top. */
679 goto again;
680 case OBJECT_TYPE_SEQUENCE:
681 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
682 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
683 output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
684 output->u.sequence.nested_type = lttng_kernel_get_type_sequence(ax->u.ptr.field->type)->elem_type;
685 break;
686 case OBJECT_TYPE_ARRAY:
687 /* Skip count (unsigned long) */
688 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
689 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
690 output->u.sequence.nr_elem = lttng_kernel_get_type_array(ax->u.ptr.field->type)->length;
691 output->u.sequence.nested_type = lttng_kernel_get_type_array(ax->u.ptr.field->type)->elem_type;
692 break;
693 case OBJECT_TYPE_SIGNED_ENUM:
694 ret = dynamic_load_field(ax);
695 if (ret)
696 return ret;
697 output->type = LTTNG_INTERPRETER_TYPE_SIGNED_ENUM;
698 output->u.s = ax->u.v;
699 break;
700 case OBJECT_TYPE_UNSIGNED_ENUM:
701 ret = dynamic_load_field(ax);
702 if (ret)
703 return ret;
704 output->type = LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM;
705 output->u.u = ax->u.v;
706 break;
707 case OBJECT_TYPE_STRUCT:
708 case OBJECT_TYPE_VARIANT:
709 default:
710 return -EINVAL;
711 }
712
713 break;
714 case REG_STAR_GLOB_STRING:
715 case REG_TYPE_UNKNOWN:
716 default:
717 return -EINVAL;
718 }
719
720 return LTTNG_INTERPRETER_RECORD_FLAG;
721 }
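/*
 * Example (illustrative): for a capture expression on an integer event field,
 * the interpreter leaves a REG_PTR entry referring to the field in the event
 * payload; the function above loads it and sets output->type to
 * LTTNG_INTERPRETER_TYPE_S64 or LTTNG_INTERPRETER_TYPE_U64 so that the caller
 * (e.g. the event-notifier capture path) can serialize the captured value.
 */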
722
723 #ifdef DEBUG
724
725 #define DBG_USER_STR_CUTOFF 32
726
727 /*
728 * In debug mode, print user string (truncated, if necessary).
729 */
730 static inline
731 void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
732 {
733 size_t pos = 0;
734 char last_char;
735 char user_str[DBG_USER_STR_CUTOFF];
736
737 pagefault_disable();
738 do {
739 last_char = get_char(user_str_reg, pos);
740 user_str[pos] = last_char;
741 pos++;
742 } while (last_char != '\0' && pos < sizeof(user_str));
743 pagefault_enable();
744
745 user_str[sizeof(user_str) - 1] = '\0';
746 dbg_printk("load field ref user string: '%s%s'\n", user_str,
747 last_char != '\0' ? "[...]" : "");
748 }
749 #else
750 static inline
751 void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
752 {
753 }
754 #endif
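/*
 * Sketch of how the interpreter result is consumed (assumed caller-side
 * logic, for illustration only; variable names are hypothetical): a filter
 * attached to an event typically tests the record flag and drops the event
 * when it is not raised, roughly:
 *
 *	if (!(lttng_bytecode_filter_interpret(runtime, probe_ctx, stack_data)
 *			& LTTNG_INTERPRETER_RECORD_FLAG))
 *		return;		(discard the event)
 *
 * LTTNG_INTERPRETER_DISCARD is 0, so returning it never raises the flag.
 */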
755
756 /*
757 * Return 0 (discard), or raise the 0x1 flag (log event).
758 * Currently, other flags are kept for future extensions and have no
759 * effect.
760 */
761 static
762 uint64_t bytecode_interpret(void *interpreter_data,
763 struct lttng_probe_ctx *lttng_probe_ctx,
764 const char *interpreter_stack_data,
765 struct lttng_interpreter_output *output)
766 {
767 struct bytecode_runtime *bytecode = interpreter_data;
768 void *pc, *next_pc, *start_pc;
769 int ret = -EINVAL;
770 uint64_t retval = 0;
771 struct estack _stack;
772 struct estack *stack = &_stack;
773 register int64_t ax = 0, bx = 0;
774 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
775 register int top = INTERPRETER_STACK_EMPTY;
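/*
 * ax/bx and ax_t/bx_t shadow the value and type of the two top-most estack
 * entries; the estack_ax_v/estack_bx_v and estack_ax_t/estack_bx_t macros
 * operate on these register copies so that the hot comparison and logical
 * operators avoid round-trips through the in-memory stack entries.
 */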
776 #ifndef INTERPRETER_USE_SWITCH
777 static void *dispatch[NR_BYTECODE_OPS] = {
778 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
779
780 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
781
782 /* binary */
783 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
784 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
785 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
786 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
787 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
788 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
789 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
790 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
791 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
792 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
793
794 /* binary comparators */
795 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
796 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
797 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
798 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
799 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
800 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
801
802 /* string binary comparator */
803 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
804 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
805 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
806 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
807 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
808 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
809
810 /* globbing pattern binary comparator */
811 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
812 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
813
814 /* s64 binary comparator */
815 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
816 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
817 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
818 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
819 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
820 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
821
822 /* double binary comparator */
823 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
824 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
825 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
826 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
827 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
828 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
829
830 /* Mixed S64-double binary comparators */
831 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
832 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
833 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
834 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
835 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
836 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
837
838 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
839 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
840 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
841 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
842 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
843 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
844
845 /* unary */
846 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
847 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
848 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
849 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
850 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
851 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
852 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
853 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
854 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
855
856 /* logical */
857 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
858 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
859
860 /* load field ref */
861 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
862 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
863 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
864 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
865 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
866
867 /* load from immediate operand */
868 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
869 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
870 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
871 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
872
873 /* cast */
874 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
875 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
876 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
877
878 /* get context ref */
879 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
880 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
881 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
882 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
883
884 /* load userspace field ref */
885 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING,
886 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE,
887
888 /* Instructions for recursive traversal through composed types. */
889 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
890 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
891 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
892
893 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
894 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
895 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
896 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
897
898 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
899 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
900 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
901 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
902 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
903 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
904 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
905 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
906 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
907 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
908 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
909 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
910
911 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
912
913 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
914 };
915 #endif /* #ifndef INTERPRETER_USE_SWITCH */
916
917 START_OP
918
919 OP(BYTECODE_OP_UNKNOWN):
920 OP(BYTECODE_OP_LOAD_FIELD_REF):
921 OP(BYTECODE_OP_GET_CONTEXT_REF):
922 #ifdef INTERPRETER_USE_SWITCH
923 default:
924 #endif /* INTERPRETER_USE_SWITCH */
925 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
926 (unsigned int) *(bytecode_opcode_t *) pc);
927 ret = -EINVAL;
928 goto end;
929
930 OP(BYTECODE_OP_RETURN):
931 OP(BYTECODE_OP_RETURN_S64):
932 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
933 switch (estack_ax_t) {
934 case REG_S64:
935 case REG_U64:
936 retval = !!estack_ax_v;
937 break;
938 case REG_DOUBLE:
939 case REG_STRING:
940 case REG_PTR:
941 if (!output) {
942 ret = -EINVAL;
943 goto end;
944 }
945 retval = 0;
946 break;
947 case REG_STAR_GLOB_STRING:
948 case REG_TYPE_UNKNOWN:
949 ret = -EINVAL;
950 goto end;
951 }
952 ret = 0;
953 goto end;
954
955 /* binary */
956 OP(BYTECODE_OP_MUL):
957 OP(BYTECODE_OP_DIV):
958 OP(BYTECODE_OP_MOD):
959 OP(BYTECODE_OP_PLUS):
960 OP(BYTECODE_OP_MINUS):
961 printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
962 (unsigned int) *(bytecode_opcode_t *) pc);
963 ret = -EINVAL;
964 goto end;
965
966 OP(BYTECODE_OP_EQ):
967 OP(BYTECODE_OP_NE):
968 OP(BYTECODE_OP_GT):
969 OP(BYTECODE_OP_LT):
970 OP(BYTECODE_OP_GE):
971 OP(BYTECODE_OP_LE):
972 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
973 (unsigned int) *(bytecode_opcode_t *) pc);
974 ret = -EINVAL;
975 goto end;
976
977 OP(BYTECODE_OP_EQ_STRING):
978 {
979 int res;
980
981 res = (stack_strcmp(stack, top, "==") == 0);
982 estack_pop(stack, top, ax, bx, ax_t, bx_t);
983 estack_ax_v = res;
984 estack_ax_t = REG_S64;
985 next_pc += sizeof(struct binary_op);
986 PO;
987 }
988 OP(BYTECODE_OP_NE_STRING):
989 {
990 int res;
991
992 res = (stack_strcmp(stack, top, "!=") != 0);
993 estack_pop(stack, top, ax, bx, ax_t, bx_t);
994 estack_ax_v = res;
995 estack_ax_t = REG_S64;
996 next_pc += sizeof(struct binary_op);
997 PO;
998 }
999 OP(BYTECODE_OP_GT_STRING):
1000 {
1001 int res;
1002
1003 res = (stack_strcmp(stack, top, ">") > 0);
1004 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1005 estack_ax_v = res;
1006 estack_ax_t = REG_S64;
1007 next_pc += sizeof(struct binary_op);
1008 PO;
1009 }
1010 OP(BYTECODE_OP_LT_STRING):
1011 {
1012 int res;
1013
1014 res = (stack_strcmp(stack, top, "<") < 0);
1015 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1016 estack_ax_v = res;
1017 estack_ax_t = REG_S64;
1018 next_pc += sizeof(struct binary_op);
1019 PO;
1020 }
1021 OP(BYTECODE_OP_GE_STRING):
1022 {
1023 int res;
1024
1025 res = (stack_strcmp(stack, top, ">=") >= 0);
1026 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1027 estack_ax_v = res;
1028 estack_ax_t = REG_S64;
1029 next_pc += sizeof(struct binary_op);
1030 PO;
1031 }
1032 OP(BYTECODE_OP_LE_STRING):
1033 {
1034 int res;
1035
1036 res = (stack_strcmp(stack, top, "<=") <= 0);
1037 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1038 estack_ax_v = res;
1039 estack_ax_t = REG_S64;
1040 next_pc += sizeof(struct binary_op);
1041 PO;
1042 }
1043
1044 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
1045 {
1046 int res;
1047
1048 res = (stack_star_glob_match(stack, top, "==") == 0);
1049 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1050 estack_ax_v = res;
1051 estack_ax_t = REG_S64;
1052 next_pc += sizeof(struct binary_op);
1053 PO;
1054 }
1055 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
1056 {
1057 int res;
1058
1059 res = (stack_star_glob_match(stack, top, "!=") != 0);
1060 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1061 estack_ax_v = res;
1062 estack_ax_t = REG_S64;
1063 next_pc += sizeof(struct binary_op);
1064 PO;
1065 }
1066
1067 OP(BYTECODE_OP_EQ_S64):
1068 {
1069 int res;
1070
1071 res = (estack_bx_v == estack_ax_v);
1072 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1073 estack_ax_v = res;
1074 estack_ax_t = REG_S64;
1075 next_pc += sizeof(struct binary_op);
1076 PO;
1077 }
1078 OP(BYTECODE_OP_NE_S64):
1079 {
1080 int res;
1081
1082 res = (estack_bx_v != estack_ax_v);
1083 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1084 estack_ax_v = res;
1085 estack_ax_t = REG_S64;
1086 next_pc += sizeof(struct binary_op);
1087 PO;
1088 }
1089 OP(BYTECODE_OP_GT_S64):
1090 {
1091 int res;
1092
1093 res = (estack_bx_v > estack_ax_v);
1094 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1095 estack_ax_v = res;
1096 estack_ax_t = REG_S64;
1097 next_pc += sizeof(struct binary_op);
1098 PO;
1099 }
1100 OP(BYTECODE_OP_LT_S64):
1101 {
1102 int res;
1103
1104 res = (estack_bx_v < estack_ax_v);
1105 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1106 estack_ax_v = res;
1107 estack_ax_t = REG_S64;
1108 next_pc += sizeof(struct binary_op);
1109 PO;
1110 }
1111 OP(BYTECODE_OP_GE_S64):
1112 {
1113 int res;
1114
1115 res = (estack_bx_v >= estack_ax_v);
1116 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1117 estack_ax_v = res;
1118 estack_ax_t = REG_S64;
1119 next_pc += sizeof(struct binary_op);
1120 PO;
1121 }
1122 OP(BYTECODE_OP_LE_S64):
1123 {
1124 int res;
1125
1126 res = (estack_bx_v <= estack_ax_v);
1127 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1128 estack_ax_v = res;
1129 estack_ax_t = REG_S64;
1130 next_pc += sizeof(struct binary_op);
1131 PO;
1132 }
1133
1134 OP(BYTECODE_OP_EQ_DOUBLE):
1135 OP(BYTECODE_OP_NE_DOUBLE):
1136 OP(BYTECODE_OP_GT_DOUBLE):
1137 OP(BYTECODE_OP_LT_DOUBLE):
1138 OP(BYTECODE_OP_GE_DOUBLE):
1139 OP(BYTECODE_OP_LE_DOUBLE):
1140 {
1141 BUG_ON(1);
1142 PO;
1143 }
1144
1145 /* Mixed S64-double binary comparators */
1146 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1147 OP(BYTECODE_OP_NE_DOUBLE_S64):
1148 OP(BYTECODE_OP_GT_DOUBLE_S64):
1149 OP(BYTECODE_OP_LT_DOUBLE_S64):
1150 OP(BYTECODE_OP_GE_DOUBLE_S64):
1151 OP(BYTECODE_OP_LE_DOUBLE_S64):
1152 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1153 OP(BYTECODE_OP_NE_S64_DOUBLE):
1154 OP(BYTECODE_OP_GT_S64_DOUBLE):
1155 OP(BYTECODE_OP_LT_S64_DOUBLE):
1156 OP(BYTECODE_OP_GE_S64_DOUBLE):
1157 OP(BYTECODE_OP_LE_S64_DOUBLE):
1158 {
1159 BUG_ON(1);
1160 PO;
1161 }
1162 OP(BYTECODE_OP_BIT_RSHIFT):
1163 {
1164 int64_t res;
1165
1166 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1167 ret = -EINVAL;
1168 goto end;
1169 }
1170
1171 /* Catch undefined behavior. */
1172 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1173 ret = -EINVAL;
1174 goto end;
1175 }
1176 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1177 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1178 estack_ax_v = res;
1179 estack_ax_t = REG_U64;
1180 next_pc += sizeof(struct binary_op);
1181 PO;
1182 }
1183 OP(BYTECODE_OP_BIT_LSHIFT):
1184 {
1185 int64_t res;
1186
1187 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1188 ret = -EINVAL;
1189 goto end;
1190 }
1191
1192 /* Catch undefined behavior. */
1193 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1194 ret = -EINVAL;
1195 goto end;
1196 }
1197 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1198 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1199 estack_ax_v = res;
1200 estack_ax_t = REG_U64;
1201 next_pc += sizeof(struct binary_op);
1202 PO;
1203 }
1204 OP(BYTECODE_OP_BIT_AND):
1205 {
1206 int64_t res;
1207
1208 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1209 ret = -EINVAL;
1210 goto end;
1211 }
1212
1213 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1214 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1215 estack_ax_v = res;
1216 estack_ax_t = REG_U64;
1217 next_pc += sizeof(struct binary_op);
1218 PO;
1219 }
1220 OP(BYTECODE_OP_BIT_OR):
1221 {
1222 int64_t res;
1223
1224 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1225 ret = -EINVAL;
1226 goto end;
1227 }
1228
1229 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1230 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1231 estack_ax_v = res;
1232 estack_ax_t = REG_U64;
1233 next_pc += sizeof(struct binary_op);
1234 PO;
1235 }
1236 OP(BYTECODE_OP_BIT_XOR):
1237 {
1238 int64_t res;
1239
1240 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1241 ret = -EINVAL;
1242 goto end;
1243 }
1244
1245 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1246 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1247 estack_ax_v = res;
1248 estack_ax_t = REG_U64;
1249 next_pc += sizeof(struct binary_op);
1250 PO;
1251 }
1252
1253 /* unary */
1254 OP(BYTECODE_OP_UNARY_PLUS):
1255 OP(BYTECODE_OP_UNARY_MINUS):
1256 OP(BYTECODE_OP_UNARY_NOT):
1257 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1258 (unsigned int) *(bytecode_opcode_t *) pc);
1259 ret = -EINVAL;
1260 goto end;
1261
1262
1263 OP(BYTECODE_OP_UNARY_BIT_NOT):
1264 {
1265 estack_ax_v = ~(uint64_t) estack_ax_v;
1266 estack_ax_t = REG_S64;
1267 next_pc += sizeof(struct unary_op);
1268 PO;
1269 }
1270
1271 OP(BYTECODE_OP_UNARY_PLUS_S64):
1272 {
1273 next_pc += sizeof(struct unary_op);
1274 PO;
1275 }
1276 OP(BYTECODE_OP_UNARY_MINUS_S64):
1277 {
1278 estack_ax_v = -estack_ax_v;
1279 estack_ax_t = REG_S64;
1280 next_pc += sizeof(struct unary_op);
1281 PO;
1282 }
1283 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1284 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1285 {
1286 BUG_ON(1);
1287 PO;
1288 }
1289 OP(BYTECODE_OP_UNARY_NOT_S64):
1290 {
1291 estack_ax_v = !estack_ax_v;
1292 estack_ax_t = REG_S64;
1293 next_pc += sizeof(struct unary_op);
1294 PO;
1295 }
1296 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1297 {
1298 BUG_ON(1);
1299 PO;
1300 }
1301
1302 /* logical */
1303 OP(BYTECODE_OP_AND):
1304 {
1305 struct logical_op *insn = (struct logical_op *) pc;
1306
1307 /* If AX is 0, skip and evaluate to 0 */
1308 if (unlikely(estack_ax_v == 0)) {
1309 dbg_printk("Jumping to bytecode offset %u\n",
1310 (unsigned int) insn->skip_offset);
1311 next_pc = start_pc + insn->skip_offset;
1312 } else {
1313 /* Pop 1 when jump not taken */
1314 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1315 next_pc += sizeof(struct logical_op);
1316 }
1317 PO;
1318 }
1319 OP(BYTECODE_OP_OR):
1320 {
1321 struct logical_op *insn = (struct logical_op *) pc;
1322
1323 /* If AX is nonzero, skip and evaluate to 1 */
1324
1325 if (unlikely(estack_ax_v != 0)) {
1326 estack_ax_v = 1;
1327 dbg_printk("Jumping to bytecode offset %u\n",
1328 (unsigned int) insn->skip_offset);
1329 next_pc = start_pc + insn->skip_offset;
1330 } else {
1331 /* Pop 1 when jump not taken */
1332 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1333 next_pc += sizeof(struct logical_op);
1334 }
1335 PO;
1336 }
1337
1338
1339 /* load field ref */
1340 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1341 {
1342 struct load_op *insn = (struct load_op *) pc;
1343 struct field_ref *ref = (struct field_ref *) insn->data;
1344
1345 dbg_printk("load field ref offset %u type string\n",
1346 ref->offset);
1347 estack_push(stack, top, ax, bx, ax_t, bx_t);
1348 estack_ax(stack, top)->u.s.str =
1349 *(const char * const *) &interpreter_stack_data[ref->offset];
1350 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1351 dbg_printk("Bytecode warning: loading a NULL string.\n");
1352 ret = -EINVAL;
1353 goto end;
1354 }
1355 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1356 estack_ax(stack, top)->u.s.literal_type =
1357 ESTACK_STRING_LITERAL_TYPE_NONE;
1358 estack_ax(stack, top)->u.s.user = 0;
1359 estack_ax(stack, top)->type = REG_STRING;
1360 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1361 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1362 PO;
1363 }
1364
1365 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
1366 {
1367 struct load_op *insn = (struct load_op *) pc;
1368 struct field_ref *ref = (struct field_ref *) insn->data;
1369
1370 dbg_printk("load field ref offset %u type sequence\n",
1371 ref->offset);
1372 estack_push(stack, top, ax, bx, ax_t, bx_t);
1373 estack_ax(stack, top)->u.s.seq_len =
1374 *(unsigned long *) &interpreter_stack_data[ref->offset];
1375 estack_ax(stack, top)->u.s.str =
1376 *(const char **) (&interpreter_stack_data[ref->offset
1377 + sizeof(unsigned long)]);
1378 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1379 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1380 ret = -EINVAL;
1381 goto end;
1382 }
1383 estack_ax(stack, top)->u.s.literal_type =
1384 ESTACK_STRING_LITERAL_TYPE_NONE;
1385 estack_ax(stack, top)->u.s.user = 0;
1386 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1387 PO;
1388 }
1389
1390 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
1391 {
1392 struct load_op *insn = (struct load_op *) pc;
1393 struct field_ref *ref = (struct field_ref *) insn->data;
1394
1395 dbg_printk("load field ref offset %u type s64\n",
1396 ref->offset);
1397 estack_push(stack, top, ax, bx, ax_t, bx_t);
1398 estack_ax_v =
1399 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
1400 estack_ax_t = REG_S64;
1401 dbg_printk("ref load s64 %lld\n",
1402 (long long) estack_ax_v);
1403 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1404 PO;
1405 }
1406
1407 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
1408 {
1409 BUG_ON(1);
1410 PO;
1411 }
1412
1413 /* load from immediate operand */
1414 OP(BYTECODE_OP_LOAD_STRING):
1415 {
1416 struct load_op *insn = (struct load_op *) pc;
1417
1418 dbg_printk("load string %s\n", insn->data);
1419 estack_push(stack, top, ax, bx, ax_t, bx_t);
1420 estack_ax(stack, top)->u.s.str = insn->data;
1421 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1422 estack_ax(stack, top)->u.s.literal_type =
1423 ESTACK_STRING_LITERAL_TYPE_PLAIN;
1424 estack_ax(stack, top)->u.s.user = 0;
1425 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1426 PO;
1427 }
1428
1429 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
1430 {
1431 struct load_op *insn = (struct load_op *) pc;
1432
1433 dbg_printk("load globbing pattern %s\n", insn->data);
1434 estack_push(stack, top, ax, bx, ax_t, bx_t);
1435 estack_ax(stack, top)->u.s.str = insn->data;
1436 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1437 estack_ax(stack, top)->u.s.literal_type =
1438 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
1439 estack_ax(stack, top)->u.s.user = 0;
1440 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1441 PO;
1442 }
1443
1444 OP(BYTECODE_OP_LOAD_S64):
1445 {
1446 struct load_op *insn = (struct load_op *) pc;
1447
1448 estack_push(stack, top, ax, bx, ax_t, bx_t);
1449 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
1450 estack_ax_t = REG_S64;
1451 dbg_printk("load s64 %lld\n",
1452 (long long) estack_ax_v);
1453 next_pc += sizeof(struct load_op)
1454 + sizeof(struct literal_numeric);
1455 PO;
1456 }
1457
1458 OP(BYTECODE_OP_LOAD_DOUBLE):
1459 {
1460 BUG_ON(1);
1461 PO;
1462 }
1463
1464 /* cast */
1465 OP(BYTECODE_OP_CAST_TO_S64):
1466 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1467 (unsigned int) *(bytecode_opcode_t *) pc);
1468 ret = -EINVAL;
1469 goto end;
1470
1471 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
1472 {
1473 BUG_ON(1);
1474 PO;
1475 }
1476
1477 OP(BYTECODE_OP_CAST_NOP):
1478 {
1479 next_pc += sizeof(struct cast_op);
1480 PO;
1481 }
1482
1483 /* get context ref */
1484 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
1485 {
1486 struct load_op *insn = (struct load_op *) pc;
1487 struct field_ref *ref = (struct field_ref *) insn->data;
1488 struct lttng_kernel_ctx_field *ctx_field;
1489 union lttng_ctx_value v;
1490
1491 dbg_printk("get context ref offset %u type string\n",
1492 ref->offset);
1493 ctx_field = &lttng_static_ctx->fields[ref->offset];
1494 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1495 estack_push(stack, top, ax, bx, ax_t, bx_t);
1496 estack_ax(stack, top)->u.s.str = v.str;
1497 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1498 dbg_printk("Bytecode warning: loading a NULL string.\n");
1499 ret = -EINVAL;
1500 goto end;
1501 }
1502 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1503 estack_ax(stack, top)->u.s.literal_type =
1504 ESTACK_STRING_LITERAL_TYPE_NONE;
1505 estack_ax(stack, top)->u.s.user = 0;
1506 estack_ax(stack, top)->type = REG_STRING;
1507 dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
1508 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1509 PO;
1510 }
1511
1512 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
1513 {
1514 struct load_op *insn = (struct load_op *) pc;
1515 struct field_ref *ref = (struct field_ref *) insn->data;
1516 struct lttng_kernel_ctx_field *ctx_field;
1517 union lttng_ctx_value v;
1518
1519 dbg_printk("get context ref offset %u type s64\n",
1520 ref->offset);
1521 ctx_field = &lttng_static_ctx->fields[ref->offset];
1522 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1523 estack_push(stack, top, ax, bx, ax_t, bx_t);
1524 estack_ax_v = v.s64;
1525 estack_ax_t = REG_S64;
1526 dbg_printk("ref get context s64 %lld\n",
1527 (long long) estack_ax_v);
1528 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1529 PO;
1530 }
1531
1532 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
1533 {
1534 BUG_ON(1);
1535 PO;
1536 }
1537
1538 /* load userspace field ref */
1539 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING):
1540 {
1541 struct load_op *insn = (struct load_op *) pc;
1542 struct field_ref *ref = (struct field_ref *) insn->data;
1543
1544 dbg_printk("load field ref offset %u type user string\n",
1545 ref->offset);
1546 estack_push(stack, top, ax, bx, ax_t, bx_t);
1547 estack_ax(stack, top)->u.s.user_str =
1548 *(const char * const *) &interpreter_stack_data[ref->offset];
1549 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1550 dbg_printk("Bytecode warning: loading a NULL string.\n");
1551 ret = -EINVAL;
1552 goto end;
1553 }
1554 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1555 estack_ax(stack, top)->u.s.literal_type =
1556 ESTACK_STRING_LITERAL_TYPE_NONE;
1557 estack_ax(stack, top)->u.s.user = 1;
1558 estack_ax(stack, top)->type = REG_STRING;
1559 dbg_load_ref_user_str_printk(estack_ax(stack, top));
1560 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1561 PO;
1562 }
1563
1564 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1565 {
1566 struct load_op *insn = (struct load_op *) pc;
1567 struct field_ref *ref = (struct field_ref *) insn->data;
1568
1569 dbg_printk("load field ref offset %u type user sequence\n",
1570 ref->offset);
1571 estack_push(stack, top, ax, bx, ax_t, bx_t);
1572 estack_ax(stack, top)->u.s.seq_len =
1573 *(unsigned long *) &interpreter_stack_data[ref->offset];
1574 estack_ax(stack, top)->u.s.user_str =
1575 *(const char **) (&interpreter_stack_data[ref->offset
1576 + sizeof(unsigned long)]);
1577 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1578 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1579 ret = -EINVAL;
1580 goto end;
1581 }
1582 estack_ax(stack, top)->u.s.literal_type =
1583 ESTACK_STRING_LITERAL_TYPE_NONE;
1584 estack_ax(stack, top)->u.s.user = 1;
1585 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1586 PO;
1587 }
1588
1589 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
1590 {
1591 dbg_printk("op get context root\n");
1592 estack_push(stack, top, ax, bx, ax_t, bx_t);
1593 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
1594 /* "field" only needed for variants. */
1595 estack_ax(stack, top)->u.ptr.field = NULL;
1596 estack_ax(stack, top)->type = REG_PTR;
1597 next_pc += sizeof(struct load_op);
1598 PO;
1599 }
1600
1601 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
1602 {
1603 BUG_ON(1);
1604 PO;
1605 }
1606
1607 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
1608 {
1609 dbg_printk("op get app payload root\n");
1610 estack_push(stack, top, ax, bx, ax_t, bx_t);
1611 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
1612 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
1613 /* "field" only needed for variants. */
1614 estack_ax(stack, top)->u.ptr.field = NULL;
1615 estack_ax(stack, top)->type = REG_PTR;
1616 next_pc += sizeof(struct load_op);
1617 PO;
1618 }
1619
1620 OP(BYTECODE_OP_GET_SYMBOL):
1621 {
1622 dbg_printk("op get symbol\n");
1623 switch (estack_ax(stack, top)->u.ptr.type) {
1624 case LOAD_OBJECT:
1625 printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
1626 ret = -EINVAL;
1627 goto end;
1628 case LOAD_ROOT_CONTEXT:
1629 case LOAD_ROOT_APP_CONTEXT:
1630 case LOAD_ROOT_PAYLOAD:
1631 /*
1632 * symbol lookup is performed by
1633 * specialization.
1634 */
1635 ret = -EINVAL;
1636 goto end;
1637 }
1638 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1639 PO;
1640 }
1641
1642 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
1643 {
1644 /*
1645 * Used for first variant encountered in a
1646 * traversal. Variants are not implemented yet.
1647 */
1648 ret = -EINVAL;
1649 goto end;
1650 }
1651
1652 OP(BYTECODE_OP_GET_INDEX_U16):
1653 {
1654 struct load_op *insn = (struct load_op *) pc;
1655 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1656
1657 dbg_printk("op get index u16\n");
1658 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1659 if (ret)
1660 goto end;
1661 estack_ax_v = estack_ax(stack, top)->u.v;
1662 estack_ax_t = estack_ax(stack, top)->type;
1663 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1664 PO;
1665 }
1666
1667 OP(BYTECODE_OP_GET_INDEX_U64):
1668 {
1669 struct load_op *insn = (struct load_op *) pc;
1670 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1671
1672 dbg_printk("op get index u64\n");
1673 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1674 if (ret)
1675 goto end;
1676 estack_ax_v = estack_ax(stack, top)->u.v;
1677 estack_ax_t = estack_ax(stack, top)->type;
1678 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1679 PO;
1680 }
1681
1682 OP(BYTECODE_OP_LOAD_FIELD):
1683 {
1684 dbg_printk("op load field\n");
1685 ret = dynamic_load_field(estack_ax(stack, top));
1686 if (ret)
1687 goto end;
1688 estack_ax_v = estack_ax(stack, top)->u.v;
1689 estack_ax_t = estack_ax(stack, top)->type;
1690 next_pc += sizeof(struct load_op);
1691 PO;
1692 }
1693
1694 OP(BYTECODE_OP_LOAD_FIELD_S8):
1695 {
1696 dbg_printk("op load field s8\n");
1697
1698 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1699 estack_ax_t = REG_S64;
1700 next_pc += sizeof(struct load_op);
1701 PO;
1702 }
1703 OP(BYTECODE_OP_LOAD_FIELD_S16):
1704 {
1705 dbg_printk("op load field s16\n");
1706
1707 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1708 estack_ax_t = REG_S64;
1709 next_pc += sizeof(struct load_op);
1710 PO;
1711 }
1712 OP(BYTECODE_OP_LOAD_FIELD_S32):
1713 {
1714 dbg_printk("op load field s32\n");
1715
1716 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1717 estack_ax_t = REG_S64;
1718 next_pc += sizeof(struct load_op);
1719 PO;
1720 }
1721 OP(BYTECODE_OP_LOAD_FIELD_S64):
1722 {
1723 dbg_printk("op load field s64\n");
1724
1725 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1726 estack_ax_t = REG_S64;
1727 next_pc += sizeof(struct load_op);
1728 PO;
1729 }
1730 OP(BYTECODE_OP_LOAD_FIELD_U8):
1731 {
1732 dbg_printk("op load field u8\n");
1733
1734 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1735 estack_ax_t = REG_S64;
1736 next_pc += sizeof(struct load_op);
1737 PO;
1738 }
1739 OP(BYTECODE_OP_LOAD_FIELD_U16):
1740 {
1741 dbg_printk("op load field u16\n");
1742
1743 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1744 estack_ax_t = REG_S64;
1745 next_pc += sizeof(struct load_op);
1746 PO;
1747 }
1748 OP(BYTECODE_OP_LOAD_FIELD_U32):
1749 {
1750 dbg_printk("op load field u32\n");
1751
1752 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1753 estack_ax_t = REG_S64;
1754 next_pc += sizeof(struct load_op);
1755 PO;
1756 }
1757 OP(BYTECODE_OP_LOAD_FIELD_U64):
1758 {
1759 dbg_printk("op load field u64\n");
1760
1761 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1762 estack_ax_t = REG_S64;
1763 next_pc += sizeof(struct load_op);
1764 PO;
1765 }
1766 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
1767 {
1768 ret = -EINVAL;
1769 goto end;
1770 }
1771
1772 OP(BYTECODE_OP_LOAD_FIELD_STRING):
1773 {
1774 const char *str;
1775
1776 dbg_printk("op load field string\n");
1777 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
1778 estack_ax(stack, top)->u.s.str = str;
1779 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1780 dbg_printk("Bytecode warning: loading a NULL string.\n");
1781 ret = -EINVAL;
1782 goto end;
1783 }
1784 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1785 estack_ax(stack, top)->u.s.literal_type =
1786 ESTACK_STRING_LITERAL_TYPE_NONE;
1787 estack_ax(stack, top)->type = REG_STRING;
1788 next_pc += sizeof(struct load_op);
1789 PO;
1790 }
1791
1792 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
1793 {
1794 const char *ptr;
1795
1796 dbg_printk("op load field string sequence\n");
1797 ptr = estack_ax(stack, top)->u.ptr.ptr;
1798 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
1799 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
1800 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1801 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1802 ret = -EINVAL;
1803 goto end;
1804 }
1805 estack_ax(stack, top)->u.s.literal_type =
1806 ESTACK_STRING_LITERAL_TYPE_NONE;
1807 estack_ax(stack, top)->type = REG_STRING;
1808 next_pc += sizeof(struct load_op);
1809 PO;
1810 }
1811
1812 END_OP
1813 end:
1814 /* Return _DISCARD on error. */
1815 if (ret)
1816 return LTTNG_INTERPRETER_DISCARD;
1817
1818 if (output) {
1819 return lttng_bytecode_interpret_format_output(
1820 estack_ax(stack, top), output);
1821 }
1822
1823 return retval;
1824 }
1825 LTTNG_STACK_FRAME_NON_STANDARD(bytecode_interpret);
1826
1827 uint64_t lttng_bytecode_filter_interpret(void *filter_data,
1828 struct lttng_probe_ctx *lttng_probe_ctx,
1829 const char *filter_stack_data)
1830 {
1831 return bytecode_interpret(filter_data, lttng_probe_ctx,
1832 filter_stack_data, NULL);
1833 }
1834
1835 uint64_t lttng_bytecode_capture_interpret(void *capture_data,
1836 struct lttng_probe_ctx *lttng_probe_ctx,
1837 const char *capture_stack_data,
1838 struct lttng_interpreter_output *output)
1839 {
1840 return bytecode_interpret(capture_data, lttng_probe_ctx,
1841 capture_stack_data, output);
1842 }
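/*
 * Usage sketch (illustrative; variable names and the serialization helper are
 * hypothetical): the filter entry point only returns the record/discard
 * decision, while the capture entry point additionally fills *output with the
 * value left on top of the stack:
 *
 *	struct lttng_interpreter_output out;
 *
 *	if (lttng_bytecode_capture_interpret(runtime, probe_ctx, stack_data, &out)
 *			& LTTNG_INTERPRETER_RECORD_FLAG)
 *		serialize_capture(&out);
 */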
1843
1844 #undef START_OP
1845 #undef OP
1846 #undef PO
1847 #undef END_OP