1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-bytecode-interpreter.c
4 *
5 * LTTng modules bytecode interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
14
15 #include <lttng/lttng-bytecode.h>
16 #include <lttng/string-utils.h>
17
18 /*
19 * get_char should be called with the page fault handler disabled if it is
20 * expected to handle user-space reads.
21 */
22 static
23 char get_char(struct estack_entry *reg, size_t offset)
24 {
25 if (unlikely(offset >= reg->u.s.seq_len))
26 return '\0';
27 if (reg->u.s.user) {
28 char c;
29
30 /* Handle invalid access as end of string. */
31 if (unlikely(!lttng_access_ok(VERIFY_READ,
32 reg->u.s.user_str + offset,
33 sizeof(c))))
34 return '\0';
35 /* Handle fault (nonzero return value) as end of string. */
36 if (unlikely(__copy_from_user_inatomic(&c,
37 reg->u.s.user_str + offset,
38 sizeof(c))))
39 return '\0';
40 return c;
41 } else {
42 return reg->u.s.str[offset];
43 }
44 }
45
46 /*
47 * -1: wildcard found.
48 * -2: unknown escape char.
49 * 0: normal char.
50 */
51 static
52 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
53 {
54 switch (*c) {
55 case '\\':
56 (*offset)++;
57 *c = get_char(reg, *offset);
58 switch (*c) {
59 case '\\':
60 case '*':
61 return 0;
62 default:
63 return -2;
64 }
65 case '*':
66 return -1;
67 default:
68 return 0;
69 }
70 }
71
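/*
 * Adapter so the string utilities can read characters out of an estack
 * entry: wraps get_char() in the char-at callback signature used by
 * strutils_star_glob_match_char_cb() below, with the estack entry passed
 * as the opaque data pointer.
 */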
72 static
73 char get_char_at_cb(size_t at, void *data)
74 {
75 return get_char(data, at);
76 }
77
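/*
 * Match the two strings sitting on top of the stack, one being a star-glob
 * pattern literal and the other the candidate string. Returns 0 on match
 * and nonzero otherwise (strcmp-like), so the callers' "== 0" / "!= 0"
 * tests map onto the EQ/NE star-glob comparators. The cmp_type argument is
 * unused in this helper.
 */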
78 static
79 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
80 {
81 bool has_user = false;
82 int result;
83 struct estack_entry *pattern_reg;
84 struct estack_entry *candidate_reg;
85
86 /* Disable the page fault handler when reading from userspace. */
87 if (estack_bx(stack, top)->u.s.user
88 || estack_ax(stack, top)->u.s.user) {
89 has_user = true;
90 pagefault_disable();
91 }
92
93 /* Find out which side is the pattern vs. the candidate. */
94 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
95 pattern_reg = estack_ax(stack, top);
96 candidate_reg = estack_bx(stack, top);
97 } else {
98 pattern_reg = estack_bx(stack, top);
99 candidate_reg = estack_ax(stack, top);
100 }
101
102 /* Perform the match operation. */
103 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
104 pattern_reg, get_char_at_cb, candidate_reg);
105 if (has_user)
106 pagefault_enable();
107
108 return result;
109 }
110
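/*
 * Lexicographic comparison of the two strings on top of the stack
 * (bx vs. ax) with strcmp-like return semantics: negative, zero or
 * positive. Plain string literals get '*' wildcard and '\' escape
 * handling through parse_char(); a wildcard makes the strings compare
 * equal from that point on. The cmp_type argument is unused in this
 * helper.
 */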
111 static
112 int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
113 {
114 size_t offset_bx = 0, offset_ax = 0;
115 int diff, has_user = 0;
116
117 if (estack_bx(stack, top)->u.s.user
118 || estack_ax(stack, top)->u.s.user) {
119 has_user = 1;
120 pagefault_disable();
121 }
122
123 for (;;) {
124 int ret;
125 int escaped_r0 = 0;
126 char char_bx, char_ax;
127
128 char_bx = get_char(estack_bx(stack, top), offset_bx);
129 char_ax = get_char(estack_ax(stack, top), offset_ax);
130
131 if (unlikely(char_bx == '\0')) {
132 if (char_ax == '\0') {
133 diff = 0;
134 break;
135 } else {
136 if (estack_ax(stack, top)->u.s.literal_type ==
137 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
138 ret = parse_char(estack_ax(stack, top),
139 &char_ax, &offset_ax);
140 if (ret == -1) {
141 diff = 0;
142 break;
143 }
144 }
145 diff = -1;
146 break;
147 }
148 }
149 if (unlikely(char_ax == '\0')) {
150 if (estack_bx(stack, top)->u.s.literal_type ==
151 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
152 ret = parse_char(estack_bx(stack, top),
153 &char_bx, &offset_bx);
154 if (ret == -1) {
155 diff = 0;
156 break;
157 }
158 }
159 diff = 1;
160 break;
161 }
162 if (estack_bx(stack, top)->u.s.literal_type ==
163 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
164 ret = parse_char(estack_bx(stack, top),
165 &char_bx, &offset_bx);
166 if (ret == -1) {
167 diff = 0;
168 break;
169 } else if (ret == -2) {
170 escaped_r0 = 1;
171 }
172 /* else compare both chars */
173 }
174 if (estack_ax(stack, top)->u.s.literal_type ==
175 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
176 ret = parse_char(estack_ax(stack, top),
177 &char_ax, &offset_ax);
178 if (ret == -1) {
179 diff = 0;
180 break;
181 } else if (ret == -2) {
182 if (!escaped_r0) {
183 diff = -1;
184 break;
185 }
186 } else {
187 if (escaped_r0) {
188 diff = 1;
189 break;
190 }
191 }
192 } else {
193 if (escaped_r0) {
194 diff = 1;
195 break;
196 }
197 }
198 diff = char_bx - char_ax;
199 if (diff != 0)
200 break;
201 offset_bx++;
202 offset_ax++;
203 }
204 if (has_user)
205 pagefault_enable();
206
207 return diff;
208 }
209
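/*
 * Interpreter stub that unconditionally discards the event. It can be
 * installed in place of the real interpreter, e.g. while a filter
 * bytecode is not (or not yet) usable.
 */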
210 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data,
211 struct lttng_probe_ctx *lttng_probe_ctx,
212 const char *filter_stack_data)
213 {
214 return LTTNG_INTERPRETER_DISCARD;
215 }
216
217 #ifdef INTERPRETER_USE_SWITCH
218
219 /*
220 * Fallback for compilers that do not support taking address of labels.
221 */
222
223 #define START_OP \
224 start_pc = &bytecode->code[0]; \
225 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
226 pc = next_pc) { \
227 dbg_printk("LTTng: Executing op %s (%u)\n", \
228 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
229 (unsigned int) *(bytecode_opcode_t *) pc); \
230 switch (*(bytecode_opcode_t *) pc) {
231
232 #define OP(name) case name
233
234 #define PO break
235
236 #define END_OP } \
237 }
238
239 #else
240
241 /*
242 * Dispatch-table based interpreter.
243 */
244
245 #define START_OP \
246 start_pc = &bytecode->code[0]; \
247 pc = next_pc = start_pc; \
248 if (unlikely(pc - start_pc >= bytecode->len)) \
249 goto end; \
250 goto *dispatch[*(bytecode_opcode_t *) pc];
251
252 #define OP(name) \
253 LABEL_##name
254
255 #define PO \
256 pc = next_pc; \
257 goto *dispatch[*(bytecode_opcode_t *) pc];
258
259 #define END_OP
260
261 #endif
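/*
 * For reference, a handler written as
 *
 *	OP(BYTECODE_OP_RETURN):
 *		...
 *		PO;
 *
 * expands roughly as follows under the dispatch-table build:
 *
 *	LABEL_BYTECODE_OP_RETURN:
 *		...
 *		pc = next_pc;
 *		goto *dispatch[*(bytecode_opcode_t *) pc];
 *
 * and to a plain "case BYTECODE_OP_RETURN: ... break;" under the
 * switch-based fallback.
 */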
262
263 #define IS_INTEGER_REGISTER(reg_type) \
264 (reg_type == REG_S64 || reg_type == REG_U64)
265
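/*
 * Resolve the static context field at the given index and describe it in
 * *ptr as a LOAD_OBJECT for the rest of the interpreter: integer and enum
 * contexts are copied into the load_ptr as S64/U64 values, while string
 * (and string array/sequence) contexts are exposed as OBJECT_TYPE_STRING
 * pointers. Structure and variant contexts cannot be loaded.
 */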
266 static int context_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
267 struct load_ptr *ptr,
268 uint32_t idx)
269 {
270
271 struct lttng_ctx_field *ctx_field;
272 struct lttng_event_field *field;
273 union lttng_ctx_value v;
274
275 ctx_field = &lttng_static_ctx->fields[idx];
276 field = &ctx_field->event_field;
277 ptr->type = LOAD_OBJECT;
278 /* field is only used for types nested within variants. */
279 ptr->field = NULL;
280
281 switch (field->type.atype) {
282 case atype_integer:
283 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
284 if (field->type.u.integer.signedness) {
285 ptr->object_type = OBJECT_TYPE_S64;
286 ptr->u.s64 = v.s64;
287 ptr->ptr = &ptr->u.s64;
288 } else {
289 ptr->object_type = OBJECT_TYPE_U64;
290 ptr->u.u64 = v.s64; /* Cast. */
291 ptr->ptr = &ptr->u.u64;
292 }
293 break;
294 case atype_enum_nestable:
295 {
296 const struct lttng_integer_type *itype =
297 &field->type.u.enum_nestable.container_type->u.integer;
298
299 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
300 if (itype->signedness) {
301 ptr->object_type = OBJECT_TYPE_S64;
302 ptr->u.s64 = v.s64;
303 ptr->ptr = &ptr->u.s64;
304 } else {
305 ptr->object_type = OBJECT_TYPE_U64;
306 ptr->u.u64 = v.s64; /* Cast. */
307 ptr->ptr = &ptr->u.u64;
308 }
309 break;
310 }
311 case atype_array_nestable:
312 if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
313 printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
314 return -EINVAL;
315 }
316 if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
317 printk(KERN_WARNING "LTTng: bytecode: Only string arrays are supported for contexts.\n");
318 return -EINVAL;
319 }
320 ptr->object_type = OBJECT_TYPE_STRING;
321 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
322 ptr->ptr = v.str;
323 break;
324 case atype_sequence_nestable:
325 if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
326 printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
327 return -EINVAL;
328 }
329 if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
330 printk(KERN_WARNING "LTTng: bytecode: Only string sequences are supported for contexts.\n");
331 return -EINVAL;
332 }
333 ptr->object_type = OBJECT_TYPE_STRING;
334 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
335 ptr->ptr = v.str;
336 break;
337 case atype_string:
338 ptr->object_type = OBJECT_TYPE_STRING;
339 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
340 ptr->ptr = v.str;
341 break;
342 case atype_struct_nestable:
343 printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
344 return -EINVAL;
345 case atype_variant_nestable:
346 printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
347 return -EINVAL;
348 default:
349 printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d\n", (int) field->type.atype);
350 return -EINVAL;
351 }
352 return 0;
353 }
354
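/*
 * Apply a "get index" instruction to the pointer register on the stack
 * top. For array and sequence objects this moves the pointer to the
 * requested element (sequences are bounds-checked at runtime); for the
 * context/app-context roots it resolves the context field through
 * context_get_index(); for the payload root it offsets into the event
 * payload data. The immediate operands are fetched from runtime->data
 * at the given index.
 */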
355 static int dynamic_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
356 struct bytecode_runtime *runtime,
357 uint64_t index, struct estack_entry *stack_top)
358 {
359 int ret;
360 const struct bytecode_get_index_data *gid;
361
362 gid = (const struct bytecode_get_index_data *) &runtime->data[index];
363 switch (stack_top->u.ptr.type) {
364 case LOAD_OBJECT:
365 switch (stack_top->u.ptr.object_type) {
366 case OBJECT_TYPE_ARRAY:
367 {
368 const char *ptr;
369
370 WARN_ON_ONCE(gid->offset >= gid->array_len);
371 /* Skip count (unsigned long) */
372 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
373 ptr = ptr + gid->offset;
374 stack_top->u.ptr.ptr = ptr;
375 stack_top->u.ptr.object_type = gid->elem.type;
376 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
377 BUG_ON(stack_top->u.ptr.field->type.atype != atype_array_nestable);
378 stack_top->u.ptr.field = NULL;
379 break;
380 }
381 case OBJECT_TYPE_SEQUENCE:
382 {
383 const char *ptr;
384 size_t ptr_seq_len;
385
386 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
387 ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
388 if (gid->offset >= gid->elem.len * ptr_seq_len) {
389 ret = -EINVAL;
390 goto end;
391 }
392 ptr = ptr + gid->offset;
393 stack_top->u.ptr.ptr = ptr;
394 stack_top->u.ptr.object_type = gid->elem.type;
395 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
396 BUG_ON(stack_top->u.ptr.field->type.atype != atype_sequence_nestable);
397 stack_top->u.ptr.field = NULL;
398 break;
399 }
400 case OBJECT_TYPE_STRUCT:
401 printk(KERN_WARNING "LTTng: bytecode: Nested structures are not supported yet.\n");
402 ret = -EINVAL;
403 goto end;
404 case OBJECT_TYPE_VARIANT:
405 default:
406 printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d\n",
407 (int) stack_top->u.ptr.object_type);
408 ret = -EINVAL;
409 goto end;
410 }
411 break;
412 case LOAD_ROOT_CONTEXT:
413 case LOAD_ROOT_APP_CONTEXT: /* Fall-through */
414 {
415 ret = context_get_index(lttng_probe_ctx,
416 &stack_top->u.ptr,
417 gid->ctx_index);
418 if (ret) {
419 goto end;
420 }
421 break;
422 }
423 case LOAD_ROOT_PAYLOAD:
424 stack_top->u.ptr.ptr += gid->offset;
425 if (gid->elem.type == OBJECT_TYPE_STRING)
426 stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
427 stack_top->u.ptr.object_type = gid->elem.type;
428 stack_top->u.ptr.type = LOAD_OBJECT;
429 stack_top->u.ptr.field = gid->field;
430 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
431 break;
432 }
433
434 stack_top->type = REG_PTR;
435
436 return 0;
437
438 end:
439 return ret;
440 }
441
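/*
 * Dereference the object pointer on the stack top and turn the entry
 * into a typed value register: signed/unsigned integers (byte-swapped
 * when the field's byte order is reversed), strings, or string
 * sequences. Aggregate objects (sequence, array, struct, variant) and
 * doubles cannot be loaded this way.
 */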
442 static int dynamic_load_field(struct estack_entry *stack_top)
443 {
444 int ret;
445
446 switch (stack_top->u.ptr.type) {
447 case LOAD_OBJECT:
448 break;
449 case LOAD_ROOT_CONTEXT:
450 case LOAD_ROOT_APP_CONTEXT:
451 case LOAD_ROOT_PAYLOAD:
452 default:
453 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
454 ret = -EINVAL;
455 goto end;
456 }
457 switch (stack_top->u.ptr.object_type) {
458 case OBJECT_TYPE_S8:
459 dbg_printk("op load field s8\n");
460 stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
461 stack_top->type = REG_S64;
462 break;
463 case OBJECT_TYPE_S16:
464 {
465 int16_t tmp;
466
467 dbg_printk("op load field s16\n");
468 tmp = *(int16_t *) stack_top->u.ptr.ptr;
469 if (stack_top->u.ptr.rev_bo)
470 __swab16s(&tmp);
471 stack_top->u.v = tmp;
472 stack_top->type = REG_S64;
473 break;
474 }
475 case OBJECT_TYPE_S32:
476 {
477 int32_t tmp;
478
479 dbg_printk("op load field s32\n");
480 tmp = *(int32_t *) stack_top->u.ptr.ptr;
481 if (stack_top->u.ptr.rev_bo)
482 __swab32s(&tmp);
483 stack_top->u.v = tmp;
484 stack_top->type = REG_S64;
485 break;
486 }
487 case OBJECT_TYPE_S64:
488 {
489 int64_t tmp;
490
491 dbg_printk("op load field s64\n");
492 tmp = *(int64_t *) stack_top->u.ptr.ptr;
493 if (stack_top->u.ptr.rev_bo)
494 __swab64s(&tmp);
495 stack_top->u.v = tmp;
496 stack_top->type = REG_S64;
497 break;
498 }
499 case OBJECT_TYPE_U8:
500 dbg_printk("op load field u8\n");
501 stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
502 stack_top->type = REG_U64;
503 break;
504 case OBJECT_TYPE_U16:
505 {
506 uint16_t tmp;
507
508 dbg_printk("op load field u16\n");
509 tmp = *(uint16_t *) stack_top->u.ptr.ptr;
510 if (stack_top->u.ptr.rev_bo)
511 __swab16s(&tmp);
512 stack_top->u.v = tmp;
513 stack_top->type = REG_U64;
514 break;
515 }
516 case OBJECT_TYPE_U32:
517 {
518 uint32_t tmp;
519
520 dbg_printk("op load field u32\n");
521 tmp = *(uint32_t *) stack_top->u.ptr.ptr;
522 if (stack_top->u.ptr.rev_bo)
523 __swab32s(&tmp);
524 stack_top->u.v = tmp;
525 stack_top->type = REG_U64;
526 break;
527 }
528 case OBJECT_TYPE_U64:
529 {
530 uint64_t tmp;
531
532 dbg_printk("op load field u64\n");
533 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
534 if (stack_top->u.ptr.rev_bo)
535 __swab64s(&tmp);
536 stack_top->u.v = tmp;
537 stack_top->type = REG_U64;
538 break;
539 }
540 case OBJECT_TYPE_STRING:
541 {
542 const char *str;
543
544 dbg_printk("op load field string\n");
545 str = (const char *) stack_top->u.ptr.ptr;
546 stack_top->u.s.str = str;
547 if (unlikely(!stack_top->u.s.str)) {
548 dbg_printk("Bytecode warning: loading a NULL string.\n");
549 ret = -EINVAL;
550 goto end;
551 }
552 stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
553 stack_top->u.s.literal_type =
554 ESTACK_STRING_LITERAL_TYPE_NONE;
555 stack_top->type = REG_STRING;
556 break;
557 }
558 case OBJECT_TYPE_STRING_SEQUENCE:
559 {
560 const char *ptr;
561
562 dbg_printk("op load field string sequence\n");
563 ptr = stack_top->u.ptr.ptr;
564 stack_top->u.s.seq_len = *(unsigned long *) ptr;
565 stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
566 if (unlikely(!stack_top->u.s.str)) {
567 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
568 ret = -EINVAL;
569 goto end;
570 }
571 stack_top->u.s.literal_type =
572 ESTACK_STRING_LITERAL_TYPE_NONE;
573 stack_top->type = REG_STRING;
574 break;
575 }
576 case OBJECT_TYPE_DYNAMIC:
577 /*
578 * Dynamic types in context are looked up
579 * by context get index.
580 */
581 ret = -EINVAL;
582 goto end;
583 case OBJECT_TYPE_DOUBLE:
584 ret = -EINVAL;
585 goto end;
586 case OBJECT_TYPE_SEQUENCE:
587 case OBJECT_TYPE_ARRAY:
588 case OBJECT_TYPE_STRUCT:
589 case OBJECT_TYPE_VARIANT:
590 printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
591 ret = -EINVAL;
592 goto end;
593 }
594 return 0;
595
596 end:
597 return ret;
598 }
599
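/*
 * Translate the interpreter stack top into the lttng_interpreter_output
 * structure handed in by the caller (used when the interpreter must
 * produce a value rather than a simple record/discard decision).
 * Pointer registers to scalar or string objects are first loaded via
 * dynamic_load_field() and then re-examined; array and sequence objects
 * are emitted directly as sequence outputs.
 */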
600 static
601 int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
602 struct lttng_interpreter_output *output)
603 {
604 int ret;
605
606 again:
607 switch (ax->type) {
608 case REG_S64:
609 output->type = LTTNG_INTERPRETER_TYPE_S64;
610 output->u.s = ax->u.v;
611 break;
612 case REG_U64:
613 output->type = LTTNG_INTERPRETER_TYPE_U64;
614 output->u.u = (uint64_t) ax->u.v;
615 break;
616 case REG_STRING:
617 output->type = LTTNG_INTERPRETER_TYPE_STRING;
618 output->u.str.str = ax->u.s.str;
619 output->u.str.len = ax->u.s.seq_len;
620 break;
621 case REG_PTR:
622 switch (ax->u.ptr.object_type) {
623 case OBJECT_TYPE_S8:
624 case OBJECT_TYPE_S16:
625 case OBJECT_TYPE_S32:
626 case OBJECT_TYPE_S64:
627 case OBJECT_TYPE_U8:
628 case OBJECT_TYPE_U16:
629 case OBJECT_TYPE_U32:
630 case OBJECT_TYPE_U64:
631 case OBJECT_TYPE_DOUBLE:
632 case OBJECT_TYPE_STRING:
633 case OBJECT_TYPE_STRING_SEQUENCE:
634 ret = dynamic_load_field(ax);
635 if (ret)
636 return ret;
637 /* Retry after loading ptr into stack top. */
638 goto again;
639 case OBJECT_TYPE_SEQUENCE:
640 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
641 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
642 output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
643 output->u.sequence.nested_type = ax->u.ptr.field->type.u.sequence_nestable.elem_type;
644 break;
645 case OBJECT_TYPE_ARRAY:
646 /* Skip count (unsigned long) */
647 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
648 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
649 output->u.sequence.nr_elem = ax->u.ptr.field->type.u.array_nestable.length;
650 output->u.sequence.nested_type = ax->u.ptr.field->type.u.array_nestable.elem_type;
651 break;
652 case OBJECT_TYPE_STRUCT:
653 case OBJECT_TYPE_VARIANT:
654 default:
655 return -EINVAL;
656 }
657
658 break;
659 case REG_STAR_GLOB_STRING:
660 case REG_TYPE_UNKNOWN:
661 default:
662 return -EINVAL;
663 }
664
665 return LTTNG_INTERPRETER_RECORD_FLAG;
666 }
667
668 /*
669 * Return 0 (discard), or raise the 0x1 flag (log event).
670 * Currently, other flags are kept for future extensions and have no
671 * effect.
672 */
673 static
674 uint64_t bytecode_interpret(void *interpreter_data,
675 struct lttng_probe_ctx *lttng_probe_ctx,
676 const char *interpreter_stack_data,
677 struct lttng_interpreter_output *output)
678 {
679 struct bytecode_runtime *bytecode = interpreter_data;
680 void *pc, *next_pc, *start_pc;
681 int ret = -EINVAL;
682 uint64_t retval = 0;
683 struct estack _stack;
684 struct estack *stack = &_stack;
685 register int64_t ax = 0, bx = 0;
686 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
687 register int top = INTERPRETER_STACK_EMPTY;
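/*
 * The value and type of the top two stack entries are cached in the
 * ax/bx (and ax_t/bx_t) registers above; the estack_push()/estack_pop()
 * and estack_ax()/estack_bx() helpers keep this cache in sync with the
 * in-memory stack.
 */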
688 #ifndef INTERPRETER_USE_SWITCH
689 static void *dispatch[NR_BYTECODE_OPS] = {
690 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
691
692 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
693
694 /* binary */
695 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
696 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
697 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
698 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
699 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
700 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
701 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
702 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
703 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
704 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
705
706 /* binary comparators */
707 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
708 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
709 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
710 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
711 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
712 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
713
714 /* string binary comparator */
715 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
716 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
717 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
718 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
719 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
720 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
721
722 /* globbing pattern binary comparator */
723 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
724 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
725
726 /* s64 binary comparator */
727 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
728 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
729 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
730 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
731 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
732 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
733
734 /* double binary comparator */
735 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
736 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
737 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
738 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
739 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
740 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
741
742 /* Mixed S64-double binary comparators */
743 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
744 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
745 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
746 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
747 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
748 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
749
750 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
751 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
752 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
753 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
754 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
755 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
756
757 /* unary */
758 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
759 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
760 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
761 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
762 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
763 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
764 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
765 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
766 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
767
768 /* logical */
769 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
770 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
771
772 /* load field ref */
773 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
774 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
775 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
776 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
777 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
778
779 /* load from immediate operand */
780 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
781 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
782 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
783 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
784
785 /* cast */
786 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
787 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
788 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
789
790 /* get context ref */
791 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
792 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
793 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
794 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
795
796 /* load userspace field ref */
797 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING,
798 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE,
799
800 /* Instructions for recursive traversal through composed types. */
801 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
802 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
803 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
804
805 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
806 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
807 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
808 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
809
810 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
811 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
812 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
813 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
814 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
815 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
816 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
817 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
818 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
819 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
820 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
821 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
822
823 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
824
825 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
826 };
827 #endif /* #ifndef INTERPRETER_USE_SWITCH */
828
829 START_OP
830
831 OP(BYTECODE_OP_UNKNOWN):
832 OP(BYTECODE_OP_LOAD_FIELD_REF):
833 OP(BYTECODE_OP_GET_CONTEXT_REF):
834 #ifdef INTERPRETER_USE_SWITCH
835 default:
836 #endif /* INTERPRETER_USE_SWITCH */
837 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
838 (unsigned int) *(bytecode_opcode_t *) pc);
839 ret = -EINVAL;
840 goto end;
841
842 OP(BYTECODE_OP_RETURN):
843 OP(BYTECODE_OP_RETURN_S64):
844 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
845 switch (estack_ax_t) {
846 case REG_S64:
847 case REG_U64:
848 retval = !!estack_ax_v;
849 break;
850 case REG_DOUBLE:
851 case REG_STRING:
852 case REG_PTR:
853 if (!output) {
854 ret = -EINVAL;
855 goto end;
856 }
857 retval = 0;
858 break;
859 case REG_STAR_GLOB_STRING:
860 case REG_TYPE_UNKNOWN:
861 ret = -EINVAL;
862 goto end;
863 }
864 ret = 0;
865 goto end;
866
867 /* binary */
868 OP(BYTECODE_OP_MUL):
869 OP(BYTECODE_OP_DIV):
870 OP(BYTECODE_OP_MOD):
871 OP(BYTECODE_OP_PLUS):
872 OP(BYTECODE_OP_MINUS):
873 printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
874 (unsigned int) *(bytecode_opcode_t *) pc);
875 ret = -EINVAL;
876 goto end;
877
878 OP(BYTECODE_OP_EQ):
879 OP(BYTECODE_OP_NE):
880 OP(BYTECODE_OP_GT):
881 OP(BYTECODE_OP_LT):
882 OP(BYTECODE_OP_GE):
883 OP(BYTECODE_OP_LE):
884 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
885 (unsigned int) *(bytecode_opcode_t *) pc);
886 ret = -EINVAL;
887 goto end;
888
889 OP(BYTECODE_OP_EQ_STRING):
890 {
891 int res;
892
893 res = (stack_strcmp(stack, top, "==") == 0);
894 estack_pop(stack, top, ax, bx, ax_t, bx_t);
895 estack_ax_v = res;
896 estack_ax_t = REG_S64;
897 next_pc += sizeof(struct binary_op);
898 PO;
899 }
900 OP(BYTECODE_OP_NE_STRING):
901 {
902 int res;
903
904 res = (stack_strcmp(stack, top, "!=") != 0);
905 estack_pop(stack, top, ax, bx, ax_t, bx_t);
906 estack_ax_v = res;
907 estack_ax_t = REG_S64;
908 next_pc += sizeof(struct binary_op);
909 PO;
910 }
911 OP(BYTECODE_OP_GT_STRING):
912 {
913 int res;
914
915 res = (stack_strcmp(stack, top, ">") > 0);
916 estack_pop(stack, top, ax, bx, ax_t, bx_t);
917 estack_ax_v = res;
918 estack_ax_t = REG_S64;
919 next_pc += sizeof(struct binary_op);
920 PO;
921 }
922 OP(BYTECODE_OP_LT_STRING):
923 {
924 int res;
925
926 res = (stack_strcmp(stack, top, "<") < 0);
927 estack_pop(stack, top, ax, bx, ax_t, bx_t);
928 estack_ax_v = res;
929 estack_ax_t = REG_S64;
930 next_pc += sizeof(struct binary_op);
931 PO;
932 }
933 OP(BYTECODE_OP_GE_STRING):
934 {
935 int res;
936
937 res = (stack_strcmp(stack, top, ">=") >= 0);
938 estack_pop(stack, top, ax, bx, ax_t, bx_t);
939 estack_ax_v = res;
940 estack_ax_t = REG_S64;
941 next_pc += sizeof(struct binary_op);
942 PO;
943 }
944 OP(BYTECODE_OP_LE_STRING):
945 {
946 int res;
947
948 res = (stack_strcmp(stack, top, "<=") <= 0);
949 estack_pop(stack, top, ax, bx, ax_t, bx_t);
950 estack_ax_v = res;
951 estack_ax_t = REG_S64;
952 next_pc += sizeof(struct binary_op);
953 PO;
954 }
955
956 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
957 {
958 int res;
959
960 res = (stack_star_glob_match(stack, top, "==") == 0);
961 estack_pop(stack, top, ax, bx, ax_t, bx_t);
962 estack_ax_v = res;
963 estack_ax_t = REG_S64;
964 next_pc += sizeof(struct binary_op);
965 PO;
966 }
967 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
968 {
969 int res;
970
971 res = (stack_star_glob_match(stack, top, "!=") != 0);
972 estack_pop(stack, top, ax, bx, ax_t, bx_t);
973 estack_ax_v = res;
974 estack_ax_t = REG_S64;
975 next_pc += sizeof(struct binary_op);
976 PO;
977 }
978
979 OP(BYTECODE_OP_EQ_S64):
980 {
981 int res;
982
983 res = (estack_bx_v == estack_ax_v);
984 estack_pop(stack, top, ax, bx, ax_t, bx_t);
985 estack_ax_v = res;
986 estack_ax_t = REG_S64;
987 next_pc += sizeof(struct binary_op);
988 PO;
989 }
990 OP(BYTECODE_OP_NE_S64):
991 {
992 int res;
993
994 res = (estack_bx_v != estack_ax_v);
995 estack_pop(stack, top, ax, bx, ax_t, bx_t);
996 estack_ax_v = res;
997 estack_ax_t = REG_S64;
998 next_pc += sizeof(struct binary_op);
999 PO;
1000 }
1001 OP(BYTECODE_OP_GT_S64):
1002 {
1003 int res;
1004
1005 res = (estack_bx_v > estack_ax_v);
1006 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1007 estack_ax_v = res;
1008 estack_ax_t = REG_S64;
1009 next_pc += sizeof(struct binary_op);
1010 PO;
1011 }
1012 OP(BYTECODE_OP_LT_S64):
1013 {
1014 int res;
1015
1016 res = (estack_bx_v < estack_ax_v);
1017 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1018 estack_ax_v = res;
1019 estack_ax_t = REG_S64;
1020 next_pc += sizeof(struct binary_op);
1021 PO;
1022 }
1023 OP(BYTECODE_OP_GE_S64):
1024 {
1025 int res;
1026
1027 res = (estack_bx_v >= estack_ax_v);
1028 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1029 estack_ax_v = res;
1030 estack_ax_t = REG_S64;
1031 next_pc += sizeof(struct binary_op);
1032 PO;
1033 }
1034 OP(BYTECODE_OP_LE_S64):
1035 {
1036 int res;
1037
1038 res = (estack_bx_v <= estack_ax_v);
1039 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1040 estack_ax_v = res;
1041 estack_ax_t = REG_S64;
1042 next_pc += sizeof(struct binary_op);
1043 PO;
1044 }
1045
1046 OP(BYTECODE_OP_EQ_DOUBLE):
1047 OP(BYTECODE_OP_NE_DOUBLE):
1048 OP(BYTECODE_OP_GT_DOUBLE):
1049 OP(BYTECODE_OP_LT_DOUBLE):
1050 OP(BYTECODE_OP_GE_DOUBLE):
1051 OP(BYTECODE_OP_LE_DOUBLE):
1052 {
1053 BUG_ON(1);
1054 PO;
1055 }
1056
1057 /* Mixed S64-double binary comparators */
1058 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1059 OP(BYTECODE_OP_NE_DOUBLE_S64):
1060 OP(BYTECODE_OP_GT_DOUBLE_S64):
1061 OP(BYTECODE_OP_LT_DOUBLE_S64):
1062 OP(BYTECODE_OP_GE_DOUBLE_S64):
1063 OP(BYTECODE_OP_LE_DOUBLE_S64):
1064 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1065 OP(BYTECODE_OP_NE_S64_DOUBLE):
1066 OP(BYTECODE_OP_GT_S64_DOUBLE):
1067 OP(BYTECODE_OP_LT_S64_DOUBLE):
1068 OP(BYTECODE_OP_GE_S64_DOUBLE):
1069 OP(BYTECODE_OP_LE_S64_DOUBLE):
1070 {
1071 BUG_ON(1);
1072 PO;
1073 }
1074 OP(BYTECODE_OP_BIT_RSHIFT):
1075 {
1076 int64_t res;
1077
1078 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1079 ret = -EINVAL;
1080 goto end;
1081 }
1082
1083 /* Catch undefined behavior. */
1084 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1085 ret = -EINVAL;
1086 goto end;
1087 }
1088 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1089 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1090 estack_ax_v = res;
1091 estack_ax_t = REG_U64;
1092 next_pc += sizeof(struct binary_op);
1093 PO;
1094 }
1095 OP(BYTECODE_OP_BIT_LSHIFT):
1096 {
1097 int64_t res;
1098
1099 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1100 ret = -EINVAL;
1101 goto end;
1102 }
1103
1104 /* Catch undefined behavior. */
1105 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1106 ret = -EINVAL;
1107 goto end;
1108 }
1109 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1110 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1111 estack_ax_v = res;
1112 estack_ax_t = REG_U64;
1113 next_pc += sizeof(struct binary_op);
1114 PO;
1115 }
1116 OP(BYTECODE_OP_BIT_AND):
1117 {
1118 int64_t res;
1119
1120 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1121 ret = -EINVAL;
1122 goto end;
1123 }
1124
1125 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1126 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1127 estack_ax_v = res;
1128 estack_ax_t = REG_U64;
1129 next_pc += sizeof(struct binary_op);
1130 PO;
1131 }
1132 OP(BYTECODE_OP_BIT_OR):
1133 {
1134 int64_t res;
1135
1136 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1137 ret = -EINVAL;
1138 goto end;
1139 }
1140
1141 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1142 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1143 estack_ax_v = res;
1144 estack_ax_t = REG_U64;
1145 next_pc += sizeof(struct binary_op);
1146 PO;
1147 }
1148 OP(BYTECODE_OP_BIT_XOR):
1149 {
1150 int64_t res;
1151
1152 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1153 ret = -EINVAL;
1154 goto end;
1155 }
1156
1157 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1158 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1159 estack_ax_v = res;
1160 estack_ax_t = REG_U64;
1161 next_pc += sizeof(struct binary_op);
1162 PO;
1163 }
1164
1165 /* unary */
1166 OP(BYTECODE_OP_UNARY_PLUS):
1167 OP(BYTECODE_OP_UNARY_MINUS):
1168 OP(BYTECODE_OP_UNARY_NOT):
1169 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1170 (unsigned int) *(bytecode_opcode_t *) pc);
1171 ret = -EINVAL;
1172 goto end;
1173
1174
1175 OP(BYTECODE_OP_UNARY_BIT_NOT):
1176 {
1177 estack_ax_v = ~(uint64_t) estack_ax_v;
1178 estack_ax_t = REG_S64;
1179 next_pc += sizeof(struct unary_op);
1180 PO;
1181 }
1182
1183 OP(BYTECODE_OP_UNARY_PLUS_S64):
1184 {
1185 next_pc += sizeof(struct unary_op);
1186 PO;
1187 }
1188 OP(BYTECODE_OP_UNARY_MINUS_S64):
1189 {
1190 estack_ax_v = -estack_ax_v;
1191 estack_ax_t = REG_S64;
1192 next_pc += sizeof(struct unary_op);
1193 PO;
1194 }
1195 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1196 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1197 {
1198 BUG_ON(1);
1199 PO;
1200 }
1201 OP(BYTECODE_OP_UNARY_NOT_S64):
1202 {
1203 estack_ax_v = !estack_ax_v;
1204 estack_ax_t = REG_S64;
1205 next_pc += sizeof(struct unary_op);
1206 PO;
1207 }
1208 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1209 {
1210 BUG_ON(1);
1211 PO;
1212 }
1213
1214 /* logical */
1215 OP(BYTECODE_OP_AND):
1216 {
1217 struct logical_op *insn = (struct logical_op *) pc;
1218
1219 /* If AX is 0, skip and evaluate to 0 */
1220 if (unlikely(estack_ax_v == 0)) {
1221 dbg_printk("Jumping to bytecode offset %u\n",
1222 (unsigned int) insn->skip_offset);
1223 next_pc = start_pc + insn->skip_offset;
1224 } else {
1225 /* Pop 1 when jump not taken */
1226 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1227 next_pc += sizeof(struct logical_op);
1228 }
1229 PO;
1230 }
1231 OP(BYTECODE_OP_OR):
1232 {
1233 struct logical_op *insn = (struct logical_op *) pc;
1234
1235 /* If AX is nonzero, skip and evaluate to 1 */
1236
1237 if (unlikely(estack_ax_v != 0)) {
1238 estack_ax_v = 1;
1239 dbg_printk("Jumping to bytecode offset %u\n",
1240 (unsigned int) insn->skip_offset);
1241 next_pc = start_pc + insn->skip_offset;
1242 } else {
1243 /* Pop 1 when jump not taken */
1244 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1245 next_pc += sizeof(struct logical_op);
1246 }
1247 PO;
1248 }
1249
1250
1251 /* load field ref */
1252 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1253 {
1254 struct load_op *insn = (struct load_op *) pc;
1255 struct field_ref *ref = (struct field_ref *) insn->data;
1256
1257 dbg_printk("load field ref offset %u type string\n",
1258 ref->offset);
1259 estack_push(stack, top, ax, bx, ax_t, bx_t);
1260 estack_ax(stack, top)->u.s.str =
1261 *(const char * const *) &interpreter_stack_data[ref->offset];
1262 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1263 dbg_printk("Bytecode warning: loading a NULL string.\n");
1264 ret = -EINVAL;
1265 goto end;
1266 }
1267 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1268 estack_ax(stack, top)->u.s.literal_type =
1269 ESTACK_STRING_LITERAL_TYPE_NONE;
1270 estack_ax(stack, top)->u.s.user = 0;
1271 estack_ax(stack, top)->type = REG_STRING;
1272 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1273 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1274 PO;
1275 }
1276
1277 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
1278 {
1279 struct load_op *insn = (struct load_op *) pc;
1280 struct field_ref *ref = (struct field_ref *) insn->data;
1281
1282 dbg_printk("load field ref offset %u type sequence\n",
1283 ref->offset);
1284 estack_push(stack, top, ax, bx, ax_t, bx_t);
1285 estack_ax(stack, top)->u.s.seq_len =
1286 *(unsigned long *) &interpreter_stack_data[ref->offset];
1287 estack_ax(stack, top)->u.s.str =
1288 *(const char **) (&interpreter_stack_data[ref->offset
1289 + sizeof(unsigned long)]);
1290 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1291 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1292 ret = -EINVAL;
1293 goto end;
1294 }
1295 estack_ax(stack, top)->u.s.literal_type =
1296 ESTACK_STRING_LITERAL_TYPE_NONE;
1297 estack_ax(stack, top)->u.s.user = 0;
1298 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1299 PO;
1300 }
1301
1302 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
1303 {
1304 struct load_op *insn = (struct load_op *) pc;
1305 struct field_ref *ref = (struct field_ref *) insn->data;
1306
1307 dbg_printk("load field ref offset %u type s64\n",
1308 ref->offset);
1309 estack_push(stack, top, ax, bx, ax_t, bx_t);
1310 estack_ax_v =
1311 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
1312 estack_ax_t = REG_S64;
1313 dbg_printk("ref load s64 %lld\n",
1314 (long long) estack_ax_v);
1315 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1316 PO;
1317 }
1318
1319 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
1320 {
1321 BUG_ON(1);
1322 PO;
1323 }
1324
1325 /* load from immediate operand */
1326 OP(BYTECODE_OP_LOAD_STRING):
1327 {
1328 struct load_op *insn = (struct load_op *) pc;
1329
1330 dbg_printk("load string %s\n", insn->data);
1331 estack_push(stack, top, ax, bx, ax_t, bx_t);
1332 estack_ax(stack, top)->u.s.str = insn->data;
1333 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1334 estack_ax(stack, top)->u.s.literal_type =
1335 ESTACK_STRING_LITERAL_TYPE_PLAIN;
1336 estack_ax(stack, top)->u.s.user = 0;
1337 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1338 PO;
1339 }
1340
1341 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
1342 {
1343 struct load_op *insn = (struct load_op *) pc;
1344
1345 dbg_printk("load globbing pattern %s\n", insn->data);
1346 estack_push(stack, top, ax, bx, ax_t, bx_t);
1347 estack_ax(stack, top)->u.s.str = insn->data;
1348 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1349 estack_ax(stack, top)->u.s.literal_type =
1350 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
1351 estack_ax(stack, top)->u.s.user = 0;
1352 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1353 PO;
1354 }
1355
1356 OP(BYTECODE_OP_LOAD_S64):
1357 {
1358 struct load_op *insn = (struct load_op *) pc;
1359
1360 estack_push(stack, top, ax, bx, ax_t, bx_t);
1361 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
1362 estack_ax_t = REG_S64;
1363 dbg_printk("load s64 %lld\n",
1364 (long long) estack_ax_v);
1365 next_pc += sizeof(struct load_op)
1366 + sizeof(struct literal_numeric);
1367 PO;
1368 }
1369
1370 OP(BYTECODE_OP_LOAD_DOUBLE):
1371 {
1372 BUG_ON(1);
1373 PO;
1374 }
1375
1376 /* cast */
1377 OP(BYTECODE_OP_CAST_TO_S64):
1378 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1379 (unsigned int) *(bytecode_opcode_t *) pc);
1380 ret = -EINVAL;
1381 goto end;
1382
1383 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
1384 {
1385 BUG_ON(1);
1386 PO;
1387 }
1388
1389 OP(BYTECODE_OP_CAST_NOP):
1390 {
1391 next_pc += sizeof(struct cast_op);
1392 PO;
1393 }
1394
1395 /* get context ref */
1396 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
1397 {
1398 struct load_op *insn = (struct load_op *) pc;
1399 struct field_ref *ref = (struct field_ref *) insn->data;
1400 struct lttng_ctx_field *ctx_field;
1401 union lttng_ctx_value v;
1402
1403 dbg_printk("get context ref offset %u type string\n",
1404 ref->offset);
1405 ctx_field = &lttng_static_ctx->fields[ref->offset];
1406 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1407 estack_push(stack, top, ax, bx, ax_t, bx_t);
1408 estack_ax(stack, top)->u.s.str = v.str;
1409 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1410 dbg_printk("Bytecode warning: loading a NULL string.\n");
1411 ret = -EINVAL;
1412 goto end;
1413 }
1414 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1415 estack_ax(stack, top)->u.s.literal_type =
1416 ESTACK_STRING_LITERAL_TYPE_NONE;
1417 estack_ax(stack, top)->u.s.user = 0;
1418 estack_ax(stack, top)->type = REG_STRING;
1419 dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
1420 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1421 PO;
1422 }
1423
1424 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
1425 {
1426 struct load_op *insn = (struct load_op *) pc;
1427 struct field_ref *ref = (struct field_ref *) insn->data;
1428 struct lttng_ctx_field *ctx_field;
1429 union lttng_ctx_value v;
1430
1431 dbg_printk("get context ref offset %u type s64\n",
1432 ref->offset);
1433 ctx_field = &lttng_static_ctx->fields[ref->offset];
1434 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1435 estack_push(stack, top, ax, bx, ax_t, bx_t);
1436 estack_ax_v = v.s64;
1437 estack_ax_t = REG_S64;
1438 dbg_printk("ref get context s64 %lld\n",
1439 (long long) estack_ax_v);
1440 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1441 PO;
1442 }
1443
1444 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
1445 {
1446 BUG_ON(1);
1447 PO;
1448 }
1449
1450 /* load userspace field ref */
1451 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING):
1452 {
1453 struct load_op *insn = (struct load_op *) pc;
1454 struct field_ref *ref = (struct field_ref *) insn->data;
1455
1456 dbg_printk("load field ref offset %u type user string\n",
1457 ref->offset);
1458 estack_push(stack, top, ax, bx, ax_t, bx_t);
1459 estack_ax(stack, top)->u.s.user_str =
1460 *(const char * const *) &interpreter_stack_data[ref->offset];
1461 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1462 dbg_printk("Bytecode warning: loading a NULL string.\n");
1463 ret = -EINVAL;
1464 goto end;
1465 }
1466 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1467 estack_ax(stack, top)->u.s.literal_type =
1468 ESTACK_STRING_LITERAL_TYPE_NONE;
1469 estack_ax(stack, top)->u.s.user = 1;
1470 estack_ax(stack, top)->type = REG_STRING;
1471 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1472 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1473 PO;
1474 }
1475
1476 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1477 {
1478 struct load_op *insn = (struct load_op *) pc;
1479 struct field_ref *ref = (struct field_ref *) insn->data;
1480
1481 dbg_printk("load field ref offset %u type user sequence\n",
1482 ref->offset);
1483 estack_push(stack, top, ax, bx, ax_t, bx_t);
1484 estack_ax(stack, top)->u.s.seq_len =
1485 *(unsigned long *) &interpreter_stack_data[ref->offset];
1486 estack_ax(stack, top)->u.s.user_str =
1487 *(const char **) (&interpreter_stack_data[ref->offset
1488 + sizeof(unsigned long)]);
1489 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1490 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1491 ret = -EINVAL;
1492 goto end;
1493 }
1494 estack_ax(stack, top)->u.s.literal_type =
1495 ESTACK_STRING_LITERAL_TYPE_NONE;
1496 estack_ax(stack, top)->u.s.user = 1;
1497 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1498 PO;
1499 }
1500
1501 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
1502 {
1503 dbg_printk("op get context root\n");
1504 estack_push(stack, top, ax, bx, ax_t, bx_t);
1505 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
1506 /* "field" only needed for variants. */
1507 estack_ax(stack, top)->u.ptr.field = NULL;
1508 estack_ax(stack, top)->type = REG_PTR;
1509 next_pc += sizeof(struct load_op);
1510 PO;
1511 }
1512
1513 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
1514 {
1515 BUG_ON(1);
1516 PO;
1517 }
1518
1519 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
1520 {
1521 dbg_printk("op get app payload root\n");
1522 estack_push(stack, top, ax, bx, ax_t, bx_t);
1523 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
1524 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
1525 /* "field" only needed for variants. */
1526 estack_ax(stack, top)->u.ptr.field = NULL;
1527 estack_ax(stack, top)->type = REG_PTR;
1528 next_pc += sizeof(struct load_op);
1529 PO;
1530 }
1531
1532 OP(BYTECODE_OP_GET_SYMBOL):
1533 {
1534 dbg_printk("op get symbol\n");
1535 switch (estack_ax(stack, top)->u.ptr.type) {
1536 case LOAD_OBJECT:
1537 printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
1538 ret = -EINVAL;
1539 goto end;
1540 case LOAD_ROOT_CONTEXT:
1541 case LOAD_ROOT_APP_CONTEXT:
1542 case LOAD_ROOT_PAYLOAD:
1543 /*
1544 * symbol lookup is performed by
1545 * specialization.
1546 */
1547 ret = -EINVAL;
1548 goto end;
1549 }
1550 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1551 PO;
1552 }
1553
1554 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
1555 {
1556 /*
1557 * Used for first variant encountered in a
1558 * traversal. Variants are not implemented yet.
1559 */
1560 ret = -EINVAL;
1561 goto end;
1562 }
1563
1564 OP(BYTECODE_OP_GET_INDEX_U16):
1565 {
1566 struct load_op *insn = (struct load_op *) pc;
1567 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1568
1569 dbg_printk("op get index u16\n");
1570 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1571 if (ret)
1572 goto end;
1573 estack_ax_v = estack_ax(stack, top)->u.v;
1574 estack_ax_t = estack_ax(stack, top)->type;
1575 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1576 PO;
1577 }
1578
1579 OP(BYTECODE_OP_GET_INDEX_U64):
1580 {
1581 struct load_op *insn = (struct load_op *) pc;
1582 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1583
1584 dbg_printk("op get index u64\n");
1585 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1586 if (ret)
1587 goto end;
1588 estack_ax_v = estack_ax(stack, top)->u.v;
1589 estack_ax_t = estack_ax(stack, top)->type;
1590 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1591 PO;
1592 }
1593
1594 OP(BYTECODE_OP_LOAD_FIELD):
1595 {
1596 dbg_printk("op load field\n");
1597 ret = dynamic_load_field(estack_ax(stack, top));
1598 if (ret)
1599 goto end;
1600 estack_ax_v = estack_ax(stack, top)->u.v;
1601 estack_ax_t = estack_ax(stack, top)->type;
1602 next_pc += sizeof(struct load_op);
1603 PO;
1604 }
1605
1606 OP(BYTECODE_OP_LOAD_FIELD_S8):
1607 {
1608 dbg_printk("op load field s8\n");
1609
1610 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1611 estack_ax_t = REG_S64;
1612 next_pc += sizeof(struct load_op);
1613 PO;
1614 }
1615 OP(BYTECODE_OP_LOAD_FIELD_S16):
1616 {
1617 dbg_printk("op load field s16\n");
1618
1619 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1620 estack_ax_t = REG_S64;
1621 next_pc += sizeof(struct load_op);
1622 PO;
1623 }
1624 OP(BYTECODE_OP_LOAD_FIELD_S32):
1625 {
1626 dbg_printk("op load field s32\n");
1627
1628 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1629 estack_ax_t = REG_S64;
1630 next_pc += sizeof(struct load_op);
1631 PO;
1632 }
1633 OP(BYTECODE_OP_LOAD_FIELD_S64):
1634 {
1635 dbg_printk("op load field s64\n");
1636
1637 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1638 estack_ax_t = REG_S64;
1639 next_pc += sizeof(struct load_op);
1640 PO;
1641 }
1642 OP(BYTECODE_OP_LOAD_FIELD_U8):
1643 {
1644 dbg_printk("op load field u8\n");
1645
1646 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1647 estack_ax_t = REG_S64;
1648 next_pc += sizeof(struct load_op);
1649 PO;
1650 }
1651 OP(BYTECODE_OP_LOAD_FIELD_U16):
1652 {
1653 dbg_printk("op load field u16\n");
1654
1655 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1656 estack_ax_t = REG_S64;
1657 next_pc += sizeof(struct load_op);
1658 PO;
1659 }
1660 OP(BYTECODE_OP_LOAD_FIELD_U32):
1661 {
1662 dbg_printk("op load field u32\n");
1663
1664 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1665 estack_ax_t = REG_S64;
1666 next_pc += sizeof(struct load_op);
1667 PO;
1668 }
1669 OP(BYTECODE_OP_LOAD_FIELD_U64):
1670 {
1671 dbg_printk("op load field u64\n");
1672
1673 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1674 estack_ax_t = REG_S64;
1675 next_pc += sizeof(struct load_op);
1676 PO;
1677 }
1678 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
1679 {
1680 ret = -EINVAL;
1681 goto end;
1682 }
1683
1684 OP(BYTECODE_OP_LOAD_FIELD_STRING):
1685 {
1686 const char *str;
1687
1688 dbg_printk("op load field string\n");
1689 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
1690 estack_ax(stack, top)->u.s.str = str;
1691 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1692 dbg_printk("Bytecode warning: loading a NULL string.\n");
1693 ret = -EINVAL;
1694 goto end;
1695 }
1696 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1697 estack_ax(stack, top)->u.s.literal_type =
1698 ESTACK_STRING_LITERAL_TYPE_NONE;
1699 estack_ax(stack, top)->type = REG_STRING;
1700 next_pc += sizeof(struct load_op);
1701 PO;
1702 }
1703
1704 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
1705 {
1706 const char *ptr;
1707
1708 dbg_printk("op load field string sequence\n");
1709 ptr = estack_ax(stack, top)->u.ptr.ptr;
1710 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
1711 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
1712 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1713 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1714 ret = -EINVAL;
1715 goto end;
1716 }
1717 estack_ax(stack, top)->u.s.literal_type =
1718 ESTACK_STRING_LITERAL_TYPE_NONE;
1719 estack_ax(stack, top)->type = REG_STRING;
1720 next_pc += sizeof(struct load_op);
1721 PO;
1722 }
1723
1724 END_OP
1725 end:
1726 /* Return _DISCARD on error. */
1727 if (ret)
1728 return LTTNG_INTERPRETER_DISCARD;
1729
1730 if (output) {
1731 return lttng_bytecode_interpret_format_output(
1732 estack_ax(stack, top), output);
1733 }
1734
1735 return retval;
1736 }
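/*
 * Mark the interpreter's stack frame as non-standard for objtool (see
 * wrapper/objtool.h): the computed-goto dispatch is not something
 * objtool's stack validation can follow.
 */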
1737 LTTNG_STACK_FRAME_NON_STANDARD(bytecode_interpret);
1738
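/*
 * Filter entry point: run the interpreter with no output structure. The
 * returned value carries the record/discard decision in its low bit, as
 * described above bytecode_interpret().
 */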
1739 uint64_t lttng_bytecode_filter_interpret(void *filter_data,
1740 struct lttng_probe_ctx *lttng_probe_ctx,
1741 const char *filter_stack_data)
1742 {
1743 return bytecode_interpret(filter_data, lttng_probe_ctx,
1744 filter_stack_data, NULL);
1745 }
1746
1747 #undef START_OP
1748 #undef OP
1749 #undef PO
1750 #undef END_OP