/* Split struct lttng_session into public/private structures */
/* lttng-modules.git / src / lttng-bytecode-interpreter.c */
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-bytecode-interpreter.c
4 *
5 * LTTng modules bytecode interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
14
15 #include <lttng/lttng-bytecode.h>
16 #include <lttng/string-utils.h>
17 #include <lttng/events-internal.h>
18
19 /*
20 * get_char should be called with page fault handler disabled if it is expected
21 * to handle user-space read.
22 */
23 static
24 char get_char(const struct estack_entry *reg, size_t offset)
25 {
26 if (unlikely(offset >= reg->u.s.seq_len))
27 return '\0';
28 if (reg->u.s.user) {
29 char c;
30
31 /* Handle invalid access as end of string. */
32 if (unlikely(!lttng_access_ok(VERIFY_READ,
33 reg->u.s.user_str + offset,
34 sizeof(c))))
35 return '\0';
36 /* Handle fault (nonzero return value) as end of string. */
37 if (unlikely(__copy_from_user_inatomic(&c,
38 reg->u.s.user_str + offset,
39 sizeof(c))))
40 return '\0';
41 return c;
42 } else {
43 return reg->u.s.str[offset];
44 }
45 }
46
47 /*
48 * -1: wildcard found.
49 * -2: unknown escape char.
50 * 0: normal char.
51 */
52 static
53 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
54 {
55 switch (*c) {
56 case '\\':
57 (*offset)++;
58 *c = get_char(reg, *offset);
59 switch (*c) {
60 case '\\':
61 case '*':
62 return 0;
63 default:
64 return -2;
65 }
66 case '*':
67 return -1;
68 default:
69 return 0;
70 }
71 }
72
73 static
74 char get_char_at_cb(size_t at, void *data)
75 {
76 return get_char(data, at);
77 }
78
79 static
80 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
81 {
82 bool has_user = false;
83 int result;
84 struct estack_entry *pattern_reg;
85 struct estack_entry *candidate_reg;
86
87 /* Disable the page fault handler when reading from userspace. */
88 if (estack_bx(stack, top)->u.s.user
89 || estack_ax(stack, top)->u.s.user) {
90 has_user = true;
91 pagefault_disable();
92 }
93
94 /* Find out which side is the pattern vs. the candidate. */
95 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
96 pattern_reg = estack_ax(stack, top);
97 candidate_reg = estack_bx(stack, top);
98 } else {
99 pattern_reg = estack_bx(stack, top);
100 candidate_reg = estack_ax(stack, top);
101 }
102
103 /* Perform the match operation. */
104 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
105 pattern_reg, get_char_at_cb, candidate_reg);
106 if (has_user)
107 pagefault_enable();
108
109 return result;
110 }
111
/*
 * strcmp-like comparison of the two string registers on top of the stack
 * (bx vs ax). Returns <0, 0, >0 like strcmp.
 *
 * PLAIN string literals get special handling through parse_char():
 * a '*' wildcard in a literal terminates the comparison as a match,
 * and backslash escapes are decoded before comparing.
 */
static
int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
{
	size_t offset_bx = 0, offset_ax = 0;
	int diff, has_user = 0;

	/* Disable page faults while get_char() may read userspace strings. */
	if (estack_bx(stack, top)->u.s.user
			|| estack_ax(stack, top)->u.s.user) {
		has_user = 1;
		pagefault_disable();
	}

	for (;;) {
		int ret;
		int escaped_r0 = 0;	/* bx char came from an unknown escape (-2). */
		char char_bx, char_ax;

		char_bx = get_char(estack_bx(stack, top), offset_bx);
		char_ax = get_char(estack_ax(stack, top), offset_ax);

		if (unlikely(char_bx == '\0')) {
			if (char_ax == '\0') {
				/* Both ended: equal. */
				diff = 0;
				break;
			} else {
				/* bx ended; a trailing '*' in a plain ax literal matches. */
				if (estack_ax(stack, top)->u.s.literal_type ==
						ESTACK_STRING_LITERAL_TYPE_PLAIN) {
					ret = parse_char(estack_ax(stack, top),
						&char_ax, &offset_ax);
					if (ret == -1) {
						diff = 0;
						break;
					}
				}
				diff = -1;
				break;
			}
		}
		if (unlikely(char_ax == '\0')) {
			/* ax ended; a trailing '*' in a plain bx literal matches. */
			if (estack_bx(stack, top)->u.s.literal_type ==
					ESTACK_STRING_LITERAL_TYPE_PLAIN) {
				ret = parse_char(estack_bx(stack, top),
					&char_bx, &offset_bx);
				if (ret == -1) {
					diff = 0;
					break;
				}
			}
			diff = 1;
			break;
		}
		if (estack_bx(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_bx(stack, top),
				&char_bx, &offset_bx);
			if (ret == -1) {
				/* '*' wildcard in bx literal: match. */
				diff = 0;
				break;
			} else if (ret == -2) {
				/* Unknown escape in bx: remember for tie-breaking. */
				escaped_r0 = 1;
			}
			/* else compare both char */
		}
		if (estack_ax(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_ax(stack, top),
				&char_ax, &offset_ax);
			if (ret == -1) {
				/* '*' wildcard in ax literal: match. */
				diff = 0;
				break;
			} else if (ret == -2) {
				/* Unknown escape on ax only: bx sorts lower. */
				if (!escaped_r0) {
					diff = -1;
					break;
				}
			} else {
				/* Unknown escape on bx only: bx sorts higher. */
				if (escaped_r0) {
					diff = 1;
					break;
				}
			}
		} else {
			if (escaped_r0) {
				diff = 1;
				break;
			}
		}
		diff = char_bx - char_ax;
		if (diff != 0)
			break;
		offset_bx++;
		offset_ax++;
	}
	if (has_user)
		pagefault_enable();

	return diff;
}
210
211 int lttng_bytecode_interpret_error(
212 struct lttng_kernel_bytecode_runtime *bytecode_runtime __attribute__((unused)),
213 const char *stack_data __attribute__((unused)),
214 struct lttng_kernel_probe_ctx *probe_ctx __attribute__((unused)),
215 void *ctx __attribute__((unused)))
216 {
217 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
218 }
219
#ifdef INTERPRETER_USE_SWITCH

/*
 * Fallback for compilers that do not support taking address of labels.
 *
 * Fix: the opcode stream lives in bytecode->code[], as used by the
 * dispatch-table variant below; bytecode->data is the runtime immediate
 * data area (see dynamic_get_index()), not the instruction stream.
 */

#define START_OP							\
		start_pc = &bytecode->code[0];				\
		for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
				pc = next_pc) {				\
			dbg_printk("LTTng: Executing op %s (%u)\n",	\
				lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
				(unsigned int) *(bytecode_opcode_t *) pc); \
			switch (*(bytecode_opcode_t *) pc) {

#define OP(name)	case name

#define PO		break

#define END_OP		}						\
	}

#else

/*
 * Dispatch-table based interpreter (computed goto).
 */

#define START_OP							\
		start_pc = &bytecode->code[0];				\
		pc = next_pc = start_pc;				\
		if (unlikely(pc - start_pc >= bytecode->len))		\
			goto end;					\
		goto *dispatch[*(bytecode_opcode_t *) pc];

#define OP(name)							\
LABEL_##name

#define PO							\
		pc = next_pc;					\
		goto *dispatch[*(bytecode_opcode_t *) pc];

#define END_OP

#endif
265
/* True when reg_type designates a 64-bit integer register (signed or unsigned). */
#define IS_INTEGER_REGISTER(reg_type) \
	(reg_type == REG_S64 || reg_type == REG_U64)
268
/*
 * Load context field @idx from the static context table into @ptr as a
 * LOAD_OBJECT. Fetches the field value through the context field's
 * get_value() callback and sets the object type/pointer accordingly.
 * Returns 0 on success, -EINVAL for unsupported field types.
 */
static int context_get_index(struct lttng_kernel_probe_ctx *lttng_probe_ctx,
		struct load_ptr *ptr,
		uint32_t idx)
{

	struct lttng_kernel_ctx_field *ctx_field;
	const struct lttng_kernel_event_field *field;
	struct lttng_ctx_value v;

	ctx_field = &lttng_static_ctx->fields[idx];
	field = ctx_field->event_field;
	ptr->type = LOAD_OBJECT;
	/* field is only used for types nested within variants. */
	ptr->field = NULL;

	switch (field->type->type) {
	case lttng_kernel_type_integer:
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		/* Signedness selects which union member and object type to use. */
		if (lttng_kernel_get_type_integer(field->type)->signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.u.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.u.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	case lttng_kernel_type_enum:
	{
		/* Enums are loaded according to their integer container type. */
		const struct lttng_kernel_type_enum *enum_type = lttng_kernel_get_type_enum(field->type);
		const struct lttng_kernel_type_integer *integer_type = lttng_kernel_get_type_integer(enum_type->container_type);

		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		if (integer_type->signedness) {
			ptr->object_type = OBJECT_TYPE_SIGNED_ENUM;
			ptr->u.s64 = v.u.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
			ptr->u.u64 = v.u.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	}
	case lttng_kernel_type_array:
	{
		const struct lttng_kernel_type_array *array_type = lttng_kernel_get_type_array(field->type);

		/* Only text (string-encoded integer element) arrays are loadable. */
		if (!lttng_kernel_type_is_bytewise_integer(array_type->elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (array_type->encoding == lttng_kernel_string_encoding_none) {
			printk(KERN_WARNING "LTTng: bytecode: Only string arrays are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		ptr->ptr = v.u.str;
		break;
	}
	case lttng_kernel_type_sequence:
	{
		const struct lttng_kernel_type_sequence *sequence_type = lttng_kernel_get_type_sequence(field->type);

		/* Same restriction as arrays: only string sequences are loadable. */
		if (!lttng_kernel_type_is_bytewise_integer(sequence_type->elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (sequence_type->encoding == lttng_kernel_string_encoding_none) {
			printk(KERN_WARNING "LTTng: bytecode: Only string sequences are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		ptr->ptr = v.u.str;
		break;
	}
	case lttng_kernel_type_string:
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		ptr->ptr = v.u.str;
		break;
	case lttng_kernel_type_struct:
		printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
		return -EINVAL;
	case lttng_kernel_type_variant:
		printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d", (int) field->type->type);
		return -EINVAL;
	}
	return 0;
}
365
/*
 * Apply a "get index" operation to the pointer register on the stack top,
 * using the bytecode_get_index_data stored at @index in the runtime data
 * area. Depending on the current pointer kind this either indexes into an
 * array/sequence object, resolves a context field, or offsets into the
 * event payload. Leaves a REG_PTR on the stack top on success.
 * Returns 0 on success, negative errno on error.
 */
static int dynamic_get_index(struct lttng_kernel_probe_ctx *lttng_probe_ctx,
		struct bytecode_runtime *runtime,
		uint64_t index, struct estack_entry *stack_top)
{
	int ret;
	const struct bytecode_get_index_data *gid;

	gid = (const struct bytecode_get_index_data *) &runtime->data[index];
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		switch (stack_top->u.ptr.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const char *ptr;

			/* Offset was validated against the static array length. */
			WARN_ON_ONCE(gid->offset >= gid->array_len);
			/* Skip count (unsigned long) */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			BUG_ON(stack_top->u.ptr.field->type->type != lttng_kernel_type_array);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const char *ptr;
			size_t ptr_seq_len;

			/* Sequence layout: length (unsigned long), then data pointer. */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
			/* Runtime bound check against the actual sequence length. */
			if (gid->offset >= gid->elem.len * ptr_seq_len) {
				ret = -EINVAL;
				goto end;
			}
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			BUG_ON(stack_top->u.ptr.field->type->type != lttng_kernel_type_sequence);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			/* Only generated by the specialized phase. */
			printk(KERN_WARNING "LTTng: bytecode: Nested structures are not supported yet.\n");
			ret = -EINVAL;
			goto end;
		case OBJECT_TYPE_VARIANT:
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d",
				(int) stack_top->u.ptr.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:	/* Fall-through */
	{
		/* Resolve a context field by its static context table index. */
		ret = context_get_index(lttng_probe_ctx,
				&stack_top->u.ptr,
				gid->ctx_index);
		if (ret) {
			goto end;
		}
		break;
	}
	case LOAD_ROOT_PAYLOAD:
		/* Offset into the event payload; strings are stored by pointer. */
		stack_top->u.ptr.ptr += gid->offset;
		if (gid->elem.type == OBJECT_TYPE_STRING)
			stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
		stack_top->u.ptr.object_type = gid->elem.type;
		stack_top->u.ptr.type = LOAD_OBJECT;
		stack_top->u.ptr.field = gid->field;
		stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
		break;
	}

	stack_top->type = REG_PTR;

	return 0;

end:
	return ret;
}
452
/*
 * Dereference the LOAD_OBJECT pointer on the stack top and replace it with
 * the loaded value: integers/enums become REG_S64 or REG_U64 (byte-swapped
 * when rev_bo is set), strings and string sequences become REG_STRING.
 * Returns 0 on success, -EINVAL for roots and non-loadable object types.
 */
static int dynamic_load_field(struct estack_entry *stack_top)
{
	int ret;

	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		/* A root must first be resolved by a get-index/get-symbol op. */
		dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->u.ptr.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printk("op load field s8\n");
		stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_S64;
		break;
	case OBJECT_TYPE_S16:
	{
		int16_t tmp;

		dbg_printk("op load field s16\n");
		tmp = *(int16_t *) stack_top->u.ptr.ptr;
		/* rev_bo: stored byte order differs from native; swap. */
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S32:
	{
		int32_t tmp;

		dbg_printk("op load field s32\n");
		tmp = *(int32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S64:
	{
		int64_t tmp;

		dbg_printk("op load field s64\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_SIGNED_ENUM:
	{
		int64_t tmp;

		dbg_printk("op load field signed enumeration\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_U8:
		dbg_printk("op load field u8\n");
		stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_U64;
		break;
	case OBJECT_TYPE_U16:
	{
		uint16_t tmp;

		dbg_printk("op load field u16\n");
		tmp = *(uint16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U32:
	{
		uint32_t tmp;

		dbg_printk("op load field u32\n");
		tmp = *(uint32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U64:
	{
		uint64_t tmp;

		dbg_printk("op load field u64\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_UNSIGNED_ENUM:
	{
		uint64_t tmp;

		dbg_printk("op load field unsigned enumeration\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_STRING:
	{
		const char *str;

		dbg_printk("op load field string\n");
		str = (const char *) stack_top->u.ptr.ptr;
		stack_top->u.s.str = str;
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Bytecode warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		/* NUL-terminated string: no explicit length bound. */
		stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_STRING_SEQUENCE:
	{
		const char *ptr;

		dbg_printk("op load field string sequence\n");
		/* Sequence layout: length (unsigned long), then data pointer. */
		ptr = stack_top->u.ptr.ptr;
		stack_top->u.s.seq_len = *(unsigned long *) ptr;
		stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Bytecode warning: loading a NULL sequence.\n");
			ret = -EINVAL;
			goto end;
		}
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_DYNAMIC:
		/*
		 * Dynamic types in context are looked up
		 * by context get index.
		 */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_DOUBLE:
		/* Doubles are not supported by the kernel interpreter. */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
634
/*
 * Convert the final stack-top register @ax into a struct
 * lttng_interpreter_output for capture bytecode. REG_PTR registers holding
 * scalar object types are first materialized through dynamic_load_field(),
 * then re-dispatched (goto again). Returns 0 on success, negative errno
 * for types that cannot be captured.
 */
static
int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
		struct lttng_interpreter_output *output)
{
	int ret;

again:
	switch (ax->type) {
	case REG_S64:
		output->type = LTTNG_INTERPRETER_TYPE_S64;
		output->u.s = ax->u.v;
		break;
	case REG_U64:
		output->type = LTTNG_INTERPRETER_TYPE_U64;
		output->u.u = (uint64_t) ax->u.v;
		break;
	case REG_STRING:
		output->type = LTTNG_INTERPRETER_TYPE_STRING;
		output->u.str.str = ax->u.s.str;
		output->u.str.len = ax->u.s.seq_len;
		break;
	case REG_PTR:
		switch (ax->u.ptr.object_type) {
		case OBJECT_TYPE_S8:
		case OBJECT_TYPE_S16:
		case OBJECT_TYPE_S32:
		case OBJECT_TYPE_S64:
		case OBJECT_TYPE_U8:
		case OBJECT_TYPE_U16:
		case OBJECT_TYPE_U32:
		case OBJECT_TYPE_U64:
		case OBJECT_TYPE_DOUBLE:
		case OBJECT_TYPE_STRING:
		case OBJECT_TYPE_STRING_SEQUENCE:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			/* Retry after loading ptr into stack top. */
			goto again;
		case OBJECT_TYPE_SEQUENCE:
			/* Sequence layout: length (unsigned long), then data pointer. */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
			output->u.sequence.nested_type = lttng_kernel_get_type_sequence(ax->u.ptr.field->type)->elem_type;
			break;
		case OBJECT_TYPE_ARRAY:
			/* Skip count (unsigned long) */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = lttng_kernel_get_type_array(ax->u.ptr.field->type)->length;
			output->u.sequence.nested_type = lttng_kernel_get_type_array(ax->u.ptr.field->type)->elem_type;
			break;
		case OBJECT_TYPE_SIGNED_ENUM:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			output->type = LTTNG_INTERPRETER_TYPE_SIGNED_ENUM;
			output->u.s = ax->u.v;
			break;
		case OBJECT_TYPE_UNSIGNED_ENUM:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			output->type = LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM;
			output->u.u = ax->u.v;
			break;
		case OBJECT_TYPE_STRUCT:
		case OBJECT_TYPE_VARIANT:
		default:
			return -EINVAL;
		}

		break;
	case REG_STAR_GLOB_STRING:
	case REG_TYPE_UNKNOWN:
	default:
		return -EINVAL;
	}

	return 0;
}
716
#ifdef DEBUG

/* Maximum number of user-string bytes copied for debug printing. */
#define DBG_USER_STR_CUTOFF 32

/*
 * In debug mode, print user string (truncated, if necessary).
 */
static inline
void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
{
	size_t pos = 0;
	char last_char;
	char user_str[DBG_USER_STR_CUTOFF];

	/* get_char() may read userspace: disable page faults around the copy. */
	pagefault_disable();
	do {
		last_char = get_char(user_str_reg, pos);
		user_str[pos] = last_char;
		pos++;
	} while (last_char != '\0' && pos < sizeof(user_str));
	pagefault_enable();

	/* Force termination; append "[...]" when the string was truncated. */
	user_str[sizeof(user_str) - 1] = '\0';
	dbg_printk("load field ref user string: '%s%s'\n", user_str,
		last_char != '\0' ? "[...]" : "");
}
#else
/* No-op stub when DEBUG is disabled. */
static inline
void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
{
}
#endif
749
750 /*
751 * Return LTTNG_KERNEL_BYTECODE_INTERPRETER_OK on success.
752 * Return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR on error.
753 *
754 * For FILTER bytecode: expect a struct lttng_kernel_bytecode_filter_ctx *
755 * as @ctx argument.
756 * For CAPTURE bytecode: expect a struct lttng_interpreter_output *
757 * as @ctx argument.
758 */
759 int lttng_bytecode_interpret(struct lttng_kernel_bytecode_runtime *kernel_bytecode,
760 const char *interpreter_stack_data,
761 struct lttng_kernel_probe_ctx *lttng_probe_ctx,
762 void *caller_ctx)
763 {
764 struct bytecode_runtime *bytecode = container_of(kernel_bytecode, struct bytecode_runtime, p);
765 void *pc, *next_pc, *start_pc;
766 int ret = -EINVAL;
767 uint64_t retval = 0;
768 struct estack _stack;
769 struct estack *stack = &_stack;
770 register int64_t ax = 0, bx = 0;
771 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
772 register int top = INTERPRETER_STACK_EMPTY;
773 #ifndef INTERPRETER_USE_SWITCH
774 static void *dispatch[NR_BYTECODE_OPS] = {
775 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
776
777 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
778
779 /* binary */
780 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
781 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
782 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
783 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
784 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
785 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
786 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
787 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
788 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
789 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
790
791 /* binary comparators */
792 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
793 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
794 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
795 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
796 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
797 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
798
799 /* string binary comparator */
800 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
801 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
802 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
803 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
804 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
805 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
806
807 /* globbing pattern binary comparator */
808 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
809 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
810
811 /* s64 binary comparator */
812 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
813 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
814 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
815 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
816 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
817 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
818
819 /* double binary comparator */
820 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
821 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
822 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
823 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
824 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
825 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
826
827 /* Mixed S64-double binary comparators */
828 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
829 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
830 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
831 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
832 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
833 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
834
835 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
836 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
837 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
838 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
839 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
840 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
841
842 /* unary */
843 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
844 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
845 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
846 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
847 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
848 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
849 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
850 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
851 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
852
853 /* logical */
854 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
855 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
856
857 /* load field ref */
858 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
859 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
860 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
861 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
862 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
863
864 /* load from immediate operand */
865 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
866 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
867 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
868 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
869
870 /* cast */
871 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
872 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
873 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
874
875 /* get context ref */
876 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
877 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
878 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
879 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
880
881 /* load userspace field ref */
882 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING,
883 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE,
884
885 /* Instructions for recursive traversal through composed types. */
886 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
887 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
888 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
889
890 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
891 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
892 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
893 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
894
895 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
896 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
897 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
898 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
899 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
900 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
901 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
902 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
903 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
904 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
905 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
906 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
907
908 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
909
910 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
911 };
912 #endif /* #ifndef INTERPRETER_USE_SWITCH */
913
914 START_OP
915
916 OP(BYTECODE_OP_UNKNOWN):
917 OP(BYTECODE_OP_LOAD_FIELD_REF):
918 OP(BYTECODE_OP_GET_CONTEXT_REF):
919 #ifdef INTERPRETER_USE_SWITCH
920 default:
921 #endif /* INTERPRETER_USE_SWITCH */
922 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
923 (unsigned int) *(bytecode_opcode_t *) pc);
924 ret = -EINVAL;
925 goto end;
926
927 OP(BYTECODE_OP_RETURN):
928 /* LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR or LTTNG_KERNEL_BYTECODE_INTERPRETER_OK */
929 switch (estack_ax_t) {
930 case REG_S64:
931 case REG_U64:
932 retval = !!estack_ax_v;
933 break;
934 case REG_DOUBLE:
935 case REG_STRING:
936 case REG_PTR:
937 if (kernel_bytecode->type != LTTNG_KERNEL_BYTECODE_TYPE_CAPTURE) {
938 ret = -EINVAL;
939 goto end;
940 }
941 retval = 0;
942 break;
943 case REG_STAR_GLOB_STRING:
944 case REG_TYPE_UNKNOWN:
945 ret = -EINVAL;
946 goto end;
947 }
948 ret = 0;
949 goto end;
950
951 OP(BYTECODE_OP_RETURN_S64):
952 /* LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR or LTTNG_KERNEL_BYTECODE_INTERPRETER_OK */
953 retval = !!estack_ax_v;
954 ret = 0;
955 goto end;
956
957 /* binary */
958 OP(BYTECODE_OP_MUL):
959 OP(BYTECODE_OP_DIV):
960 OP(BYTECODE_OP_MOD):
961 OP(BYTECODE_OP_PLUS):
962 OP(BYTECODE_OP_MINUS):
963 printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
964 (unsigned int) *(bytecode_opcode_t *) pc);
965 ret = -EINVAL;
966 goto end;
967
968 OP(BYTECODE_OP_EQ):
969 OP(BYTECODE_OP_NE):
970 OP(BYTECODE_OP_GT):
971 OP(BYTECODE_OP_LT):
972 OP(BYTECODE_OP_GE):
973 OP(BYTECODE_OP_LE):
974 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
975 (unsigned int) *(bytecode_opcode_t *) pc);
976 ret = -EINVAL;
977 goto end;
978
979 OP(BYTECODE_OP_EQ_STRING):
980 {
981 int res;
982
983 res = (stack_strcmp(stack, top, "==") == 0);
984 estack_pop(stack, top, ax, bx, ax_t, bx_t);
985 estack_ax_v = res;
986 estack_ax_t = REG_S64;
987 next_pc += sizeof(struct binary_op);
988 PO;
989 }
990 OP(BYTECODE_OP_NE_STRING):
991 {
992 int res;
993
994 res = (stack_strcmp(stack, top, "!=") != 0);
995 estack_pop(stack, top, ax, bx, ax_t, bx_t);
996 estack_ax_v = res;
997 estack_ax_t = REG_S64;
998 next_pc += sizeof(struct binary_op);
999 PO;
1000 }
1001 OP(BYTECODE_OP_GT_STRING):
1002 {
1003 int res;
1004
1005 res = (stack_strcmp(stack, top, ">") > 0);
1006 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1007 estack_ax_v = res;
1008 estack_ax_t = REG_S64;
1009 next_pc += sizeof(struct binary_op);
1010 PO;
1011 }
1012 OP(BYTECODE_OP_LT_STRING):
1013 {
1014 int res;
1015
1016 res = (stack_strcmp(stack, top, "<") < 0);
1017 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1018 estack_ax_v = res;
1019 estack_ax_t = REG_S64;
1020 next_pc += sizeof(struct binary_op);
1021 PO;
1022 }
1023 OP(BYTECODE_OP_GE_STRING):
1024 {
1025 int res;
1026
1027 res = (stack_strcmp(stack, top, ">=") >= 0);
1028 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1029 estack_ax_v = res;
1030 estack_ax_t = REG_S64;
1031 next_pc += sizeof(struct binary_op);
1032 PO;
1033 }
1034 OP(BYTECODE_OP_LE_STRING):
1035 {
1036 int res;
1037
1038 res = (stack_strcmp(stack, top, "<=") <= 0);
1039 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1040 estack_ax_v = res;
1041 estack_ax_t = REG_S64;
1042 next_pc += sizeof(struct binary_op);
1043 PO;
1044 }
1045
1046 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
1047 {
1048 int res;
1049
1050 res = (stack_star_glob_match(stack, top, "==") == 0);
1051 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1052 estack_ax_v = res;
1053 estack_ax_t = REG_S64;
1054 next_pc += sizeof(struct binary_op);
1055 PO;
1056 }
1057 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
1058 {
1059 int res;
1060
1061 res = (stack_star_glob_match(stack, top, "!=") != 0);
1062 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1063 estack_ax_v = res;
1064 estack_ax_t = REG_S64;
1065 next_pc += sizeof(struct binary_op);
1066 PO;
1067 }
1068
1069 OP(BYTECODE_OP_EQ_S64):
1070 {
1071 int res;
1072
1073 res = (estack_bx_v == estack_ax_v);
1074 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1075 estack_ax_v = res;
1076 estack_ax_t = REG_S64;
1077 next_pc += sizeof(struct binary_op);
1078 PO;
1079 }
1080 OP(BYTECODE_OP_NE_S64):
1081 {
1082 int res;
1083
1084 res = (estack_bx_v != estack_ax_v);
1085 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1086 estack_ax_v = res;
1087 estack_ax_t = REG_S64;
1088 next_pc += sizeof(struct binary_op);
1089 PO;
1090 }
1091 OP(BYTECODE_OP_GT_S64):
1092 {
1093 int res;
1094
1095 res = (estack_bx_v > estack_ax_v);
1096 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1097 estack_ax_v = res;
1098 estack_ax_t = REG_S64;
1099 next_pc += sizeof(struct binary_op);
1100 PO;
1101 }
1102 OP(BYTECODE_OP_LT_S64):
1103 {
1104 int res;
1105
1106 res = (estack_bx_v < estack_ax_v);
1107 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1108 estack_ax_v = res;
1109 estack_ax_t = REG_S64;
1110 next_pc += sizeof(struct binary_op);
1111 PO;
1112 }
1113 OP(BYTECODE_OP_GE_S64):
1114 {
1115 int res;
1116
1117 res = (estack_bx_v >= estack_ax_v);
1118 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1119 estack_ax_v = res;
1120 estack_ax_t = REG_S64;
1121 next_pc += sizeof(struct binary_op);
1122 PO;
1123 }
1124 OP(BYTECODE_OP_LE_S64):
1125 {
1126 int res;
1127
1128 res = (estack_bx_v <= estack_ax_v);
1129 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1130 estack_ax_v = res;
1131 estack_ax_t = REG_S64;
1132 next_pc += sizeof(struct binary_op);
1133 PO;
1134 }
1135
1136 OP(BYTECODE_OP_EQ_DOUBLE):
1137 OP(BYTECODE_OP_NE_DOUBLE):
1138 OP(BYTECODE_OP_GT_DOUBLE):
1139 OP(BYTECODE_OP_LT_DOUBLE):
1140 OP(BYTECODE_OP_GE_DOUBLE):
1141 OP(BYTECODE_OP_LE_DOUBLE):
1142 {
1143 BUG_ON(1);
1144 PO;
1145 }
1146
1147 /* Mixed S64-double binary comparators */
1148 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1149 OP(BYTECODE_OP_NE_DOUBLE_S64):
1150 OP(BYTECODE_OP_GT_DOUBLE_S64):
1151 OP(BYTECODE_OP_LT_DOUBLE_S64):
1152 OP(BYTECODE_OP_GE_DOUBLE_S64):
1153 OP(BYTECODE_OP_LE_DOUBLE_S64):
1154 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1155 OP(BYTECODE_OP_NE_S64_DOUBLE):
1156 OP(BYTECODE_OP_GT_S64_DOUBLE):
1157 OP(BYTECODE_OP_LT_S64_DOUBLE):
1158 OP(BYTECODE_OP_GE_S64_DOUBLE):
1159 OP(BYTECODE_OP_LE_S64_DOUBLE):
1160 {
1161 BUG_ON(1);
1162 PO;
1163 }
1164 OP(BYTECODE_OP_BIT_RSHIFT):
1165 {
1166 int64_t res;
1167
1168 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1169 ret = -EINVAL;
1170 goto end;
1171 }
1172
1173 /* Catch undefined behavior. */
1174 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1175 ret = -EINVAL;
1176 goto end;
1177 }
1178 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1179 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1180 estack_ax_v = res;
1181 estack_ax_t = REG_U64;
1182 next_pc += sizeof(struct binary_op);
1183 PO;
1184 }
1185 OP(BYTECODE_OP_BIT_LSHIFT):
1186 {
1187 int64_t res;
1188
1189 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1190 ret = -EINVAL;
1191 goto end;
1192 }
1193
1194 /* Catch undefined behavior. */
1195 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1196 ret = -EINVAL;
1197 goto end;
1198 }
1199 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1200 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1201 estack_ax_v = res;
1202 estack_ax_t = REG_U64;
1203 next_pc += sizeof(struct binary_op);
1204 PO;
1205 }
1206 OP(BYTECODE_OP_BIT_AND):
1207 {
1208 int64_t res;
1209
1210 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1211 ret = -EINVAL;
1212 goto end;
1213 }
1214
1215 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1216 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1217 estack_ax_v = res;
1218 estack_ax_t = REG_U64;
1219 next_pc += sizeof(struct binary_op);
1220 PO;
1221 }
1222 OP(BYTECODE_OP_BIT_OR):
1223 {
1224 int64_t res;
1225
1226 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1227 ret = -EINVAL;
1228 goto end;
1229 }
1230
1231 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1232 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1233 estack_ax_v = res;
1234 estack_ax_t = REG_U64;
1235 next_pc += sizeof(struct binary_op);
1236 PO;
1237 }
1238 OP(BYTECODE_OP_BIT_XOR):
1239 {
1240 int64_t res;
1241
1242 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1243 ret = -EINVAL;
1244 goto end;
1245 }
1246
1247 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1248 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1249 estack_ax_v = res;
1250 estack_ax_t = REG_U64;
1251 next_pc += sizeof(struct binary_op);
1252 PO;
1253 }
1254
1255 /* unary */
1256 OP(BYTECODE_OP_UNARY_PLUS):
1257 OP(BYTECODE_OP_UNARY_MINUS):
1258 OP(BYTECODE_OP_UNARY_NOT):
1259 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1260 (unsigned int) *(bytecode_opcode_t *) pc);
1261 ret = -EINVAL;
1262 goto end;
1263
1264
1265 OP(BYTECODE_OP_UNARY_BIT_NOT):
1266 {
1267 estack_ax_v = ~(uint64_t) estack_ax_v;
1268 estack_ax_t = REG_S64;
1269 next_pc += sizeof(struct unary_op);
1270 PO;
1271 }
1272
1273 OP(BYTECODE_OP_UNARY_PLUS_S64):
1274 {
1275 next_pc += sizeof(struct unary_op);
1276 PO;
1277 }
1278 OP(BYTECODE_OP_UNARY_MINUS_S64):
1279 {
1280 estack_ax_v = -estack_ax_v;
1281 estack_ax_t = REG_S64;
1282 next_pc += sizeof(struct unary_op);
1283 PO;
1284 }
1285 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1286 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1287 {
1288 BUG_ON(1);
1289 PO;
1290 }
1291 OP(BYTECODE_OP_UNARY_NOT_S64):
1292 {
1293 estack_ax_v = !estack_ax_v;
1294 estack_ax_t = REG_S64;
1295 next_pc += sizeof(struct unary_op);
1296 PO;
1297 }
1298 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1299 {
1300 BUG_ON(1);
1301 PO;
1302 }
1303
1304 /* logical */
1305 OP(BYTECODE_OP_AND):
1306 {
1307 struct logical_op *insn = (struct logical_op *) pc;
1308
1309 /* If AX is 0, skip and evaluate to 0 */
1310 if (unlikely(estack_ax_v == 0)) {
1311 dbg_printk("Jumping to bytecode offset %u\n",
1312 (unsigned int) insn->skip_offset);
1313 next_pc = start_pc + insn->skip_offset;
1314 } else {
1315 /* Pop 1 when jump not taken */
1316 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1317 next_pc += sizeof(struct logical_op);
1318 }
1319 PO;
1320 }
1321 OP(BYTECODE_OP_OR):
1322 {
1323 struct logical_op *insn = (struct logical_op *) pc;
1324
1325 /* If AX is nonzero, skip and evaluate to 1 */
1326
1327 if (unlikely(estack_ax_v != 0)) {
1328 estack_ax_v = 1;
1329 dbg_printk("Jumping to bytecode offset %u\n",
1330 (unsigned int) insn->skip_offset);
1331 next_pc = start_pc + insn->skip_offset;
1332 } else {
1333 /* Pop 1 when jump not taken */
1334 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1335 next_pc += sizeof(struct logical_op);
1336 }
1337 PO;
1338 }
1339
1340
1341 /* load field ref */
1342 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1343 {
1344 struct load_op *insn = (struct load_op *) pc;
1345 struct field_ref *ref = (struct field_ref *) insn->data;
1346
1347 dbg_printk("load field ref offset %u type string\n",
1348 ref->offset);
1349 estack_push(stack, top, ax, bx, ax_t, bx_t);
1350 estack_ax(stack, top)->u.s.str =
1351 *(const char * const *) &interpreter_stack_data[ref->offset];
1352 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1353 dbg_printk("Bytecode warning: loading a NULL string.\n");
1354 ret = -EINVAL;
1355 goto end;
1356 }
1357 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1358 estack_ax(stack, top)->u.s.literal_type =
1359 ESTACK_STRING_LITERAL_TYPE_NONE;
1360 estack_ax(stack, top)->u.s.user = 0;
1361 estack_ax(stack, top)->type = REG_STRING;
1362 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1363 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1364 PO;
1365 }
1366
1367 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
1368 {
1369 struct load_op *insn = (struct load_op *) pc;
1370 struct field_ref *ref = (struct field_ref *) insn->data;
1371
1372 dbg_printk("load field ref offset %u type sequence\n",
1373 ref->offset);
1374 estack_push(stack, top, ax, bx, ax_t, bx_t);
1375 estack_ax(stack, top)->u.s.seq_len =
1376 *(unsigned long *) &interpreter_stack_data[ref->offset];
1377 estack_ax(stack, top)->u.s.str =
1378 *(const char **) (&interpreter_stack_data[ref->offset
1379 + sizeof(unsigned long)]);
1380 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1381 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1382 ret = -EINVAL;
1383 goto end;
1384 }
1385 estack_ax(stack, top)->u.s.literal_type =
1386 ESTACK_STRING_LITERAL_TYPE_NONE;
1387 estack_ax(stack, top)->u.s.user = 0;
1388 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1389 PO;
1390 }
1391
1392 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
1393 {
1394 struct load_op *insn = (struct load_op *) pc;
1395 struct field_ref *ref = (struct field_ref *) insn->data;
1396
1397 dbg_printk("load field ref offset %u type s64\n",
1398 ref->offset);
1399 estack_push(stack, top, ax, bx, ax_t, bx_t);
1400 estack_ax_v =
1401 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
1402 estack_ax_t = REG_S64;
1403 dbg_printk("ref load s64 %lld\n",
1404 (long long) estack_ax_v);
1405 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1406 PO;
1407 }
1408
1409 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
1410 {
1411 BUG_ON(1);
1412 PO;
1413 }
1414
1415 /* load from immediate operand */
1416 OP(BYTECODE_OP_LOAD_STRING):
1417 {
1418 struct load_op *insn = (struct load_op *) pc;
1419
1420 dbg_printk("load string %s\n", insn->data);
1421 estack_push(stack, top, ax, bx, ax_t, bx_t);
1422 estack_ax(stack, top)->u.s.str = insn->data;
1423 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1424 estack_ax(stack, top)->u.s.literal_type =
1425 ESTACK_STRING_LITERAL_TYPE_PLAIN;
1426 estack_ax(stack, top)->u.s.user = 0;
1427 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1428 PO;
1429 }
1430
1431 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
1432 {
1433 struct load_op *insn = (struct load_op *) pc;
1434
1435 dbg_printk("load globbing pattern %s\n", insn->data);
1436 estack_push(stack, top, ax, bx, ax_t, bx_t);
1437 estack_ax(stack, top)->u.s.str = insn->data;
1438 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1439 estack_ax(stack, top)->u.s.literal_type =
1440 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
1441 estack_ax(stack, top)->u.s.user = 0;
1442 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1443 PO;
1444 }
1445
1446 OP(BYTECODE_OP_LOAD_S64):
1447 {
1448 struct load_op *insn = (struct load_op *) pc;
1449
1450 estack_push(stack, top, ax, bx, ax_t, bx_t);
1451 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
1452 estack_ax_t = REG_S64;
1453 dbg_printk("load s64 %lld\n",
1454 (long long) estack_ax_v);
1455 next_pc += sizeof(struct load_op)
1456 + sizeof(struct literal_numeric);
1457 PO;
1458 }
1459
1460 OP(BYTECODE_OP_LOAD_DOUBLE):
1461 {
1462 BUG_ON(1);
1463 PO;
1464 }
1465
1466 /* cast */
1467 OP(BYTECODE_OP_CAST_TO_S64):
1468 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1469 (unsigned int) *(bytecode_opcode_t *) pc);
1470 ret = -EINVAL;
1471 goto end;
1472
1473 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
1474 {
1475 BUG_ON(1);
1476 PO;
1477 }
1478
1479 OP(BYTECODE_OP_CAST_NOP):
1480 {
1481 next_pc += sizeof(struct cast_op);
1482 PO;
1483 }
1484
1485 /* get context ref */
1486 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
1487 {
1488 struct load_op *insn = (struct load_op *) pc;
1489 struct field_ref *ref = (struct field_ref *) insn->data;
1490 struct lttng_kernel_ctx_field *ctx_field;
1491 struct lttng_ctx_value v;
1492
1493 dbg_printk("get context ref offset %u type string\n",
1494 ref->offset);
1495 ctx_field = &lttng_static_ctx->fields[ref->offset];
1496 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
1497 estack_push(stack, top, ax, bx, ax_t, bx_t);
1498 estack_ax(stack, top)->u.s.str = v.u.str;
1499 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1500 dbg_printk("Bytecode warning: loading a NULL string.\n");
1501 ret = -EINVAL;
1502 goto end;
1503 }
1504 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1505 estack_ax(stack, top)->u.s.literal_type =
1506 ESTACK_STRING_LITERAL_TYPE_NONE;
1507 estack_ax(stack, top)->u.s.user = 0;
1508 estack_ax(stack, top)->type = REG_STRING;
1509 dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
1510 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1511 PO;
1512 }
1513
1514 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
1515 {
1516 struct load_op *insn = (struct load_op *) pc;
1517 struct field_ref *ref = (struct field_ref *) insn->data;
1518 struct lttng_kernel_ctx_field *ctx_field;
1519 struct lttng_ctx_value v;
1520
1521 dbg_printk("get context ref offset %u type s64\n",
1522 ref->offset);
1523 ctx_field = &lttng_static_ctx->fields[ref->offset];
1524 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
1525 estack_push(stack, top, ax, bx, ax_t, bx_t);
1526 estack_ax_v = v.u.s64;
1527 estack_ax_t = REG_S64;
1528 dbg_printk("ref get context s64 %lld\n",
1529 (long long) estack_ax_v);
1530 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1531 PO;
1532 }
1533
1534 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
1535 {
1536 BUG_ON(1);
1537 PO;
1538 }
1539
1540 /* load userspace field ref */
1541 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING):
1542 {
1543 struct load_op *insn = (struct load_op *) pc;
1544 struct field_ref *ref = (struct field_ref *) insn->data;
1545
1546 dbg_printk("load field ref offset %u type user string\n",
1547 ref->offset);
1548 estack_push(stack, top, ax, bx, ax_t, bx_t);
1549 estack_ax(stack, top)->u.s.user_str =
1550 *(const char * const *) &interpreter_stack_data[ref->offset];
1551 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1552 dbg_printk("Bytecode warning: loading a NULL string.\n");
1553 ret = -EINVAL;
1554 goto end;
1555 }
1556 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1557 estack_ax(stack, top)->u.s.literal_type =
1558 ESTACK_STRING_LITERAL_TYPE_NONE;
1559 estack_ax(stack, top)->u.s.user = 1;
1560 estack_ax(stack, top)->type = REG_STRING;
1561 dbg_load_ref_user_str_printk(estack_ax(stack, top));
1562 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1563 PO;
1564 }
1565
1566 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1567 {
1568 struct load_op *insn = (struct load_op *) pc;
1569 struct field_ref *ref = (struct field_ref *) insn->data;
1570
1571 dbg_printk("load field ref offset %u type user sequence\n",
1572 ref->offset);
1573 estack_push(stack, top, ax, bx, ax_t, bx_t);
1574 estack_ax(stack, top)->u.s.seq_len =
1575 *(unsigned long *) &interpreter_stack_data[ref->offset];
1576 estack_ax(stack, top)->u.s.user_str =
1577 *(const char **) (&interpreter_stack_data[ref->offset
1578 + sizeof(unsigned long)]);
1579 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1580 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1581 ret = -EINVAL;
1582 goto end;
1583 }
1584 estack_ax(stack, top)->u.s.literal_type =
1585 ESTACK_STRING_LITERAL_TYPE_NONE;
1586 estack_ax(stack, top)->u.s.user = 1;
1587 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1588 PO;
1589 }
1590
1591 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
1592 {
1593 dbg_printk("op get context root\n");
1594 estack_push(stack, top, ax, bx, ax_t, bx_t);
1595 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
1596 /* "field" only needed for variants. */
1597 estack_ax(stack, top)->u.ptr.field = NULL;
1598 estack_ax(stack, top)->type = REG_PTR;
1599 next_pc += sizeof(struct load_op);
1600 PO;
1601 }
1602
1603 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
1604 {
1605 BUG_ON(1);
1606 PO;
1607 }
1608
1609 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
1610 {
1611 dbg_printk("op get app payload root\n");
1612 estack_push(stack, top, ax, bx, ax_t, bx_t);
1613 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
1614 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
1615 /* "field" only needed for variants. */
1616 estack_ax(stack, top)->u.ptr.field = NULL;
1617 estack_ax(stack, top)->type = REG_PTR;
1618 next_pc += sizeof(struct load_op);
1619 PO;
1620 }
1621
1622 OP(BYTECODE_OP_GET_SYMBOL):
1623 {
1624 dbg_printk("op get symbol\n");
1625 switch (estack_ax(stack, top)->u.ptr.type) {
1626 case LOAD_OBJECT:
1627 printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
1628 ret = -EINVAL;
1629 goto end;
1630 case LOAD_ROOT_CONTEXT:
1631 case LOAD_ROOT_APP_CONTEXT:
1632 case LOAD_ROOT_PAYLOAD:
1633 /*
1634 * symbol lookup is performed by
1635 * specialization.
1636 */
1637 ret = -EINVAL;
1638 goto end;
1639 }
1640 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1641 PO;
1642 }
1643
1644 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
1645 {
1646 /*
1647 * Used for first variant encountered in a
1648 * traversal. Variants are not implemented yet.
1649 */
1650 ret = -EINVAL;
1651 goto end;
1652 }
1653
1654 OP(BYTECODE_OP_GET_INDEX_U16):
1655 {
1656 struct load_op *insn = (struct load_op *) pc;
1657 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1658
1659 dbg_printk("op get index u16\n");
1660 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1661 if (ret)
1662 goto end;
1663 estack_ax_v = estack_ax(stack, top)->u.v;
1664 estack_ax_t = estack_ax(stack, top)->type;
1665 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1666 PO;
1667 }
1668
1669 OP(BYTECODE_OP_GET_INDEX_U64):
1670 {
1671 struct load_op *insn = (struct load_op *) pc;
1672 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1673
1674 dbg_printk("op get index u64\n");
1675 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1676 if (ret)
1677 goto end;
1678 estack_ax_v = estack_ax(stack, top)->u.v;
1679 estack_ax_t = estack_ax(stack, top)->type;
1680 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1681 PO;
1682 }
1683
1684 OP(BYTECODE_OP_LOAD_FIELD):
1685 {
1686 dbg_printk("op load field\n");
1687 ret = dynamic_load_field(estack_ax(stack, top));
1688 if (ret)
1689 goto end;
1690 estack_ax_v = estack_ax(stack, top)->u.v;
1691 estack_ax_t = estack_ax(stack, top)->type;
1692 next_pc += sizeof(struct load_op);
1693 PO;
1694 }
1695
1696 OP(BYTECODE_OP_LOAD_FIELD_S8):
1697 {
1698 dbg_printk("op load field s8\n");
1699
1700 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1701 estack_ax_t = REG_S64;
1702 next_pc += sizeof(struct load_op);
1703 PO;
1704 }
1705 OP(BYTECODE_OP_LOAD_FIELD_S16):
1706 {
1707 dbg_printk("op load field s16\n");
1708
1709 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1710 estack_ax_t = REG_S64;
1711 next_pc += sizeof(struct load_op);
1712 PO;
1713 }
1714 OP(BYTECODE_OP_LOAD_FIELD_S32):
1715 {
1716 dbg_printk("op load field s32\n");
1717
1718 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1719 estack_ax_t = REG_S64;
1720 next_pc += sizeof(struct load_op);
1721 PO;
1722 }
1723 OP(BYTECODE_OP_LOAD_FIELD_S64):
1724 {
1725 dbg_printk("op load field s64\n");
1726
1727 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1728 estack_ax_t = REG_S64;
1729 next_pc += sizeof(struct load_op);
1730 PO;
1731 }
1732 OP(BYTECODE_OP_LOAD_FIELD_U8):
1733 {
1734 dbg_printk("op load field u8\n");
1735
1736 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1737 estack_ax_t = REG_S64;
1738 next_pc += sizeof(struct load_op);
1739 PO;
1740 }
1741 OP(BYTECODE_OP_LOAD_FIELD_U16):
1742 {
1743 dbg_printk("op load field u16\n");
1744
1745 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1746 estack_ax_t = REG_S64;
1747 next_pc += sizeof(struct load_op);
1748 PO;
1749 }
1750 OP(BYTECODE_OP_LOAD_FIELD_U32):
1751 {
1752 dbg_printk("op load field u32\n");
1753
1754 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1755 estack_ax_t = REG_S64;
1756 next_pc += sizeof(struct load_op);
1757 PO;
1758 }
1759 OP(BYTECODE_OP_LOAD_FIELD_U64):
1760 {
1761 dbg_printk("op load field u64\n");
1762
1763 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1764 estack_ax_t = REG_S64;
1765 next_pc += sizeof(struct load_op);
1766 PO;
1767 }
1768 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
1769 {
1770 ret = -EINVAL;
1771 goto end;
1772 }
1773
1774 OP(BYTECODE_OP_LOAD_FIELD_STRING):
1775 {
1776 const char *str;
1777
1778 dbg_printk("op load field string\n");
1779 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
1780 estack_ax(stack, top)->u.s.str = str;
1781 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1782 dbg_printk("Bytecode warning: loading a NULL string.\n");
1783 ret = -EINVAL;
1784 goto end;
1785 }
1786 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1787 estack_ax(stack, top)->u.s.literal_type =
1788 ESTACK_STRING_LITERAL_TYPE_NONE;
1789 estack_ax(stack, top)->type = REG_STRING;
1790 next_pc += sizeof(struct load_op);
1791 PO;
1792 }
1793
1794 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
1795 {
1796 const char *ptr;
1797
1798 dbg_printk("op load field string sequence\n");
1799 ptr = estack_ax(stack, top)->u.ptr.ptr;
1800 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
1801 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
1802 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1803 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1804 ret = -EINVAL;
1805 goto end;
1806 }
1807 estack_ax(stack, top)->u.s.literal_type =
1808 ESTACK_STRING_LITERAL_TYPE_NONE;
1809 estack_ax(stack, top)->type = REG_STRING;
1810 next_pc += sizeof(struct load_op);
1811 PO;
1812 }
1813
1814 END_OP
1815 end:
1816 /* No need to prepare output if an error occurred. */
1817 if (ret)
1818 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
1819
1820 /* Prepare output. */
1821 switch (kernel_bytecode->type) {
1822 case LTTNG_KERNEL_BYTECODE_TYPE_FILTER:
1823 {
1824 struct lttng_kernel_bytecode_filter_ctx *filter_ctx =
1825 (struct lttng_kernel_bytecode_filter_ctx *) caller_ctx;
1826 if (retval)
1827 filter_ctx->result = LTTNG_KERNEL_BYTECODE_FILTER_ACCEPT;
1828 else
1829 filter_ctx->result = LTTNG_KERNEL_BYTECODE_FILTER_REJECT;
1830 break;
1831 }
1832 case LTTNG_KERNEL_BYTECODE_TYPE_CAPTURE:
1833 ret = lttng_bytecode_interpret_format_output(estack_ax(stack, top),
1834 (struct lttng_interpreter_output *) caller_ctx);
1835 break;
1836 default:
1837 ret = -EINVAL;
1838 break;
1839 }
1840 if (ret)
1841 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
1842 else
1843 return LTTNG_KERNEL_BYTECODE_INTERPRETER_OK;
1844 }
1845 LTTNG_STACK_FRAME_NON_STANDARD(lttng_bytecode_interpret);
1846
1847 /*
1848 * Return LTTNG_KERNEL_EVENT_FILTER_ACCEPT or LTTNG_KERNEL_EVENT_FILTER_REJECT.
1849 */
1850 int lttng_kernel_interpret_event_filter(const struct lttng_kernel_event_common *event,
1851 const char *interpreter_stack_data,
1852 struct lttng_kernel_probe_ctx *probe_ctx,
1853 void *event_filter_ctx __attribute__((unused)))
1854 {
1855 struct lttng_kernel_bytecode_runtime *filter_bc_runtime;
1856 struct list_head *filter_bytecode_runtime_head = &event->priv->filter_bytecode_runtime_head;
1857 struct lttng_kernel_bytecode_filter_ctx bytecode_filter_ctx;
1858 bool filter_record = false;
1859
1860 list_for_each_entry_rcu(filter_bc_runtime, filter_bytecode_runtime_head, node) {
1861 if (likely(filter_bc_runtime->interpreter_func(filter_bc_runtime,
1862 interpreter_stack_data, probe_ctx, &bytecode_filter_ctx) == LTTNG_KERNEL_BYTECODE_INTERPRETER_OK)) {
1863 if (unlikely(bytecode_filter_ctx.result == LTTNG_KERNEL_BYTECODE_FILTER_ACCEPT)) {
1864 filter_record = true;
1865 break;
1866 }
1867 }
1868 }
1869 if (filter_record)
1870 return LTTNG_KERNEL_EVENT_FILTER_ACCEPT;
1871 else
1872 return LTTNG_KERNEL_EVENT_FILTER_REJECT;
1873 }
1874
1875 #undef START_OP
1876 #undef OP
1877 #undef PO
1878 #undef END_OP
This page took 0.093123 seconds and 4 git commands to generate.