bytecode: set register type to `REG_PTR` even if not used
[lttng-modules.git] / src / lttng-filter-interpreter.c
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-filter-interpreter.c
4 *
5 * LTTng modules filter interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
14
15 #include <lttng/filter.h>
16 #include <lttng/string-utils.h>
17
18 /*
19 * get_char should be called with page fault handler disabled if it is expected
20 * to handle user-space read.
21 */
22 static
23 char get_char(struct estack_entry *reg, size_t offset)
24 {
25 if (unlikely(offset >= reg->u.s.seq_len))
26 return '\0';
27 if (reg->u.s.user) {
28 char c;
29
30 /* Handle invalid access as end of string. */
31 if (unlikely(!lttng_access_ok(VERIFY_READ,
32 reg->u.s.user_str + offset,
33 sizeof(c))))
34 return '\0';
35 /* Handle fault (nonzero return value) as end of string. */
36 if (unlikely(__copy_from_user_inatomic(&c,
37 reg->u.s.user_str + offset,
38 sizeof(c))))
39 return '\0';
40 return c;
41 } else {
42 return reg->u.s.str[offset];
43 }
44 }
45
46 /*
47 * -1: wildcard found.
48 * -2: unknown escape char.
49 * 0: normal char.
50 */
51 static
52 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
53 {
54 switch (*c) {
55 case '\\':
56 (*offset)++;
57 *c = get_char(reg, *offset);
58 switch (*c) {
59 case '\\':
60 case '*':
61 return 0;
62 default:
63 return -2;
64 }
65 case '*':
66 return -1;
67 default:
68 return 0;
69 }
70 }
71
72 static
73 char get_char_at_cb(size_t at, void *data)
74 {
75 return get_char(data, at);
76 }
77
78 static
79 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
80 {
81 bool has_user = false;
82 int result;
83 struct estack_entry *pattern_reg;
84 struct estack_entry *candidate_reg;
85
86 /* Disable the page fault handler when reading from userspace. */
87 if (estack_bx(stack, top)->u.s.user
88 || estack_ax(stack, top)->u.s.user) {
89 has_user = true;
90 pagefault_disable();
91 }
92
93 /* Find out which side is the pattern vs. the candidate. */
94 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
95 pattern_reg = estack_ax(stack, top);
96 candidate_reg = estack_bx(stack, top);
97 } else {
98 pattern_reg = estack_bx(stack, top);
99 candidate_reg = estack_ax(stack, top);
100 }
101
102 /* Perform the match operation. */
103 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
104 pattern_reg, get_char_at_cb, candidate_reg);
105 if (has_user)
106 pagefault_enable();
107
108 return result;
109 }
110
/*
 * strcmp()-like comparison of the two string registers on top of the
 * stack: returns bx - ax character difference semantics (<0, 0, >0).
 * Plain string literals may contain escape sequences and '*' wildcards,
 * which are resolved via parse_char(); a wildcard on either side makes
 * the strings compare equal from that point on (diff = 0).
 */
static
int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
{
	size_t offset_bx = 0, offset_ax = 0;
	int diff, has_user = 0;

	/* Disable page faults while get_char() may touch userspace. */
	if (estack_bx(stack, top)->u.s.user
			|| estack_ax(stack, top)->u.s.user) {
		has_user = 1;
		pagefault_disable();
	}

	for (;;) {
		int ret;
		/* Set when bx consumed an unknown escape this iteration. */
		int escaped_r0 = 0;
		char char_bx, char_ax;

		char_bx = get_char(estack_bx(stack, top), offset_bx);
		char_ax = get_char(estack_ax(stack, top), offset_ax);

		/* bx ended: ax wildcard means equal, otherwise bx < ax. */
		if (unlikely(char_bx == '\0')) {
			if (char_ax == '\0') {
				diff = 0;
				break;
			} else {
				if (estack_ax(stack, top)->u.s.literal_type ==
						ESTACK_STRING_LITERAL_TYPE_PLAIN) {
					ret = parse_char(estack_ax(stack, top),
						&char_ax, &offset_ax);
					if (ret == -1) {
						diff = 0;
						break;
					}
				}
				diff = -1;
				break;
			}
		}
		/* ax ended: bx wildcard means equal, otherwise bx > ax. */
		if (unlikely(char_ax == '\0')) {
			if (estack_bx(stack, top)->u.s.literal_type ==
					ESTACK_STRING_LITERAL_TYPE_PLAIN) {
				ret = parse_char(estack_bx(stack, top),
					&char_bx, &offset_bx);
				if (ret == -1) {
					diff = 0;
					break;
				}
			}
			diff = 1;
			break;
		}
		/* Resolve escapes/wildcards on the bx side. */
		if (estack_bx(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_bx(stack, top),
				&char_bx, &offset_bx);
			if (ret == -1) {
				diff = 0;
				break;
			} else if (ret == -2) {
				escaped_r0 = 1;
			}
			/* else compare both char */
		}
		/*
		 * Resolve escapes/wildcards on the ax side; an unknown
		 * escape only on one side orders the strings.
		 */
		if (estack_ax(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_ax(stack, top),
				&char_ax, &offset_ax);
			if (ret == -1) {
				diff = 0;
				break;
			} else if (ret == -2) {
				if (!escaped_r0) {
					diff = -1;
					break;
				}
			} else {
				if (escaped_r0) {
					diff = 1;
					break;
				}
			}
		} else {
			if (escaped_r0) {
				diff = 1;
				break;
			}
		}
		diff = char_bx - char_ax;
		if (diff != 0)
			break;
		offset_bx++;
		offset_ax++;
	}
	if (has_user)
		pagefault_enable();

	return diff;
}
209
/*
 * Trivial interpreter that unconditionally discards the event.
 * NOTE(review): presumably installed as the interpreter callback when no
 * filter bytecode is attached/enabled — confirm against callers.
 */
uint64_t lttng_filter_interpret_bytecode_false(void *filter_data,
		struct lttng_probe_ctx *lttng_probe_ctx,
		const char *filter_stack_data)
{
	return LTTNG_FILTER_DISCARD;
}
216
#ifdef INTERPRETER_USE_SWITCH

/*
 * Fallback for compilers that do not support taking address of labels.
 *
 * Fix: start execution at &bytecode->code[0], as the dispatch-table
 * version below does. The previous &bytecode->data[0] pointed at the
 * runtime side-data area (cf. runtime->data use in dynamic_get_index()),
 * so the rarely-compiled switch fallback would have interpreted data as
 * bytecode.
 */

#define START_OP							\
		start_pc = &bytecode->code[0];				\
		for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
				pc = next_pc) {				\
			dbg_printk("LTTng: Executing op %s (%u)\n",	\
				lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
				(unsigned int) *(filter_opcode_t *) pc); \
			switch (*(filter_opcode_t *) pc) {

#define OP(name)	case name

#define PO		break

#define END_OP		}						\
	}

#else

/*
 * Dispatch-table based interpreter.
 */

#define START_OP							\
		start_pc = &bytecode->code[0];				\
		pc = next_pc = start_pc;				\
		if (unlikely(pc - start_pc >= bytecode->len))		\
			goto end;					\
		goto *dispatch[*(filter_opcode_t *) pc];

#define OP(name)							\
LABEL_##name

#define PO								\
		pc = next_pc;						\
		goto *dispatch[*(filter_opcode_t *) pc];

#define END_OP

#endif
262
/*
 * True when the register type holds an integer value. Argument is
 * parenthesized at each use site (macro hygiene: safe if the caller
 * ever passes a compound expression).
 */
#define IS_INTEGER_REGISTER(reg_type) \
	((reg_type) == REG_S64 || (reg_type) == REG_U64)
265
/*
 * Load the value of the static context field at index `idx` into `ptr`
 * as a LOAD_OBJECT. Integer and enum fields are copied by value into
 * ptr->u and ptr->ptr is pointed at that copy; string-like fields keep
 * a pointer to the context-provided string.
 *
 * Returns 0 on success, -EINVAL for unsupported field types.
 */
static int context_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
		struct load_ptr *ptr,
		uint32_t idx)
{

	struct lttng_ctx_field *ctx_field;
	struct lttng_event_field *field;
	union lttng_ctx_value v;

	ctx_field = &lttng_static_ctx->fields[idx];
	field = &ctx_field->event_field;
	ptr->type = LOAD_OBJECT;
	/* field is only used for types nested within variants. */
	ptr->field = NULL;

	switch (field->type.atype) {
	case atype_integer:
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		if (field->type.u.integer.signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.s64;	/* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	case atype_enum_nestable:
	{
		/* Enums are loaded as their integer container type. */
		const struct lttng_integer_type *itype =
			&field->type.u.enum_nestable.container_type->u.integer;

		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		if (itype->signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.s64;	/* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	}
	case atype_array_nestable:
		/* Only NUL-terminated char arrays (strings) are supported. */
		if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: filter: Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
			printk(KERN_WARNING "LTTng: filter: Only string arrays are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_sequence_nestable:
		/* Same restriction as arrays: string sequences only. */
		if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: filter: Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
			printk(KERN_WARNING "LTTng: filter: Only string sequences are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_string:
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_struct_nestable:
		printk(KERN_WARNING "LTTng: filter: Structure type cannot be loaded.\n");
		return -EINVAL;
	case atype_variant_nestable:
		printk(KERN_WARNING "LTTng: filter: Variant type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "LTTng: filter: Unknown type: %d", (int) field->type.atype);
		return -EINVAL;
	}
	return 0;
}
354
/*
 * Apply a "get index" bytecode operation to the object or root on the
 * top of the stack, rewriting stack_top->u.ptr in place to reference
 * the selected element. `index` is an offset into the runtime data
 * area where the filter_get_index_data operand lives.
 *
 * Returns 0 on success, negative errno on unsupported/out-of-bound
 * accesses. On success the register type is set to REG_PTR even if the
 * result is not subsequently used.
 */
static int dynamic_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
		struct bytecode_runtime *runtime,
		uint64_t index, struct estack_entry *stack_top)
{
	int ret;
	const struct filter_get_index_data *gid;

	gid = (const struct filter_get_index_data *) &runtime->data[index];
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		switch (stack_top->u.ptr.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const char *ptr;

			/* Offset is pre-validated against the static length. */
			WARN_ON_ONCE(gid->offset >= gid->array_len);
			/* Skip count (unsigned long) */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			/* Sanity check performed before clearing the field. */
			BUG_ON(stack_top->u.ptr.field->type.atype != atype_array_nestable);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const char *ptr;
			size_t ptr_seq_len;

			/* Layout: { unsigned long len; const char *data; }. */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
			/* Runtime bound check against the dynamic length. */
			if (gid->offset >= gid->elem.len * ptr_seq_len) {
				ret = -EINVAL;
				goto end;
			}
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			/* Sanity check performed before clearing the field. */
			BUG_ON(stack_top->u.ptr.field->type.atype != atype_sequence_nestable);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			printk(KERN_WARNING "LTTng: filter: Nested structures are not supported yet.\n");
			ret = -EINVAL;
			goto end;
		case OBJECT_TYPE_VARIANT:
		default:
			printk(KERN_WARNING "LTTng: filter: Unexpected get index type %d",
				(int) stack_top->u.ptr.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:	/* Fall-through */
	{
		/* Context roots resolve through the static context table. */
		ret = context_get_index(lttng_probe_ctx,
				&stack_top->u.ptr,
				gid->ctx_index);
		if (ret) {
			goto end;
		}
		break;
	}
	case LOAD_ROOT_PAYLOAD:
		/* Payload fields are located by byte offset into the record. */
		stack_top->u.ptr.ptr += gid->offset;
		if (gid->elem.type == OBJECT_TYPE_STRING)
			stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
		stack_top->u.ptr.object_type = gid->elem.type;
		stack_top->u.ptr.type = LOAD_OBJECT;
		stack_top->u.ptr.field = gid->field;
		break;
	}

	stack_top->type = REG_PTR;

	return 0;

end:
	return ret;
}
440
/*
 * Dereference the REG_PTR object on the top of the stack, converting it
 * into a concrete value register: integers become REG_S64/REG_U64
 * (byte-swapped when rev_bo is set), strings and string sequences
 * become REG_STRING. Aggregate and unsupported object types fail with
 * -EINVAL.
 */
static int dynamic_load_field(struct estack_entry *stack_top)
{
	int ret;

	/* Only fully-resolved objects can be loaded, not bare roots. */
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		dbg_printk("Filter warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->u.ptr.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printk("op load field s8\n");
		stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_S64;
		break;
	case OBJECT_TYPE_S16:
	{
		int16_t tmp;

		dbg_printk("op load field s16\n");
		tmp = *(int16_t *) stack_top->u.ptr.ptr;
		/* Reverse byte order when the field's endianness differs. */
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S32:
	{
		int32_t tmp;

		dbg_printk("op load field s32\n");
		tmp = *(int32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S64:
	{
		int64_t tmp;

		dbg_printk("op load field s64\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_U8:
		dbg_printk("op load field u8\n");
		stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_U64;
		break;
	case OBJECT_TYPE_U16:
	{
		uint16_t tmp;

		dbg_printk("op load field u16\n");
		tmp = *(uint16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U32:
	{
		uint32_t tmp;

		dbg_printk("op load field u32\n");
		tmp = *(uint32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U64:
	{
		uint64_t tmp;

		dbg_printk("op load field u64\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_STRING:
	{
		const char *str;

		dbg_printk("op load field string\n");
		str = (const char *) stack_top->u.ptr.ptr;
		stack_top->u.s.str = str;
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		/* NUL-terminated: no explicit length bound. */
		stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_STRING_SEQUENCE:
	{
		const char *ptr;

		dbg_printk("op load field string sequence\n");
		/* Layout: { unsigned long len; const char *data; }. */
		ptr = stack_top->u.ptr.ptr;
		stack_top->u.s.seq_len = *(unsigned long *) ptr;
		stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL sequence.\n");
			ret = -EINVAL;
			goto end;
		}
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_DYNAMIC:
		/*
		 * Dynamic types in context are looked up
		 * by context get index.
		 */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_DOUBLE:
		/* No floating point support in the kernel interpreter. */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		printk(KERN_WARNING "LTTng: filter: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
598
/*
 * Translate the top-of-stack register `ax` into the caller-visible
 * lttng_interpreter_output structure. Scalar REG_PTR objects are first
 * materialized via dynamic_load_field() and the conversion retried;
 * sequences/arrays are exported by reference with their element type.
 *
 * Returns LTTNG_FILTER_RECORD_FLAG on success, negative errno on
 * unsupported register or object types.
 */
static
int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
		struct lttng_interpreter_output *output)
{
	int ret;

again:
	switch (ax->type) {
	case REG_S64:
		output->type = LTTNG_INTERPRETER_TYPE_S64;
		output->u.s = ax->u.v;
		break;
	case REG_U64:
		output->type = LTTNG_INTERPRETER_TYPE_U64;
		output->u.u = (uint64_t) ax->u.v;
		break;
	case REG_STRING:
		output->type = LTTNG_INTERPRETER_TYPE_STRING;
		output->u.str.str = ax->u.s.str;
		output->u.str.len = ax->u.s.seq_len;
		break;
	case REG_PTR:
		switch (ax->u.ptr.object_type) {
		case OBJECT_TYPE_S8:
		case OBJECT_TYPE_S16:
		case OBJECT_TYPE_S32:
		case OBJECT_TYPE_S64:
		case OBJECT_TYPE_U8:
		case OBJECT_TYPE_U16:
		case OBJECT_TYPE_U32:
		case OBJECT_TYPE_U64:
		case OBJECT_TYPE_DOUBLE:
		case OBJECT_TYPE_STRING:
		case OBJECT_TYPE_STRING_SEQUENCE:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			/* Retry after loading ptr into stack top. */
			goto again;
		case OBJECT_TYPE_SEQUENCE:
			/* Layout: { unsigned long nr_elem; const char *data; }. */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
			output->u.sequence.nested_type = ax->u.ptr.field->type.u.sequence_nestable.elem_type;
			break;
		case OBJECT_TYPE_ARRAY:
			/* Skip count (unsigned long) */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = ax->u.ptr.field->type.u.array_nestable.length;
			output->u.sequence.nested_type = ax->u.ptr.field->type.u.array_nestable.elem_type;
			break;
		case OBJECT_TYPE_STRUCT:
		case OBJECT_TYPE_VARIANT:
		default:
			return -EINVAL;
		}

		break;
	case REG_STAR_GLOB_STRING:
	case REG_TYPE_UNKNOWN:
	default:
		return -EINVAL;
	}

	return LTTNG_FILTER_RECORD_FLAG;
}
666
667 /*
668 * Return 0 (discard), or raise the 0x1 flag (log event).
669 * Currently, other flags are kept for future extensions and have no
670 * effect.
671 */
672 static
673 uint64_t bytecode_interpret(void *interpreter_data,
674 struct lttng_probe_ctx *lttng_probe_ctx,
675 const char *interpreter_stack_data,
676 struct lttng_interpreter_output *output)
677 {
678 struct bytecode_runtime *bytecode = interpreter_data;
679 void *pc, *next_pc, *start_pc;
680 int ret = -EINVAL;
681 uint64_t retval = 0;
682 struct estack _stack;
683 struct estack *stack = &_stack;
684 register int64_t ax = 0, bx = 0;
685 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
686 register int top = FILTER_STACK_EMPTY;
687 #ifndef INTERPRETER_USE_SWITCH
688 static void *dispatch[NR_FILTER_OPS] = {
689 [ FILTER_OP_UNKNOWN ] = &&LABEL_FILTER_OP_UNKNOWN,
690
691 [ FILTER_OP_RETURN ] = &&LABEL_FILTER_OP_RETURN,
692
693 /* binary */
694 [ FILTER_OP_MUL ] = &&LABEL_FILTER_OP_MUL,
695 [ FILTER_OP_DIV ] = &&LABEL_FILTER_OP_DIV,
696 [ FILTER_OP_MOD ] = &&LABEL_FILTER_OP_MOD,
697 [ FILTER_OP_PLUS ] = &&LABEL_FILTER_OP_PLUS,
698 [ FILTER_OP_MINUS ] = &&LABEL_FILTER_OP_MINUS,
699 [ FILTER_OP_BIT_RSHIFT ] = &&LABEL_FILTER_OP_BIT_RSHIFT,
700 [ FILTER_OP_BIT_LSHIFT ] = &&LABEL_FILTER_OP_BIT_LSHIFT,
701 [ FILTER_OP_BIT_AND ] = &&LABEL_FILTER_OP_BIT_AND,
702 [ FILTER_OP_BIT_OR ] = &&LABEL_FILTER_OP_BIT_OR,
703 [ FILTER_OP_BIT_XOR ] = &&LABEL_FILTER_OP_BIT_XOR,
704
705 /* binary comparators */
706 [ FILTER_OP_EQ ] = &&LABEL_FILTER_OP_EQ,
707 [ FILTER_OP_NE ] = &&LABEL_FILTER_OP_NE,
708 [ FILTER_OP_GT ] = &&LABEL_FILTER_OP_GT,
709 [ FILTER_OP_LT ] = &&LABEL_FILTER_OP_LT,
710 [ FILTER_OP_GE ] = &&LABEL_FILTER_OP_GE,
711 [ FILTER_OP_LE ] = &&LABEL_FILTER_OP_LE,
712
713 /* string binary comparator */
714 [ FILTER_OP_EQ_STRING ] = &&LABEL_FILTER_OP_EQ_STRING,
715 [ FILTER_OP_NE_STRING ] = &&LABEL_FILTER_OP_NE_STRING,
716 [ FILTER_OP_GT_STRING ] = &&LABEL_FILTER_OP_GT_STRING,
717 [ FILTER_OP_LT_STRING ] = &&LABEL_FILTER_OP_LT_STRING,
718 [ FILTER_OP_GE_STRING ] = &&LABEL_FILTER_OP_GE_STRING,
719 [ FILTER_OP_LE_STRING ] = &&LABEL_FILTER_OP_LE_STRING,
720
721 /* globbing pattern binary comparator */
722 [ FILTER_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING,
723 [ FILTER_OP_NE_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING,
724
725 /* s64 binary comparator */
726 [ FILTER_OP_EQ_S64 ] = &&LABEL_FILTER_OP_EQ_S64,
727 [ FILTER_OP_NE_S64 ] = &&LABEL_FILTER_OP_NE_S64,
728 [ FILTER_OP_GT_S64 ] = &&LABEL_FILTER_OP_GT_S64,
729 [ FILTER_OP_LT_S64 ] = &&LABEL_FILTER_OP_LT_S64,
730 [ FILTER_OP_GE_S64 ] = &&LABEL_FILTER_OP_GE_S64,
731 [ FILTER_OP_LE_S64 ] = &&LABEL_FILTER_OP_LE_S64,
732
733 /* double binary comparator */
734 [ FILTER_OP_EQ_DOUBLE ] = &&LABEL_FILTER_OP_EQ_DOUBLE,
735 [ FILTER_OP_NE_DOUBLE ] = &&LABEL_FILTER_OP_NE_DOUBLE,
736 [ FILTER_OP_GT_DOUBLE ] = &&LABEL_FILTER_OP_GT_DOUBLE,
737 [ FILTER_OP_LT_DOUBLE ] = &&LABEL_FILTER_OP_LT_DOUBLE,
738 [ FILTER_OP_GE_DOUBLE ] = &&LABEL_FILTER_OP_GE_DOUBLE,
739 [ FILTER_OP_LE_DOUBLE ] = &&LABEL_FILTER_OP_LE_DOUBLE,
740
741 /* Mixed S64-double binary comparators */
742 [ FILTER_OP_EQ_DOUBLE_S64 ] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64,
743 [ FILTER_OP_NE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_NE_DOUBLE_S64,
744 [ FILTER_OP_GT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GT_DOUBLE_S64,
745 [ FILTER_OP_LT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LT_DOUBLE_S64,
746 [ FILTER_OP_GE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GE_DOUBLE_S64,
747 [ FILTER_OP_LE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LE_DOUBLE_S64,
748
749 [ FILTER_OP_EQ_S64_DOUBLE ] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE,
750 [ FILTER_OP_NE_S64_DOUBLE ] = &&LABEL_FILTER_OP_NE_S64_DOUBLE,
751 [ FILTER_OP_GT_S64_DOUBLE ] = &&LABEL_FILTER_OP_GT_S64_DOUBLE,
752 [ FILTER_OP_LT_S64_DOUBLE ] = &&LABEL_FILTER_OP_LT_S64_DOUBLE,
753 [ FILTER_OP_GE_S64_DOUBLE ] = &&LABEL_FILTER_OP_GE_S64_DOUBLE,
754 [ FILTER_OP_LE_S64_DOUBLE ] = &&LABEL_FILTER_OP_LE_S64_DOUBLE,
755
756 /* unary */
757 [ FILTER_OP_UNARY_PLUS ] = &&LABEL_FILTER_OP_UNARY_PLUS,
758 [ FILTER_OP_UNARY_MINUS ] = &&LABEL_FILTER_OP_UNARY_MINUS,
759 [ FILTER_OP_UNARY_NOT ] = &&LABEL_FILTER_OP_UNARY_NOT,
760 [ FILTER_OP_UNARY_PLUS_S64 ] = &&LABEL_FILTER_OP_UNARY_PLUS_S64,
761 [ FILTER_OP_UNARY_MINUS_S64 ] = &&LABEL_FILTER_OP_UNARY_MINUS_S64,
762 [ FILTER_OP_UNARY_NOT_S64 ] = &&LABEL_FILTER_OP_UNARY_NOT_S64,
763 [ FILTER_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE,
764 [ FILTER_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE,
765 [ FILTER_OP_UNARY_NOT_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE,
766
767 /* logical */
768 [ FILTER_OP_AND ] = &&LABEL_FILTER_OP_AND,
769 [ FILTER_OP_OR ] = &&LABEL_FILTER_OP_OR,
770
771 /* load field ref */
772 [ FILTER_OP_LOAD_FIELD_REF ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF,
773 [ FILTER_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING,
774 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE,
775 [ FILTER_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64,
776 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE,
777
778 /* load from immediate operand */
779 [ FILTER_OP_LOAD_STRING ] = &&LABEL_FILTER_OP_LOAD_STRING,
780 [ FILTER_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING,
781 [ FILTER_OP_LOAD_S64 ] = &&LABEL_FILTER_OP_LOAD_S64,
782 [ FILTER_OP_LOAD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_DOUBLE,
783
784 /* cast */
785 [ FILTER_OP_CAST_TO_S64 ] = &&LABEL_FILTER_OP_CAST_TO_S64,
786 [ FILTER_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64,
787 [ FILTER_OP_CAST_NOP ] = &&LABEL_FILTER_OP_CAST_NOP,
788
789 /* get context ref */
790 [ FILTER_OP_GET_CONTEXT_REF ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF,
791 [ FILTER_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING,
792 [ FILTER_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64,
793 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE,
794
795 /* load userspace field ref */
796 [ FILTER_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_STRING,
797 [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE,
798
799 /* Instructions for recursive traversal through composed types. */
800 [ FILTER_OP_GET_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_CONTEXT_ROOT,
801 [ FILTER_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_APP_CONTEXT_ROOT,
802 [ FILTER_OP_GET_PAYLOAD_ROOT ] = &&LABEL_FILTER_OP_GET_PAYLOAD_ROOT,
803
804 [ FILTER_OP_GET_SYMBOL ] = &&LABEL_FILTER_OP_GET_SYMBOL,
805 [ FILTER_OP_GET_SYMBOL_FIELD ] = &&LABEL_FILTER_OP_GET_SYMBOL_FIELD,
806 [ FILTER_OP_GET_INDEX_U16 ] = &&LABEL_FILTER_OP_GET_INDEX_U16,
807 [ FILTER_OP_GET_INDEX_U64 ] = &&LABEL_FILTER_OP_GET_INDEX_U64,
808
809 [ FILTER_OP_LOAD_FIELD ] = &&LABEL_FILTER_OP_LOAD_FIELD,
810 [ FILTER_OP_LOAD_FIELD_S8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S8,
811 [ FILTER_OP_LOAD_FIELD_S16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S16,
812 [ FILTER_OP_LOAD_FIELD_S32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S32,
813 [ FILTER_OP_LOAD_FIELD_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S64,
814 [ FILTER_OP_LOAD_FIELD_U8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U8,
815 [ FILTER_OP_LOAD_FIELD_U16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U16,
816 [ FILTER_OP_LOAD_FIELD_U32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U32,
817 [ FILTER_OP_LOAD_FIELD_U64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U64,
818 [ FILTER_OP_LOAD_FIELD_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_STRING,
819 [ FILTER_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_SEQUENCE,
820 [ FILTER_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_DOUBLE,
821
822 [ FILTER_OP_UNARY_BIT_NOT ] = &&LABEL_FILTER_OP_UNARY_BIT_NOT,
823
824 [ FILTER_OP_RETURN_S64 ] = &&LABEL_FILTER_OP_RETURN_S64,
825 };
826 #endif /* #ifndef INTERPRETER_USE_SWITCH */
827
828 START_OP
829
830 OP(FILTER_OP_UNKNOWN):
831 OP(FILTER_OP_LOAD_FIELD_REF):
832 OP(FILTER_OP_GET_CONTEXT_REF):
833 #ifdef INTERPRETER_USE_SWITCH
834 default:
835 #endif /* INTERPRETER_USE_SWITCH */
836 printk(KERN_WARNING "LTTng: filter: unknown bytecode op %u\n",
837 (unsigned int) *(filter_opcode_t *) pc);
838 ret = -EINVAL;
839 goto end;
840
841 OP(FILTER_OP_RETURN):
842 OP(FILTER_OP_RETURN_S64):
843 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
844 switch (estack_ax_t) {
845 case REG_S64:
846 case REG_U64:
847 retval = !!estack_ax_v;
848 break;
849 case REG_DOUBLE:
850 case REG_STRING:
851 case REG_PTR:
852 if (!output) {
853 ret = -EINVAL;
854 goto end;
855 }
856 retval = 0;
857 break;
858 case REG_STAR_GLOB_STRING:
859 case REG_TYPE_UNKNOWN:
860 ret = -EINVAL;
861 goto end;
862 }
863 ret = 0;
864 goto end;
865
866 /* binary */
867 OP(FILTER_OP_MUL):
868 OP(FILTER_OP_DIV):
869 OP(FILTER_OP_MOD):
870 OP(FILTER_OP_PLUS):
871 OP(FILTER_OP_MINUS):
872 printk(KERN_WARNING "LTTng: filter: unsupported bytecode op %u\n",
873 (unsigned int) *(filter_opcode_t *) pc);
874 ret = -EINVAL;
875 goto end;
876
877 OP(FILTER_OP_EQ):
878 OP(FILTER_OP_NE):
879 OP(FILTER_OP_GT):
880 OP(FILTER_OP_LT):
881 OP(FILTER_OP_GE):
882 OP(FILTER_OP_LE):
883 printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
884 (unsigned int) *(filter_opcode_t *) pc);
885 ret = -EINVAL;
886 goto end;
887
888 OP(FILTER_OP_EQ_STRING):
889 {
890 int res;
891
892 res = (stack_strcmp(stack, top, "==") == 0);
893 estack_pop(stack, top, ax, bx, ax_t, bx_t);
894 estack_ax_v = res;
895 estack_ax_t = REG_S64;
896 next_pc += sizeof(struct binary_op);
897 PO;
898 }
899 OP(FILTER_OP_NE_STRING):
900 {
901 int res;
902
903 res = (stack_strcmp(stack, top, "!=") != 0);
904 estack_pop(stack, top, ax, bx, ax_t, bx_t);
905 estack_ax_v = res;
906 estack_ax_t = REG_S64;
907 next_pc += sizeof(struct binary_op);
908 PO;
909 }
910 OP(FILTER_OP_GT_STRING):
911 {
912 int res;
913
914 res = (stack_strcmp(stack, top, ">") > 0);
915 estack_pop(stack, top, ax, bx, ax_t, bx_t);
916 estack_ax_v = res;
917 estack_ax_t = REG_S64;
918 next_pc += sizeof(struct binary_op);
919 PO;
920 }
921 OP(FILTER_OP_LT_STRING):
922 {
923 int res;
924
925 res = (stack_strcmp(stack, top, "<") < 0);
926 estack_pop(stack, top, ax, bx, ax_t, bx_t);
927 estack_ax_v = res;
928 estack_ax_t = REG_S64;
929 next_pc += sizeof(struct binary_op);
930 PO;
931 }
932 OP(FILTER_OP_GE_STRING):
933 {
934 int res;
935
936 res = (stack_strcmp(stack, top, ">=") >= 0);
937 estack_pop(stack, top, ax, bx, ax_t, bx_t);
938 estack_ax_v = res;
939 estack_ax_t = REG_S64;
940 next_pc += sizeof(struct binary_op);
941 PO;
942 }
943 OP(FILTER_OP_LE_STRING):
944 {
945 int res;
946
947 res = (stack_strcmp(stack, top, "<=") <= 0);
948 estack_pop(stack, top, ax, bx, ax_t, bx_t);
949 estack_ax_v = res;
950 estack_ax_t = REG_S64;
951 next_pc += sizeof(struct binary_op);
952 PO;
953 }
954
955 OP(FILTER_OP_EQ_STAR_GLOB_STRING):
956 {
957 int res;
958
959 res = (stack_star_glob_match(stack, top, "==") == 0);
960 estack_pop(stack, top, ax, bx, ax_t, bx_t);
961 estack_ax_v = res;
962 estack_ax_t = REG_S64;
963 next_pc += sizeof(struct binary_op);
964 PO;
965 }
966 OP(FILTER_OP_NE_STAR_GLOB_STRING):
967 {
968 int res;
969
970 res = (stack_star_glob_match(stack, top, "!=") != 0);
971 estack_pop(stack, top, ax, bx, ax_t, bx_t);
972 estack_ax_v = res;
973 estack_ax_t = REG_S64;
974 next_pc += sizeof(struct binary_op);
975 PO;
976 }
977
978 OP(FILTER_OP_EQ_S64):
979 {
980 int res;
981
982 res = (estack_bx_v == estack_ax_v);
983 estack_pop(stack, top, ax, bx, ax_t, bx_t);
984 estack_ax_v = res;
985 estack_ax_t = REG_S64;
986 next_pc += sizeof(struct binary_op);
987 PO;
988 }
989 OP(FILTER_OP_NE_S64):
990 {
991 int res;
992
993 res = (estack_bx_v != estack_ax_v);
994 estack_pop(stack, top, ax, bx, ax_t, bx_t);
995 estack_ax_v = res;
996 estack_ax_t = REG_S64;
997 next_pc += sizeof(struct binary_op);
998 PO;
999 }
1000 OP(FILTER_OP_GT_S64):
1001 {
1002 int res;
1003
1004 res = (estack_bx_v > estack_ax_v);
1005 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1006 estack_ax_v = res;
1007 estack_ax_t = REG_S64;
1008 next_pc += sizeof(struct binary_op);
1009 PO;
1010 }
1011 OP(FILTER_OP_LT_S64):
1012 {
1013 int res;
1014
1015 res = (estack_bx_v < estack_ax_v);
1016 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1017 estack_ax_v = res;
1018 estack_ax_t = REG_S64;
1019 next_pc += sizeof(struct binary_op);
1020 PO;
1021 }
1022 OP(FILTER_OP_GE_S64):
1023 {
1024 int res;
1025
1026 res = (estack_bx_v >= estack_ax_v);
1027 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1028 estack_ax_v = res;
1029 estack_ax_t = REG_S64;
1030 next_pc += sizeof(struct binary_op);
1031 PO;
1032 }
1033 OP(FILTER_OP_LE_S64):
1034 {
1035 int res;
1036
1037 res = (estack_bx_v <= estack_ax_v);
1038 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1039 estack_ax_v = res;
1040 estack_ax_t = REG_S64;
1041 next_pc += sizeof(struct binary_op);
1042 PO;
1043 }
1044
1045 OP(FILTER_OP_EQ_DOUBLE):
1046 OP(FILTER_OP_NE_DOUBLE):
1047 OP(FILTER_OP_GT_DOUBLE):
1048 OP(FILTER_OP_LT_DOUBLE):
1049 OP(FILTER_OP_GE_DOUBLE):
1050 OP(FILTER_OP_LE_DOUBLE):
1051 {
1052 BUG_ON(1);
1053 PO;
1054 }
1055
1056 /* Mixed S64-double binary comparators */
1057 OP(FILTER_OP_EQ_DOUBLE_S64):
1058 OP(FILTER_OP_NE_DOUBLE_S64):
1059 OP(FILTER_OP_GT_DOUBLE_S64):
1060 OP(FILTER_OP_LT_DOUBLE_S64):
1061 OP(FILTER_OP_GE_DOUBLE_S64):
1062 OP(FILTER_OP_LE_DOUBLE_S64):
1063 OP(FILTER_OP_EQ_S64_DOUBLE):
1064 OP(FILTER_OP_NE_S64_DOUBLE):
1065 OP(FILTER_OP_GT_S64_DOUBLE):
1066 OP(FILTER_OP_LT_S64_DOUBLE):
1067 OP(FILTER_OP_GE_S64_DOUBLE):
1068 OP(FILTER_OP_LE_S64_DOUBLE):
1069 {
1070 BUG_ON(1);
1071 PO;
1072 }
1073 OP(FILTER_OP_BIT_RSHIFT):
1074 {
1075 int64_t res;
1076
1077 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1078 ret = -EINVAL;
1079 goto end;
1080 }
1081
1082 /* Catch undefined behavior. */
1083 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1084 ret = -EINVAL;
1085 goto end;
1086 }
1087 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1088 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1089 estack_ax_v = res;
1090 estack_ax_t = REG_U64;
1091 next_pc += sizeof(struct binary_op);
1092 PO;
1093 }
1094 OP(FILTER_OP_BIT_LSHIFT):
1095 {
1096 int64_t res;
1097
1098 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1099 ret = -EINVAL;
1100 goto end;
1101 }
1102
1103 /* Catch undefined behavior. */
1104 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1105 ret = -EINVAL;
1106 goto end;
1107 }
1108 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1109 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1110 estack_ax_v = res;
1111 estack_ax_t = REG_U64;
1112 next_pc += sizeof(struct binary_op);
1113 PO;
1114 }
1115 OP(FILTER_OP_BIT_AND):
1116 {
1117 int64_t res;
1118
1119 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1120 ret = -EINVAL;
1121 goto end;
1122 }
1123
1124 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1125 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1126 estack_ax_v = res;
1127 estack_ax_t = REG_U64;
1128 next_pc += sizeof(struct binary_op);
1129 PO;
1130 }
1131 OP(FILTER_OP_BIT_OR):
1132 {
1133 int64_t res;
1134
1135 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1136 ret = -EINVAL;
1137 goto end;
1138 }
1139
1140 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1141 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1142 estack_ax_v = res;
1143 estack_ax_t = REG_U64;
1144 next_pc += sizeof(struct binary_op);
1145 PO;
1146 }
1147 OP(FILTER_OP_BIT_XOR):
1148 {
1149 int64_t res;
1150
1151 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1152 ret = -EINVAL;
1153 goto end;
1154 }
1155
1156 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1157 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1158 estack_ax_v = res;
1159 estack_ax_t = REG_U64;
1160 next_pc += sizeof(struct binary_op);
1161 PO;
1162 }
1163
1164 /* unary */
1165 OP(FILTER_OP_UNARY_PLUS):
1166 OP(FILTER_OP_UNARY_MINUS):
1167 OP(FILTER_OP_UNARY_NOT):
1168 printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
1169 (unsigned int) *(filter_opcode_t *) pc);
1170 ret = -EINVAL;
1171 goto end;
1172
1173
1174 OP(FILTER_OP_UNARY_BIT_NOT):
1175 {
1176 estack_ax_v = ~(uint64_t) estack_ax_v;
1177 estack_ax_t = REG_S64;
1178 next_pc += sizeof(struct unary_op);
1179 PO;
1180 }
1181
1182 OP(FILTER_OP_UNARY_PLUS_S64):
1183 {
1184 next_pc += sizeof(struct unary_op);
1185 PO;
1186 }
1187 OP(FILTER_OP_UNARY_MINUS_S64):
1188 {
1189 estack_ax_v = -estack_ax_v;
1190 estack_ax_t = REG_S64;
1191 next_pc += sizeof(struct unary_op);
1192 PO;
1193 }
1194 OP(FILTER_OP_UNARY_PLUS_DOUBLE):
1195 OP(FILTER_OP_UNARY_MINUS_DOUBLE):
1196 {
1197 BUG_ON(1);
1198 PO;
1199 }
1200 OP(FILTER_OP_UNARY_NOT_S64):
1201 {
1202 estack_ax_v = !estack_ax_v;
1203 estack_ax_t = REG_S64;
1204 next_pc += sizeof(struct unary_op);
1205 PO;
1206 }
1207 OP(FILTER_OP_UNARY_NOT_DOUBLE):
1208 {
1209 BUG_ON(1);
1210 PO;
1211 }
1212
1213 /* logical */
1214 OP(FILTER_OP_AND):
1215 {
1216 struct logical_op *insn = (struct logical_op *) pc;
1217
1218 /* If AX is 0, skip and evaluate to 0 */
1219 if (unlikely(estack_ax_v == 0)) {
1220 dbg_printk("Jumping to bytecode offset %u\n",
1221 (unsigned int) insn->skip_offset);
1222 next_pc = start_pc + insn->skip_offset;
1223 } else {
1224 /* Pop 1 when jump not taken */
1225 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1226 next_pc += sizeof(struct logical_op);
1227 }
1228 PO;
1229 }
1230 OP(FILTER_OP_OR):
1231 {
1232 struct logical_op *insn = (struct logical_op *) pc;
1233
1234 /* If AX is nonzero, skip and evaluate to 1 */
1235
1236 if (unlikely(estack_ax_v != 0)) {
1237 estack_ax_v = 1;
1238 dbg_printk("Jumping to bytecode offset %u\n",
1239 (unsigned int) insn->skip_offset);
1240 next_pc = start_pc + insn->skip_offset;
1241 } else {
1242 /* Pop 1 when jump not taken */
1243 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1244 next_pc += sizeof(struct logical_op);
1245 }
1246 PO;
1247 }
1248
1249
1250 /* load field ref */
1251 OP(FILTER_OP_LOAD_FIELD_REF_STRING):
1252 {
1253 struct load_op *insn = (struct load_op *) pc;
1254 struct field_ref *ref = (struct field_ref *) insn->data;
1255
1256 dbg_printk("load field ref offset %u type string\n",
1257 ref->offset);
1258 estack_push(stack, top, ax, bx, ax_t, bx_t);
1259 estack_ax(stack, top)->u.s.str =
1260 *(const char * const *) &interpreter_stack_data[ref->offset];
1261 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1262 dbg_printk("Filter warning: loading a NULL string.\n");
1263 ret = -EINVAL;
1264 goto end;
1265 }
1266 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1267 estack_ax(stack, top)->u.s.literal_type =
1268 ESTACK_STRING_LITERAL_TYPE_NONE;
1269 estack_ax(stack, top)->u.s.user = 0;
1270 estack_ax(stack, top)->type = REG_STRING;
1271 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1272 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1273 PO;
1274 }
1275
1276 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE):
1277 {
1278 struct load_op *insn = (struct load_op *) pc;
1279 struct field_ref *ref = (struct field_ref *) insn->data;
1280
1281 dbg_printk("load field ref offset %u type sequence\n",
1282 ref->offset);
1283 estack_push(stack, top, ax, bx, ax_t, bx_t);
1284 estack_ax(stack, top)->u.s.seq_len =
1285 *(unsigned long *) &interpreter_stack_data[ref->offset];
1286 estack_ax(stack, top)->u.s.str =
1287 *(const char **) (&interpreter_stack_data[ref->offset
1288 + sizeof(unsigned long)]);
1289 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1290 dbg_printk("Filter warning: loading a NULL sequence.\n");
1291 ret = -EINVAL;
1292 goto end;
1293 }
1294 estack_ax(stack, top)->u.s.literal_type =
1295 ESTACK_STRING_LITERAL_TYPE_NONE;
1296 estack_ax(stack, top)->u.s.user = 0;
1297 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1298 PO;
1299 }
1300
1301 OP(FILTER_OP_LOAD_FIELD_REF_S64):
1302 {
1303 struct load_op *insn = (struct load_op *) pc;
1304 struct field_ref *ref = (struct field_ref *) insn->data;
1305
1306 dbg_printk("load field ref offset %u type s64\n",
1307 ref->offset);
1308 estack_push(stack, top, ax, bx, ax_t, bx_t);
1309 estack_ax_v =
1310 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
1311 estack_ax_t = REG_S64;
1312 dbg_printk("ref load s64 %lld\n",
1313 (long long) estack_ax_v);
1314 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1315 PO;
1316 }
1317
1318 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE):
1319 {
1320 BUG_ON(1);
1321 PO;
1322 }
1323
1324 /* load from immediate operand */
1325 OP(FILTER_OP_LOAD_STRING):
1326 {
1327 struct load_op *insn = (struct load_op *) pc;
1328
1329 dbg_printk("load string %s\n", insn->data);
1330 estack_push(stack, top, ax, bx, ax_t, bx_t);
1331 estack_ax(stack, top)->u.s.str = insn->data;
1332 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1333 estack_ax(stack, top)->u.s.literal_type =
1334 ESTACK_STRING_LITERAL_TYPE_PLAIN;
1335 estack_ax(stack, top)->u.s.user = 0;
1336 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1337 PO;
1338 }
1339
1340 OP(FILTER_OP_LOAD_STAR_GLOB_STRING):
1341 {
1342 struct load_op *insn = (struct load_op *) pc;
1343
1344 dbg_printk("load globbing pattern %s\n", insn->data);
1345 estack_push(stack, top, ax, bx, ax_t, bx_t);
1346 estack_ax(stack, top)->u.s.str = insn->data;
1347 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1348 estack_ax(stack, top)->u.s.literal_type =
1349 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
1350 estack_ax(stack, top)->u.s.user = 0;
1351 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1352 PO;
1353 }
1354
1355 OP(FILTER_OP_LOAD_S64):
1356 {
1357 struct load_op *insn = (struct load_op *) pc;
1358
1359 estack_push(stack, top, ax, bx, ax_t, bx_t);
1360 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
1361 estack_ax_t = REG_S64;
1362 dbg_printk("load s64 %lld\n",
1363 (long long) estack_ax_v);
1364 next_pc += sizeof(struct load_op)
1365 + sizeof(struct literal_numeric);
1366 PO;
1367 }
1368
1369 OP(FILTER_OP_LOAD_DOUBLE):
1370 {
1371 BUG_ON(1);
1372 PO;
1373 }
1374
1375 /* cast */
1376 OP(FILTER_OP_CAST_TO_S64):
1377 printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
1378 (unsigned int) *(filter_opcode_t *) pc);
1379 ret = -EINVAL;
1380 goto end;
1381
1382 OP(FILTER_OP_CAST_DOUBLE_TO_S64):
1383 {
1384 BUG_ON(1);
1385 PO;
1386 }
1387
1388 OP(FILTER_OP_CAST_NOP):
1389 {
1390 next_pc += sizeof(struct cast_op);
1391 PO;
1392 }
1393
1394 /* get context ref */
1395 OP(FILTER_OP_GET_CONTEXT_REF_STRING):
1396 {
1397 struct load_op *insn = (struct load_op *) pc;
1398 struct field_ref *ref = (struct field_ref *) insn->data;
1399 struct lttng_ctx_field *ctx_field;
1400 union lttng_ctx_value v;
1401
1402 dbg_printk("get context ref offset %u type string\n",
1403 ref->offset);
1404 ctx_field = &lttng_static_ctx->fields[ref->offset];
1405 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1406 estack_push(stack, top, ax, bx, ax_t, bx_t);
1407 estack_ax(stack, top)->u.s.str = v.str;
1408 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1409 dbg_printk("Filter warning: loading a NULL string.\n");
1410 ret = -EINVAL;
1411 goto end;
1412 }
1413 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1414 estack_ax(stack, top)->u.s.literal_type =
1415 ESTACK_STRING_LITERAL_TYPE_NONE;
1416 estack_ax(stack, top)->u.s.user = 0;
1417 estack_ax(stack, top)->type = REG_STRING;
1418 dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
1419 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1420 PO;
1421 }
1422
1423 OP(FILTER_OP_GET_CONTEXT_REF_S64):
1424 {
1425 struct load_op *insn = (struct load_op *) pc;
1426 struct field_ref *ref = (struct field_ref *) insn->data;
1427 struct lttng_ctx_field *ctx_field;
1428 union lttng_ctx_value v;
1429
1430 dbg_printk("get context ref offset %u type s64\n",
1431 ref->offset);
1432 ctx_field = &lttng_static_ctx->fields[ref->offset];
1433 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1434 estack_push(stack, top, ax, bx, ax_t, bx_t);
1435 estack_ax_v = v.s64;
1436 estack_ax_t = REG_S64;
1437 dbg_printk("ref get context s64 %lld\n",
1438 (long long) estack_ax_v);
1439 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1440 PO;
1441 }
1442
1443 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE):
1444 {
1445 BUG_ON(1);
1446 PO;
1447 }
1448
1449 /* load userspace field ref */
1450 OP(FILTER_OP_LOAD_FIELD_REF_USER_STRING):
1451 {
1452 struct load_op *insn = (struct load_op *) pc;
1453 struct field_ref *ref = (struct field_ref *) insn->data;
1454
1455 dbg_printk("load field ref offset %u type user string\n",
1456 ref->offset);
1457 estack_push(stack, top, ax, bx, ax_t, bx_t);
1458 estack_ax(stack, top)->u.s.user_str =
1459 *(const char * const *) &interpreter_stack_data[ref->offset];
1460 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1461 dbg_printk("Filter warning: loading a NULL string.\n");
1462 ret = -EINVAL;
1463 goto end;
1464 }
1465 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1466 estack_ax(stack, top)->u.s.literal_type =
1467 ESTACK_STRING_LITERAL_TYPE_NONE;
1468 estack_ax(stack, top)->u.s.user = 1;
1469 estack_ax(stack, top)->type = REG_STRING;
1470 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1471 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1472 PO;
1473 }
1474
1475 OP(FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1476 {
1477 struct load_op *insn = (struct load_op *) pc;
1478 struct field_ref *ref = (struct field_ref *) insn->data;
1479
1480 dbg_printk("load field ref offset %u type user sequence\n",
1481 ref->offset);
1482 estack_push(stack, top, ax, bx, ax_t, bx_t);
1483 estack_ax(stack, top)->u.s.seq_len =
1484 *(unsigned long *) &interpreter_stack_data[ref->offset];
1485 estack_ax(stack, top)->u.s.user_str =
1486 *(const char **) (&interpreter_stack_data[ref->offset
1487 + sizeof(unsigned long)]);
1488 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1489 dbg_printk("Filter warning: loading a NULL sequence.\n");
1490 ret = -EINVAL;
1491 goto end;
1492 }
1493 estack_ax(stack, top)->u.s.literal_type =
1494 ESTACK_STRING_LITERAL_TYPE_NONE;
1495 estack_ax(stack, top)->u.s.user = 1;
1496 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1497 PO;
1498 }
1499
1500 OP(FILTER_OP_GET_CONTEXT_ROOT):
1501 {
1502 dbg_printk("op get context root\n");
1503 estack_push(stack, top, ax, bx, ax_t, bx_t);
1504 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
1505 /* "field" only needed for variants. */
1506 estack_ax(stack, top)->u.ptr.field = NULL;
1507 estack_ax(stack, top)->type = REG_PTR;
1508 next_pc += sizeof(struct load_op);
1509 PO;
1510 }
1511
1512 OP(FILTER_OP_GET_APP_CONTEXT_ROOT):
1513 {
1514 BUG_ON(1);
1515 PO;
1516 }
1517
1518 OP(FILTER_OP_GET_PAYLOAD_ROOT):
1519 {
1520 dbg_printk("op get app payload root\n");
1521 estack_push(stack, top, ax, bx, ax_t, bx_t);
1522 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
1523 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
1524 /* "field" only needed for variants. */
1525 estack_ax(stack, top)->u.ptr.field = NULL;
1526 estack_ax(stack, top)->type = REG_PTR;
1527 next_pc += sizeof(struct load_op);
1528 PO;
1529 }
1530
1531 OP(FILTER_OP_GET_SYMBOL):
1532 {
1533 dbg_printk("op get symbol\n");
1534 switch (estack_ax(stack, top)->u.ptr.type) {
1535 case LOAD_OBJECT:
1536 printk(KERN_WARNING "LTTng: filter: Nested fields not implemented yet.\n");
1537 ret = -EINVAL;
1538 goto end;
1539 case LOAD_ROOT_CONTEXT:
1540 case LOAD_ROOT_APP_CONTEXT:
1541 case LOAD_ROOT_PAYLOAD:
1542 /*
1543 * symbol lookup is performed by
1544 * specialization.
1545 */
1546 ret = -EINVAL;
1547 goto end;
1548 }
1549 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1550 PO;
1551 }
1552
1553 OP(FILTER_OP_GET_SYMBOL_FIELD):
1554 {
1555 /*
1556 * Used for first variant encountered in a
1557 * traversal. Variants are not implemented yet.
1558 */
1559 ret = -EINVAL;
1560 goto end;
1561 }
1562
1563 OP(FILTER_OP_GET_INDEX_U16):
1564 {
1565 struct load_op *insn = (struct load_op *) pc;
1566 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1567
1568 dbg_printk("op get index u16\n");
1569 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1570 if (ret)
1571 goto end;
1572 estack_ax_v = estack_ax(stack, top)->u.v;
1573 estack_ax_t = estack_ax(stack, top)->type;
1574 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1575 PO;
1576 }
1577
1578 OP(FILTER_OP_GET_INDEX_U64):
1579 {
1580 struct load_op *insn = (struct load_op *) pc;
1581 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1582
1583 dbg_printk("op get index u64\n");
1584 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1585 if (ret)
1586 goto end;
1587 estack_ax_v = estack_ax(stack, top)->u.v;
1588 estack_ax_t = estack_ax(stack, top)->type;
1589 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1590 PO;
1591 }
1592
1593 OP(FILTER_OP_LOAD_FIELD):
1594 {
1595 dbg_printk("op load field\n");
1596 ret = dynamic_load_field(estack_ax(stack, top));
1597 if (ret)
1598 goto end;
1599 estack_ax_v = estack_ax(stack, top)->u.v;
1600 estack_ax_t = estack_ax(stack, top)->type;
1601 next_pc += sizeof(struct load_op);
1602 PO;
1603 }
1604
1605 OP(FILTER_OP_LOAD_FIELD_S8):
1606 {
1607 dbg_printk("op load field s8\n");
1608
1609 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1610 estack_ax_t = REG_S64;
1611 next_pc += sizeof(struct load_op);
1612 PO;
1613 }
1614 OP(FILTER_OP_LOAD_FIELD_S16):
1615 {
1616 dbg_printk("op load field s16\n");
1617
1618 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1619 estack_ax_t = REG_S64;
1620 next_pc += sizeof(struct load_op);
1621 PO;
1622 }
1623 OP(FILTER_OP_LOAD_FIELD_S32):
1624 {
1625 dbg_printk("op load field s32\n");
1626
1627 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1628 estack_ax_t = REG_S64;
1629 next_pc += sizeof(struct load_op);
1630 PO;
1631 }
1632 OP(FILTER_OP_LOAD_FIELD_S64):
1633 {
1634 dbg_printk("op load field s64\n");
1635
1636 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1637 estack_ax_t = REG_S64;
1638 next_pc += sizeof(struct load_op);
1639 PO;
1640 }
1641 OP(FILTER_OP_LOAD_FIELD_U8):
1642 {
1643 dbg_printk("op load field u8\n");
1644
1645 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1646 estack_ax_t = REG_S64;
1647 next_pc += sizeof(struct load_op);
1648 PO;
1649 }
1650 OP(FILTER_OP_LOAD_FIELD_U16):
1651 {
1652 dbg_printk("op load field u16\n");
1653
1654 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1655 estack_ax_t = REG_S64;
1656 next_pc += sizeof(struct load_op);
1657 PO;
1658 }
1659 OP(FILTER_OP_LOAD_FIELD_U32):
1660 {
1661 dbg_printk("op load field u32\n");
1662
1663 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1664 estack_ax_t = REG_S64;
1665 next_pc += sizeof(struct load_op);
1666 PO;
1667 }
1668 OP(FILTER_OP_LOAD_FIELD_U64):
1669 {
1670 dbg_printk("op load field u64\n");
1671
1672 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1673 estack_ax_t = REG_S64;
1674 next_pc += sizeof(struct load_op);
1675 PO;
1676 }
1677 OP(FILTER_OP_LOAD_FIELD_DOUBLE):
1678 {
1679 ret = -EINVAL;
1680 goto end;
1681 }
1682
1683 OP(FILTER_OP_LOAD_FIELD_STRING):
1684 {
1685 const char *str;
1686
1687 dbg_printk("op load field string\n");
1688 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
1689 estack_ax(stack, top)->u.s.str = str;
1690 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1691 dbg_printk("Filter warning: loading a NULL string.\n");
1692 ret = -EINVAL;
1693 goto end;
1694 }
1695 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1696 estack_ax(stack, top)->u.s.literal_type =
1697 ESTACK_STRING_LITERAL_TYPE_NONE;
1698 estack_ax(stack, top)->type = REG_STRING;
1699 next_pc += sizeof(struct load_op);
1700 PO;
1701 }
1702
1703 OP(FILTER_OP_LOAD_FIELD_SEQUENCE):
1704 {
1705 const char *ptr;
1706
1707 dbg_printk("op load field string sequence\n");
1708 ptr = estack_ax(stack, top)->u.ptr.ptr;
1709 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
1710 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
1711 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1712 dbg_printk("Filter warning: loading a NULL sequence.\n");
1713 ret = -EINVAL;
1714 goto end;
1715 }
1716 estack_ax(stack, top)->u.s.literal_type =
1717 ESTACK_STRING_LITERAL_TYPE_NONE;
1718 estack_ax(stack, top)->type = REG_STRING;
1719 next_pc += sizeof(struct load_op);
1720 PO;
1721 }
1722
1723 END_OP
1724 end:
1725 /* Return _DISCARD on error. */
1726 if (ret)
1727 return LTTNG_FILTER_DISCARD;
1728
1729 if (output) {
1730 return lttng_bytecode_interpret_format_output(
1731 estack_ax(stack, top), output);
1732 }
1733
1734 return retval;
1735 }
1736 LTTNG_STACK_FRAME_NON_STANDARD(bytecode_interpret);
1737
1738 uint64_t lttng_filter_interpret_bytecode(void *filter_data,
1739 struct lttng_probe_ctx *lttng_probe_ctx,
1740 const char *filter_stack_data)
1741 {
1742 return bytecode_interpret(filter_data, lttng_probe_ctx,
1743 filter_stack_data, NULL);
1744 }
1745
1746 #undef START_OP
1747 #undef OP
1748 #undef PO
1749 #undef END_OP
This page took 0.104673 seconds and 4 git commands to generate.