3273d4a0f284892b6d0188126cf8aea02f3b4234
[lttng-modules.git] / src / lttng-filter-interpreter.c
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-filter-interpreter.c
4 *
5 * LTTng modules filter interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
14
15 #include <lttng/filter.h>
16 #include <lttng/string-utils.h>
17
18 /*
19 * get_char should be called with page fault handler disabled if it is expected
20 * to handle user-space read.
21 */
22 static
23 char get_char(struct estack_entry *reg, size_t offset)
24 {
25 if (unlikely(offset >= reg->u.s.seq_len))
26 return '\0';
27 if (reg->u.s.user) {
28 char c;
29
30 /* Handle invalid access as end of string. */
31 if (unlikely(!lttng_access_ok(VERIFY_READ,
32 reg->u.s.user_str + offset,
33 sizeof(c))))
34 return '\0';
35 /* Handle fault (nonzero return value) as end of string. */
36 if (unlikely(__copy_from_user_inatomic(&c,
37 reg->u.s.user_str + offset,
38 sizeof(c))))
39 return '\0';
40 return c;
41 } else {
42 return reg->u.s.str[offset];
43 }
44 }
45
46 /*
47 * -1: wildcard found.
48 * -2: unknown escape char.
49 * 0: normal char.
50 */
51 static
52 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
53 {
54 switch (*c) {
55 case '\\':
56 (*offset)++;
57 *c = get_char(reg, *offset);
58 switch (*c) {
59 case '\\':
60 case '*':
61 return 0;
62 default:
63 return -2;
64 }
65 case '*':
66 return -1;
67 default:
68 return 0;
69 }
70 }
71
72 static
73 char get_char_at_cb(size_t at, void *data)
74 {
75 return get_char(data, at);
76 }
77
78 static
79 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
80 {
81 bool has_user = false;
82 int result;
83 struct estack_entry *pattern_reg;
84 struct estack_entry *candidate_reg;
85
86 /* Disable the page fault handler when reading from userspace. */
87 if (estack_bx(stack, top)->u.s.user
88 || estack_ax(stack, top)->u.s.user) {
89 has_user = true;
90 pagefault_disable();
91 }
92
93 /* Find out which side is the pattern vs. the candidate. */
94 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
95 pattern_reg = estack_ax(stack, top);
96 candidate_reg = estack_bx(stack, top);
97 } else {
98 pattern_reg = estack_bx(stack, top);
99 candidate_reg = estack_ax(stack, top);
100 }
101
102 /* Perform the match operation. */
103 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
104 pattern_reg, get_char_at_cb, candidate_reg);
105 if (has_user)
106 pagefault_enable();
107
108 return result;
109 }
110
/*
 * Compare the two string registers at the top of the stack (bx vs ax),
 * returning a strcmp()-like result: negative when bx sorts before ax,
 * 0 on match, positive when bx sorts after ax.
 *
 * Plain string literals may contain '*' wildcards and '\' escapes
 * (see parse_char()): a wildcard on either side ends the comparison
 * as a match.  Page faults are disabled around user-space string
 * accesses; get_char() turns faults into end of string ('\0').
 */
static
int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
{
	size_t offset_bx = 0, offset_ax = 0;
	int diff, has_user = 0;

	if (estack_bx(stack, top)->u.s.user
			|| estack_ax(stack, top)->u.s.user) {
		has_user = 1;
		pagefault_disable();
	}

	for (;;) {
		int ret;
		int escaped_r0 = 0;	/* Set when bx consumed an unknown escape. */
		char char_bx, char_ax;

		char_bx = get_char(estack_bx(stack, top), offset_bx);
		char_ax = get_char(estack_ax(stack, top), offset_ax);

		/* bx exhausted: equal if ax is too, else bx sorts first. */
		if (unlikely(char_bx == '\0')) {
			if (char_ax == '\0') {
				diff = 0;
				break;
			} else {
				if (estack_ax(stack, top)->u.s.literal_type ==
						ESTACK_STRING_LITERAL_TYPE_PLAIN) {
					ret = parse_char(estack_ax(stack, top),
						&char_ax, &offset_ax);
					/* A wildcard matches the empty suffix. */
					if (ret == -1) {
						diff = 0;
						break;
					}
				}
				diff = -1;
				break;
			}
		}
		/* ax exhausted (bx is not): bx sorts after, unless wildcard. */
		if (unlikely(char_ax == '\0')) {
			if (estack_bx(stack, top)->u.s.literal_type ==
					ESTACK_STRING_LITERAL_TYPE_PLAIN) {
				ret = parse_char(estack_bx(stack, top),
					&char_bx, &offset_bx);
				if (ret == -1) {
					diff = 0;
					break;
				}
			}
			diff = 1;
			break;
		}
		if (estack_bx(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_bx(stack, top),
				&char_bx, &offset_bx);
			if (ret == -1) {
				diff = 0;
				break;
			} else if (ret == -2) {
				escaped_r0 = 1;
			}
			/* else compare both char */
		}
		if (estack_ax(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_ax(stack, top),
				&char_ax, &offset_ax);
			if (ret == -1) {
				diff = 0;
				break;
			} else if (ret == -2) {
				/* ax has an unknown escape; bx must have one too. */
				if (!escaped_r0) {
					diff = -1;
					break;
				}
			} else {
				/* Only bx held an unknown escape: bx sorts after. */
				if (escaped_r0) {
					diff = 1;
					break;
				}
			}
		} else {
			if (escaped_r0) {
				diff = 1;
				break;
			}
		}
		diff = char_bx - char_ax;
		if (diff != 0)
			break;
		offset_bx++;
		offset_ax++;
	}
	if (has_user)
		pagefault_enable();

	return diff;
}
209
/*
 * Interpreter stub installed when no usable filter bytecode is attached:
 * unconditionally discard the event.
 */
uint64_t lttng_filter_interpret_bytecode_false(void *filter_data,
		struct lttng_probe_ctx *lttng_probe_ctx,
		const char *filter_stack_data)
{
	return LTTNG_FILTER_DISCARD;
}
216
#ifdef INTERPRETER_USE_SWITCH

/*
 * Fallback for compilers that do not support taking address of labels.
 *
 * Note: bytecode instructions live in bytecode->code[]; the separate
 * bytecode->data area holds runtime data records consumed by
 * dynamic_get_index() and must never be executed as instructions.
 */

#define START_OP							\
		start_pc = &bytecode->code[0];				\
		for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
				pc = next_pc) {				\
			dbg_printk("LTTng: Executing op %s (%u)\n",	\
				lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
				(unsigned int) *(filter_opcode_t *) pc); \
			switch (*(filter_opcode_t *) pc) {

#define OP(name)	case name

#define PO		break

#define END_OP		}						\
	}

#else

/*
 * Dispatch-table based interpreter (computed goto).
 */

#define START_OP							\
		start_pc = &bytecode->code[0];				\
		pc = next_pc = start_pc;				\
		if (unlikely(pc - start_pc >= bytecode->len))		\
			goto end;					\
		goto *dispatch[*(filter_opcode_t *) pc];

#define OP(name)							\
LABEL_##name

#define PO								\
		pc = next_pc;						\
		goto *dispatch[*(filter_opcode_t *) pc];

#define END_OP

#endif

/* True for the two integer register types (signed and unsigned 64-bit). */
#define IS_INTEGER_REGISTER(reg_type) \
	(reg_type == REG_S64 || reg_type == REG_U64)
265
/*
 * Load the context field at index @idx into @ptr as a LOAD_OBJECT.
 *
 * Integer and enum contexts are fetched by value into ptr->u (with
 * ptr->ptr aimed at that storage); string-like contexts (string, and
 * string arrays/sequences) are referenced by pointer.  Returns 0 on
 * success, -EINVAL for context field types the filter cannot load.
 */
static int context_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
		struct load_ptr *ptr,
		uint32_t idx)
{

	struct lttng_ctx_field *ctx_field;
	struct lttng_event_field *field;
	union lttng_ctx_value v;

	ctx_field = &lttng_static_ctx->fields[idx];
	field = &ctx_field->event_field;
	ptr->type = LOAD_OBJECT;
	/* field is only used for types nested within variants. */
	ptr->field = NULL;

	switch (field->type.atype) {
	case atype_integer:
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		if (field->type.u.integer.signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	case atype_enum_nestable:
	{
		/* Enums are loaded as their integer container type. */
		const struct lttng_integer_type *itype =
			&field->type.u.enum_nestable.container_type->u.integer;

		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		if (itype->signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	}
	case atype_array_nestable:
		/* Only arrays of string-encoded bytewise integers are supported. */
		if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: filter: Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
			printk(KERN_WARNING "LTTng: filter: Only string arrays are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_sequence_nestable:
		/* Same constraint as arrays: only string sequences. */
		if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: filter: Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
			printk(KERN_WARNING "LTTng: filter: Only string sequences are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_string:
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_struct_nestable:
		printk(KERN_WARNING "LTTng: filter: Structure type cannot be loaded.\n");
		return -EINVAL;
	case atype_variant_nestable:
		printk(KERN_WARNING "LTTng: filter: Variant type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "LTTng: filter: Unknown type: %d", (int) field->type.atype);
		return -EINVAL;
	}
	return 0;
}
354
/*
 * Apply a "get index" instruction to the pointer register @stack_top:
 * index into an array/sequence object, look up a context field, or
 * offset into the event payload.  @index locates the
 * filter_get_index_data record within the runtime data area.
 * Returns 0 on success, negative error code on failure.
 */
static int dynamic_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
		struct bytecode_runtime *runtime,
		uint64_t index, struct estack_entry *stack_top)
{
	int ret;
	const struct filter_get_index_data *gid;

	gid = (const struct filter_get_index_data *) &runtime->data[index];
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		switch (stack_top->u.ptr.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const char *ptr;

			/* gid->offset was bounds-checked at specialization time. */
			WARN_ON_ONCE(gid->offset >= gid->array_len);
			/* Skip count (unsigned long) */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			BUG_ON(stack_top->u.ptr.field->type.atype != atype_array_nestable);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const char *ptr;
			size_t ptr_seq_len;

			/* Sequence layout: element count, then data pointer. */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
			/* Sequence length is dynamic: bounds-check at runtime. */
			if (gid->offset >= gid->elem.len * ptr_seq_len) {
				ret = -EINVAL;
				goto end;
			}
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			BUG_ON(stack_top->u.ptr.field->type.atype != atype_sequence_nestable);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			printk(KERN_WARNING "LTTng: filter: Nested structures are not supported yet.\n");
			ret = -EINVAL;
			goto end;
		case OBJECT_TYPE_VARIANT:
		default:
			printk(KERN_WARNING "LTTng: filter: Unexpected get index type %d",
				(int) stack_top->u.ptr.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:	/* Fall-through */
	{
		ret = context_get_index(lttng_probe_ctx,
				&stack_top->u.ptr,
				gid->ctx_index);
		if (ret) {
			goto end;
		}
		break;
	}
	case LOAD_ROOT_PAYLOAD:
		stack_top->u.ptr.ptr += gid->offset;
		/* Strings are stored by reference within the payload. */
		if (gid->elem.type == OBJECT_TYPE_STRING)
			stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
		stack_top->u.ptr.object_type = gid->elem.type;
		stack_top->u.ptr.type = LOAD_OBJECT;
		stack_top->u.ptr.field = gid->field;
		break;
	}
	return 0;

end:
	return ret;
}
437
/*
 * Dereference the LOAD_OBJECT pointer register @stack_top and replace
 * it in place with the loaded value: integer objects become
 * REG_S64/REG_U64 (byte-swapped when the object's byte order is
 * reversed), string and string-sequence objects become REG_STRING.
 * Returns 0 on success, -EINVAL for object types that cannot be
 * loaded.
 */
static int dynamic_load_field(struct estack_entry *stack_top)
{
	int ret;

	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		/* Roots must be resolved to an object via get index/symbol first. */
		dbg_printk("Filter warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->u.ptr.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printk("op load field s8\n");
		stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_S64;
		break;
	case OBJECT_TYPE_S16:
	{
		int16_t tmp;

		dbg_printk("op load field s16\n");
		tmp = *(int16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S32:
	{
		int32_t tmp;

		dbg_printk("op load field s32\n");
		tmp = *(int32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S64:
	{
		int64_t tmp;

		dbg_printk("op load field s64\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_U8:
		dbg_printk("op load field u8\n");
		stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_U64;
		break;
	case OBJECT_TYPE_U16:
	{
		uint16_t tmp;

		dbg_printk("op load field u16\n");
		tmp = *(uint16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U32:
	{
		uint32_t tmp;

		dbg_printk("op load field u32\n");
		tmp = *(uint32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U64:
	{
		uint64_t tmp;

		dbg_printk("op load field u64\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_STRING:
	{
		const char *str;

		dbg_printk("op load field string\n");
		str = (const char *) stack_top->u.ptr.ptr;
		stack_top->u.s.str = str;
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		/* NUL-terminated string: no explicit length bound. */
		stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_STRING_SEQUENCE:
	{
		const char *ptr;

		dbg_printk("op load field string sequence\n");
		/* Sequence layout: length (unsigned long), then data pointer. */
		ptr = stack_top->u.ptr.ptr;
		stack_top->u.s.seq_len = *(unsigned long *) ptr;
		stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL sequence.\n");
			ret = -EINVAL;
			goto end;
		}
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_DYNAMIC:
		/*
		 * Dynamic types in context are looked up
		 * by context get index.
		 */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_DOUBLE:
		/* No floating point support in the kernel interpreter. */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		printk(KERN_WARNING "LTTng: filter: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
595
/*
 * Translate the interpreter stack-top register @ax into the
 * caller-visible @output structure.  REG_PTR registers holding scalar
 * or string object types are first materialized with
 * dynamic_load_field(), then reprocessed from the top.  Returns
 * LTTNG_FILTER_RECORD_FLAG on success, -EINVAL (or the
 * dynamic_load_field() error) for register/object types that cannot be
 * output.
 */
static
int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
		struct lttng_interpreter_output *output)
{
	int ret;

again:
	switch (ax->type) {
	case REG_S64:
		output->type = LTTNG_INTERPRETER_TYPE_S64;
		output->u.s = ax->u.v;
		break;
	case REG_U64:
		output->type = LTTNG_INTERPRETER_TYPE_U64;
		output->u.u = (uint64_t) ax->u.v;
		break;
	case REG_STRING:
		output->type = LTTNG_INTERPRETER_TYPE_STRING;
		output->u.str.str = ax->u.s.str;
		output->u.str.len = ax->u.s.seq_len;
		break;
	case REG_PTR:
		switch (ax->u.ptr.object_type) {
		case OBJECT_TYPE_S8:
		case OBJECT_TYPE_S16:
		case OBJECT_TYPE_S32:
		case OBJECT_TYPE_S64:
		case OBJECT_TYPE_U8:
		case OBJECT_TYPE_U16:
		case OBJECT_TYPE_U32:
		case OBJECT_TYPE_U64:
		case OBJECT_TYPE_DOUBLE:
		case OBJECT_TYPE_STRING:
		case OBJECT_TYPE_STRING_SEQUENCE:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			/* Retry after loading ptr into stack top. */
			goto again;
		case OBJECT_TYPE_SEQUENCE:
			/* Sequence layout: element count, then data pointer. */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
			output->u.sequence.nested_type = ax->u.ptr.field->type.u.sequence_nestable.elem_type;
			break;
		case OBJECT_TYPE_ARRAY:
			/* Skip count (unsigned long) */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = ax->u.ptr.field->type.u.array_nestable.length;
			output->u.sequence.nested_type = ax->u.ptr.field->type.u.array_nestable.elem_type;
			break;
		case OBJECT_TYPE_STRUCT:
		case OBJECT_TYPE_VARIANT:
		default:
			return -EINVAL;
		}

		break;
	case REG_STAR_GLOB_STRING:
	case REG_TYPE_UNKNOWN:
	default:
		return -EINVAL;
	}

	return LTTNG_FILTER_RECORD_FLAG;
}
663
664 /*
665 * Return 0 (discard), or raise the 0x1 flag (log event).
666 * Currently, other flags are kept for future extensions and have no
667 * effect.
668 */
669 static
670 uint64_t bytecode_interpret(void *interpreter_data,
671 struct lttng_probe_ctx *lttng_probe_ctx,
672 const char *interpreter_stack_data,
673 struct lttng_interpreter_output *output)
674 {
675 struct bytecode_runtime *bytecode = interpreter_data;
676 void *pc, *next_pc, *start_pc;
677 int ret = -EINVAL;
678 uint64_t retval = 0;
679 struct estack _stack;
680 struct estack *stack = &_stack;
681 register int64_t ax = 0, bx = 0;
682 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
683 register int top = FILTER_STACK_EMPTY;
684 #ifndef INTERPRETER_USE_SWITCH
685 static void *dispatch[NR_FILTER_OPS] = {
686 [ FILTER_OP_UNKNOWN ] = &&LABEL_FILTER_OP_UNKNOWN,
687
688 [ FILTER_OP_RETURN ] = &&LABEL_FILTER_OP_RETURN,
689
690 /* binary */
691 [ FILTER_OP_MUL ] = &&LABEL_FILTER_OP_MUL,
692 [ FILTER_OP_DIV ] = &&LABEL_FILTER_OP_DIV,
693 [ FILTER_OP_MOD ] = &&LABEL_FILTER_OP_MOD,
694 [ FILTER_OP_PLUS ] = &&LABEL_FILTER_OP_PLUS,
695 [ FILTER_OP_MINUS ] = &&LABEL_FILTER_OP_MINUS,
696 [ FILTER_OP_BIT_RSHIFT ] = &&LABEL_FILTER_OP_BIT_RSHIFT,
697 [ FILTER_OP_BIT_LSHIFT ] = &&LABEL_FILTER_OP_BIT_LSHIFT,
698 [ FILTER_OP_BIT_AND ] = &&LABEL_FILTER_OP_BIT_AND,
699 [ FILTER_OP_BIT_OR ] = &&LABEL_FILTER_OP_BIT_OR,
700 [ FILTER_OP_BIT_XOR ] = &&LABEL_FILTER_OP_BIT_XOR,
701
702 /* binary comparators */
703 [ FILTER_OP_EQ ] = &&LABEL_FILTER_OP_EQ,
704 [ FILTER_OP_NE ] = &&LABEL_FILTER_OP_NE,
705 [ FILTER_OP_GT ] = &&LABEL_FILTER_OP_GT,
706 [ FILTER_OP_LT ] = &&LABEL_FILTER_OP_LT,
707 [ FILTER_OP_GE ] = &&LABEL_FILTER_OP_GE,
708 [ FILTER_OP_LE ] = &&LABEL_FILTER_OP_LE,
709
710 /* string binary comparator */
711 [ FILTER_OP_EQ_STRING ] = &&LABEL_FILTER_OP_EQ_STRING,
712 [ FILTER_OP_NE_STRING ] = &&LABEL_FILTER_OP_NE_STRING,
713 [ FILTER_OP_GT_STRING ] = &&LABEL_FILTER_OP_GT_STRING,
714 [ FILTER_OP_LT_STRING ] = &&LABEL_FILTER_OP_LT_STRING,
715 [ FILTER_OP_GE_STRING ] = &&LABEL_FILTER_OP_GE_STRING,
716 [ FILTER_OP_LE_STRING ] = &&LABEL_FILTER_OP_LE_STRING,
717
718 /* globbing pattern binary comparator */
719 [ FILTER_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING,
720 [ FILTER_OP_NE_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING,
721
722 /* s64 binary comparator */
723 [ FILTER_OP_EQ_S64 ] = &&LABEL_FILTER_OP_EQ_S64,
724 [ FILTER_OP_NE_S64 ] = &&LABEL_FILTER_OP_NE_S64,
725 [ FILTER_OP_GT_S64 ] = &&LABEL_FILTER_OP_GT_S64,
726 [ FILTER_OP_LT_S64 ] = &&LABEL_FILTER_OP_LT_S64,
727 [ FILTER_OP_GE_S64 ] = &&LABEL_FILTER_OP_GE_S64,
728 [ FILTER_OP_LE_S64 ] = &&LABEL_FILTER_OP_LE_S64,
729
730 /* double binary comparator */
731 [ FILTER_OP_EQ_DOUBLE ] = &&LABEL_FILTER_OP_EQ_DOUBLE,
732 [ FILTER_OP_NE_DOUBLE ] = &&LABEL_FILTER_OP_NE_DOUBLE,
733 [ FILTER_OP_GT_DOUBLE ] = &&LABEL_FILTER_OP_GT_DOUBLE,
734 [ FILTER_OP_LT_DOUBLE ] = &&LABEL_FILTER_OP_LT_DOUBLE,
735 [ FILTER_OP_GE_DOUBLE ] = &&LABEL_FILTER_OP_GE_DOUBLE,
736 [ FILTER_OP_LE_DOUBLE ] = &&LABEL_FILTER_OP_LE_DOUBLE,
737
738 /* Mixed S64-double binary comparators */
739 [ FILTER_OP_EQ_DOUBLE_S64 ] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64,
740 [ FILTER_OP_NE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_NE_DOUBLE_S64,
741 [ FILTER_OP_GT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GT_DOUBLE_S64,
742 [ FILTER_OP_LT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LT_DOUBLE_S64,
743 [ FILTER_OP_GE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GE_DOUBLE_S64,
744 [ FILTER_OP_LE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LE_DOUBLE_S64,
745
746 [ FILTER_OP_EQ_S64_DOUBLE ] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE,
747 [ FILTER_OP_NE_S64_DOUBLE ] = &&LABEL_FILTER_OP_NE_S64_DOUBLE,
748 [ FILTER_OP_GT_S64_DOUBLE ] = &&LABEL_FILTER_OP_GT_S64_DOUBLE,
749 [ FILTER_OP_LT_S64_DOUBLE ] = &&LABEL_FILTER_OP_LT_S64_DOUBLE,
750 [ FILTER_OP_GE_S64_DOUBLE ] = &&LABEL_FILTER_OP_GE_S64_DOUBLE,
751 [ FILTER_OP_LE_S64_DOUBLE ] = &&LABEL_FILTER_OP_LE_S64_DOUBLE,
752
753 /* unary */
754 [ FILTER_OP_UNARY_PLUS ] = &&LABEL_FILTER_OP_UNARY_PLUS,
755 [ FILTER_OP_UNARY_MINUS ] = &&LABEL_FILTER_OP_UNARY_MINUS,
756 [ FILTER_OP_UNARY_NOT ] = &&LABEL_FILTER_OP_UNARY_NOT,
757 [ FILTER_OP_UNARY_PLUS_S64 ] = &&LABEL_FILTER_OP_UNARY_PLUS_S64,
758 [ FILTER_OP_UNARY_MINUS_S64 ] = &&LABEL_FILTER_OP_UNARY_MINUS_S64,
759 [ FILTER_OP_UNARY_NOT_S64 ] = &&LABEL_FILTER_OP_UNARY_NOT_S64,
760 [ FILTER_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE,
761 [ FILTER_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE,
762 [ FILTER_OP_UNARY_NOT_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE,
763
764 /* logical */
765 [ FILTER_OP_AND ] = &&LABEL_FILTER_OP_AND,
766 [ FILTER_OP_OR ] = &&LABEL_FILTER_OP_OR,
767
768 /* load field ref */
769 [ FILTER_OP_LOAD_FIELD_REF ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF,
770 [ FILTER_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING,
771 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE,
772 [ FILTER_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64,
773 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE,
774
775 /* load from immediate operand */
776 [ FILTER_OP_LOAD_STRING ] = &&LABEL_FILTER_OP_LOAD_STRING,
777 [ FILTER_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING,
778 [ FILTER_OP_LOAD_S64 ] = &&LABEL_FILTER_OP_LOAD_S64,
779 [ FILTER_OP_LOAD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_DOUBLE,
780
781 /* cast */
782 [ FILTER_OP_CAST_TO_S64 ] = &&LABEL_FILTER_OP_CAST_TO_S64,
783 [ FILTER_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64,
784 [ FILTER_OP_CAST_NOP ] = &&LABEL_FILTER_OP_CAST_NOP,
785
786 /* get context ref */
787 [ FILTER_OP_GET_CONTEXT_REF ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF,
788 [ FILTER_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING,
789 [ FILTER_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64,
790 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE,
791
792 /* load userspace field ref */
793 [ FILTER_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_STRING,
794 [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE,
795
796 /* Instructions for recursive traversal through composed types. */
797 [ FILTER_OP_GET_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_CONTEXT_ROOT,
798 [ FILTER_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_APP_CONTEXT_ROOT,
799 [ FILTER_OP_GET_PAYLOAD_ROOT ] = &&LABEL_FILTER_OP_GET_PAYLOAD_ROOT,
800
801 [ FILTER_OP_GET_SYMBOL ] = &&LABEL_FILTER_OP_GET_SYMBOL,
802 [ FILTER_OP_GET_SYMBOL_FIELD ] = &&LABEL_FILTER_OP_GET_SYMBOL_FIELD,
803 [ FILTER_OP_GET_INDEX_U16 ] = &&LABEL_FILTER_OP_GET_INDEX_U16,
804 [ FILTER_OP_GET_INDEX_U64 ] = &&LABEL_FILTER_OP_GET_INDEX_U64,
805
806 [ FILTER_OP_LOAD_FIELD ] = &&LABEL_FILTER_OP_LOAD_FIELD,
807 [ FILTER_OP_LOAD_FIELD_S8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S8,
808 [ FILTER_OP_LOAD_FIELD_S16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S16,
809 [ FILTER_OP_LOAD_FIELD_S32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S32,
810 [ FILTER_OP_LOAD_FIELD_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S64,
811 [ FILTER_OP_LOAD_FIELD_U8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U8,
812 [ FILTER_OP_LOAD_FIELD_U16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U16,
813 [ FILTER_OP_LOAD_FIELD_U32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U32,
814 [ FILTER_OP_LOAD_FIELD_U64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U64,
815 [ FILTER_OP_LOAD_FIELD_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_STRING,
816 [ FILTER_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_SEQUENCE,
817 [ FILTER_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_DOUBLE,
818
819 [ FILTER_OP_UNARY_BIT_NOT ] = &&LABEL_FILTER_OP_UNARY_BIT_NOT,
820
821 [ FILTER_OP_RETURN_S64 ] = &&LABEL_FILTER_OP_RETURN_S64,
822 };
823 #endif /* #ifndef INTERPRETER_USE_SWITCH */
824
825 START_OP
826
827 OP(FILTER_OP_UNKNOWN):
828 OP(FILTER_OP_LOAD_FIELD_REF):
829 OP(FILTER_OP_GET_CONTEXT_REF):
830 #ifdef INTERPRETER_USE_SWITCH
831 default:
832 #endif /* INTERPRETER_USE_SWITCH */
833 printk(KERN_WARNING "LTTng: filter: unknown bytecode op %u\n",
834 (unsigned int) *(filter_opcode_t *) pc);
835 ret = -EINVAL;
836 goto end;
837
838 OP(FILTER_OP_RETURN):
839 OP(FILTER_OP_RETURN_S64):
840 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
841 switch (estack_ax_t) {
842 case REG_S64:
843 case REG_U64:
844 retval = !!estack_ax_v;
845 break;
846 case REG_DOUBLE:
847 case REG_STRING:
848 case REG_PTR:
849 if (!output) {
850 ret = -EINVAL;
851 goto end;
852 }
853 retval = 0;
854 break;
855 case REG_STAR_GLOB_STRING:
856 case REG_TYPE_UNKNOWN:
857 ret = -EINVAL;
858 goto end;
859 }
860 ret = 0;
861 goto end;
862
863 /* binary */
864 OP(FILTER_OP_MUL):
865 OP(FILTER_OP_DIV):
866 OP(FILTER_OP_MOD):
867 OP(FILTER_OP_PLUS):
868 OP(FILTER_OP_MINUS):
869 printk(KERN_WARNING "LTTng: filter: unsupported bytecode op %u\n",
870 (unsigned int) *(filter_opcode_t *) pc);
871 ret = -EINVAL;
872 goto end;
873
874 OP(FILTER_OP_EQ):
875 OP(FILTER_OP_NE):
876 OP(FILTER_OP_GT):
877 OP(FILTER_OP_LT):
878 OP(FILTER_OP_GE):
879 OP(FILTER_OP_LE):
880 printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
881 (unsigned int) *(filter_opcode_t *) pc);
882 ret = -EINVAL;
883 goto end;
884
885 OP(FILTER_OP_EQ_STRING):
886 {
887 int res;
888
889 res = (stack_strcmp(stack, top, "==") == 0);
890 estack_pop(stack, top, ax, bx, ax_t, bx_t);
891 estack_ax_v = res;
892 estack_ax_t = REG_S64;
893 next_pc += sizeof(struct binary_op);
894 PO;
895 }
896 OP(FILTER_OP_NE_STRING):
897 {
898 int res;
899
900 res = (stack_strcmp(stack, top, "!=") != 0);
901 estack_pop(stack, top, ax, bx, ax_t, bx_t);
902 estack_ax_v = res;
903 estack_ax_t = REG_S64;
904 next_pc += sizeof(struct binary_op);
905 PO;
906 }
907 OP(FILTER_OP_GT_STRING):
908 {
909 int res;
910
911 res = (stack_strcmp(stack, top, ">") > 0);
912 estack_pop(stack, top, ax, bx, ax_t, bx_t);
913 estack_ax_v = res;
914 estack_ax_t = REG_S64;
915 next_pc += sizeof(struct binary_op);
916 PO;
917 }
918 OP(FILTER_OP_LT_STRING):
919 {
920 int res;
921
922 res = (stack_strcmp(stack, top, "<") < 0);
923 estack_pop(stack, top, ax, bx, ax_t, bx_t);
924 estack_ax_v = res;
925 estack_ax_t = REG_S64;
926 next_pc += sizeof(struct binary_op);
927 PO;
928 }
929 OP(FILTER_OP_GE_STRING):
930 {
931 int res;
932
933 res = (stack_strcmp(stack, top, ">=") >= 0);
934 estack_pop(stack, top, ax, bx, ax_t, bx_t);
935 estack_ax_v = res;
936 estack_ax_t = REG_S64;
937 next_pc += sizeof(struct binary_op);
938 PO;
939 }
940 OP(FILTER_OP_LE_STRING):
941 {
942 int res;
943
944 res = (stack_strcmp(stack, top, "<=") <= 0);
945 estack_pop(stack, top, ax, bx, ax_t, bx_t);
946 estack_ax_v = res;
947 estack_ax_t = REG_S64;
948 next_pc += sizeof(struct binary_op);
949 PO;
950 }
951
952 OP(FILTER_OP_EQ_STAR_GLOB_STRING):
953 {
954 int res;
955
956 res = (stack_star_glob_match(stack, top, "==") == 0);
957 estack_pop(stack, top, ax, bx, ax_t, bx_t);
958 estack_ax_v = res;
959 estack_ax_t = REG_S64;
960 next_pc += sizeof(struct binary_op);
961 PO;
962 }
963 OP(FILTER_OP_NE_STAR_GLOB_STRING):
964 {
965 int res;
966
967 res = (stack_star_glob_match(stack, top, "!=") != 0);
968 estack_pop(stack, top, ax, bx, ax_t, bx_t);
969 estack_ax_v = res;
970 estack_ax_t = REG_S64;
971 next_pc += sizeof(struct binary_op);
972 PO;
973 }
974
975 OP(FILTER_OP_EQ_S64):
976 {
977 int res;
978
979 res = (estack_bx_v == estack_ax_v);
980 estack_pop(stack, top, ax, bx, ax_t, bx_t);
981 estack_ax_v = res;
982 estack_ax_t = REG_S64;
983 next_pc += sizeof(struct binary_op);
984 PO;
985 }
986 OP(FILTER_OP_NE_S64):
987 {
988 int res;
989
990 res = (estack_bx_v != estack_ax_v);
991 estack_pop(stack, top, ax, bx, ax_t, bx_t);
992 estack_ax_v = res;
993 estack_ax_t = REG_S64;
994 next_pc += sizeof(struct binary_op);
995 PO;
996 }
997 OP(FILTER_OP_GT_S64):
998 {
999 int res;
1000
1001 res = (estack_bx_v > estack_ax_v);
1002 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1003 estack_ax_v = res;
1004 estack_ax_t = REG_S64;
1005 next_pc += sizeof(struct binary_op);
1006 PO;
1007 }
1008 OP(FILTER_OP_LT_S64):
1009 {
1010 int res;
1011
1012 res = (estack_bx_v < estack_ax_v);
1013 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1014 estack_ax_v = res;
1015 estack_ax_t = REG_S64;
1016 next_pc += sizeof(struct binary_op);
1017 PO;
1018 }
1019 OP(FILTER_OP_GE_S64):
1020 {
1021 int res;
1022
1023 res = (estack_bx_v >= estack_ax_v);
1024 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1025 estack_ax_v = res;
1026 estack_ax_t = REG_S64;
1027 next_pc += sizeof(struct binary_op);
1028 PO;
1029 }
		OP(FILTER_OP_LE_S64):
		{
			int res;

			/* bx <= ax; pop one operand, boolean result becomes the new ax. */
			res = (estack_bx_v <= estack_ax_v);
			estack_pop(stack, top, ax, bx, ax_t, bx_t);
			estack_ax_v = res;
			estack_ax_t = REG_S64;
			next_pc += sizeof(struct binary_op);
			PO;
		}

		/*
		 * Double (floating-point) comparators are not supported by the
		 * kernel interpreter; reaching one of these opcodes indicates
		 * broken bytecode, hence BUG_ON.
		 */
		OP(FILTER_OP_EQ_DOUBLE):
		OP(FILTER_OP_NE_DOUBLE):
		OP(FILTER_OP_GT_DOUBLE):
		OP(FILTER_OP_LT_DOUBLE):
		OP(FILTER_OP_GE_DOUBLE):
		OP(FILTER_OP_LE_DOUBLE):
		{
			BUG_ON(1);
			PO;
		}

		/* Mixed S64-double binary comparators */
		OP(FILTER_OP_EQ_DOUBLE_S64):
		OP(FILTER_OP_NE_DOUBLE_S64):
		OP(FILTER_OP_GT_DOUBLE_S64):
		OP(FILTER_OP_LT_DOUBLE_S64):
		OP(FILTER_OP_GE_DOUBLE_S64):
		OP(FILTER_OP_LE_DOUBLE_S64):
		OP(FILTER_OP_EQ_S64_DOUBLE):
		OP(FILTER_OP_NE_S64_DOUBLE):
		OP(FILTER_OP_GT_S64_DOUBLE):
		OP(FILTER_OP_LT_S64_DOUBLE):
		OP(FILTER_OP_GE_S64_DOUBLE):
		OP(FILTER_OP_LE_S64_DOUBLE):
		{
			/* Unreachable for the same reason as above. */
			BUG_ON(1);
			PO;
		}
		OP(FILTER_OP_BIT_RSHIFT):
		{
			int64_t res;

			/* Dynamic typing: both operands must be integer registers. */
			if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
				ret = -EINVAL;
				goto end;
			}

			/* Catch undefined behavior. */
			if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
				ret = -EINVAL;
				goto end;
			}
			/* Logical (unsigned) shift; bitwise results are typed REG_U64. */
			res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
			estack_pop(stack, top, ax, bx, ax_t, bx_t);
			estack_ax_v = res;
			estack_ax_t = REG_U64;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_BIT_LSHIFT):
		{
			int64_t res;

			if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
				ret = -EINVAL;
				goto end;
			}

			/* Catch undefined behavior. */
			if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
				ret = -EINVAL;
				goto end;
			}
			res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
			estack_pop(stack, top, ax, bx, ax_t, bx_t);
			estack_ax_v = res;
			estack_ax_t = REG_U64;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_BIT_AND):
		{
			int64_t res;

			if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
				ret = -EINVAL;
				goto end;
			}

			res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
			estack_pop(stack, top, ax, bx, ax_t, bx_t);
			estack_ax_v = res;
			estack_ax_t = REG_U64;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_BIT_OR):
		{
			int64_t res;

			if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
				ret = -EINVAL;
				goto end;
			}

			res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
			estack_pop(stack, top, ax, bx, ax_t, bx_t);
			estack_ax_v = res;
			estack_ax_t = REG_U64;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_BIT_XOR):
		{
			int64_t res;

			if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
				ret = -EINVAL;
				goto end;
			}

			res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
			estack_pop(stack, top, ax, bx, ax_t, bx_t);
			estack_ax_v = res;
			estack_ax_t = REG_U64;
			next_pc += sizeof(struct binary_op);
			PO;
		}
1160
		/* unary */
		OP(FILTER_OP_UNARY_PLUS):
		OP(FILTER_OP_UNARY_MINUS):
		OP(FILTER_OP_UNARY_NOT):
			/*
			 * Generic unary ops must have been replaced by their
			 * typed variants during specialization; seeing one here
			 * means the bytecode was not specialized.
			 */
			printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
				(unsigned int) *(filter_opcode_t *) pc);
			ret = -EINVAL;
			goto end;
1169
1170
1171 OP(FILTER_OP_UNARY_BIT_NOT):
1172 {
1173 estack_ax_v = ~(uint64_t) estack_ax_v;
1174 estack_ax_t = REG_S64;
1175 next_pc += sizeof(struct unary_op);
1176 PO;
1177 }
1178
		OP(FILTER_OP_UNARY_PLUS_S64):
		{
			/* Unary plus is a no-op; just advance the program counter. */
			next_pc += sizeof(struct unary_op);
			PO;
		}
		OP(FILTER_OP_UNARY_MINUS_S64):
		{
			estack_ax_v = -estack_ax_v;
			estack_ax_t = REG_S64;
			next_pc += sizeof(struct unary_op);
			PO;
		}
		OP(FILTER_OP_UNARY_PLUS_DOUBLE):
		OP(FILTER_OP_UNARY_MINUS_DOUBLE):
		{
			/* Doubles are not supported by the kernel interpreter. */
			BUG_ON(1);
			PO;
		}
		OP(FILTER_OP_UNARY_NOT_S64):
		{
			/* Logical not: 0 -> 1, nonzero -> 0. */
			estack_ax_v = !estack_ax_v;
			estack_ax_t = REG_S64;
			next_pc += sizeof(struct unary_op);
			PO;
		}
		OP(FILTER_OP_UNARY_NOT_DOUBLE):
		{
			BUG_ON(1);
			PO;
		}
1209
		/* logical */
		OP(FILTER_OP_AND):
		{
			struct logical_op *insn = (struct logical_op *) pc;

			/* If AX is 0, skip and evaluate to 0 */
			if (unlikely(estack_ax_v == 0)) {
				/* Short-circuit: keep 0 in ax, jump over the RHS. */
				dbg_printk("Jumping to bytecode offset %u\n",
					(unsigned int) insn->skip_offset);
				next_pc = start_pc + insn->skip_offset;
			} else {
				/* Pop 1 when jump not taken */
				estack_pop(stack, top, ax, bx, ax_t, bx_t);
				next_pc += sizeof(struct logical_op);
			}
			PO;
		}
		OP(FILTER_OP_OR):
		{
			struct logical_op *insn = (struct logical_op *) pc;

			/* If AX is nonzero, skip and evaluate to 1 */

			if (unlikely(estack_ax_v != 0)) {
				/* Short-circuit: normalize ax to 1, jump over the RHS. */
				estack_ax_v = 1;
				dbg_printk("Jumping to bytecode offset %u\n",
					(unsigned int) insn->skip_offset);
				next_pc = start_pc + insn->skip_offset;
			} else {
				/* Pop 1 when jump not taken */
				estack_pop(stack, top, ax, bx, ax_t, bx_t);
				next_pc += sizeof(struct logical_op);
			}
			PO;
		}
1245
1246
		/* load field ref */
		OP(FILTER_OP_LOAD_FIELD_REF_STRING):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;

			dbg_printk("load field ref offset %u type string\n",
				ref->offset);
			estack_push(stack, top, ax, bx, ax_t, bx_t);
			/* The stack data slot holds a pointer to the string. */
			estack_ax(stack, top)->u.s.str =
				*(const char * const *) &interpreter_stack_data[ref->offset];
			if (unlikely(!estack_ax(stack, top)->u.s.str)) {
				dbg_printk("Filter warning: loading a NULL string.\n");
				ret = -EINVAL;
				goto end;
			}
			/* Length unknown: comparisons stop at the NUL terminator. */
			estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
			estack_ax(stack, top)->u.s.literal_type =
				ESTACK_STRING_LITERAL_TYPE_NONE;
			estack_ax(stack, top)->u.s.user = 0;
			estack_ax(stack, top)->type = REG_STRING;
			dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}

		OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;

			dbg_printk("load field ref offset %u type sequence\n",
				ref->offset);
			estack_push(stack, top, ax, bx, ax_t, bx_t);
			/* Sequence layout in stack data: length word, then data pointer. */
			estack_ax(stack, top)->u.s.seq_len =
				*(unsigned long *) &interpreter_stack_data[ref->offset];
			estack_ax(stack, top)->u.s.str =
				*(const char **) (&interpreter_stack_data[ref->offset
					+ sizeof(unsigned long)]);
			if (unlikely(!estack_ax(stack, top)->u.s.str)) {
				dbg_printk("Filter warning: loading a NULL sequence.\n");
				ret = -EINVAL;
				goto end;
			}
			estack_ax(stack, top)->u.s.literal_type =
				ESTACK_STRING_LITERAL_TYPE_NONE;
			estack_ax(stack, top)->u.s.user = 0;
			/*
			 * NOTE(review): unlike the STRING case above, ->type is
			 * not set to REG_STRING here -- confirm the specialized
			 * comparison opcodes do not rely on it.
			 */
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}

		OP(FILTER_OP_LOAD_FIELD_REF_S64):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;

			dbg_printk("load field ref offset %u type s64\n",
				ref->offset);
			estack_push(stack, top, ax, bx, ax_t, bx_t);
			estack_ax_v =
				((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
			estack_ax_t = REG_S64;
			dbg_printk("ref load s64 %lld\n",
				(long long) estack_ax_v);
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}

		OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE):
		{
			/* Doubles are not supported by the kernel interpreter. */
			BUG_ON(1);
			PO;
		}
1320
		/* load from immediate operand */
		OP(FILTER_OP_LOAD_STRING):
		{
			struct load_op *insn = (struct load_op *) pc;

			dbg_printk("load string %s\n", insn->data);
			estack_push(stack, top, ax, bx, ax_t, bx_t);
			/* String literal is embedded inline in the bytecode stream. */
			estack_ax(stack, top)->u.s.str = insn->data;
			estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
			estack_ax(stack, top)->u.s.literal_type =
				ESTACK_STRING_LITERAL_TYPE_PLAIN;
			estack_ax(stack, top)->u.s.user = 0;
			/* Skip over the inline string, including its NUL terminator. */
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			PO;
		}

		OP(FILTER_OP_LOAD_STAR_GLOB_STRING):
		{
			struct load_op *insn = (struct load_op *) pc;

			dbg_printk("load globbing pattern %s\n", insn->data);
			estack_push(stack, top, ax, bx, ax_t, bx_t);
			/* Same as LOAD_STRING, but marked as a star-glob pattern. */
			estack_ax(stack, top)->u.s.str = insn->data;
			estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
			estack_ax(stack, top)->u.s.literal_type =
				ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
			estack_ax(stack, top)->u.s.user = 0;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			PO;
		}

		OP(FILTER_OP_LOAD_S64):
		{
			struct load_op *insn = (struct load_op *) pc;

			estack_push(stack, top, ax, bx, ax_t, bx_t);
			estack_ax_v = ((struct literal_numeric *) insn->data)->v;
			estack_ax_t = REG_S64;
			dbg_printk("load s64 %lld\n",
				(long long) estack_ax_v);
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_numeric);
			PO;
		}

		OP(FILTER_OP_LOAD_DOUBLE):
		{
			/* Doubles are not supported by the kernel interpreter. */
			BUG_ON(1);
			PO;
		}

		/* cast */
		OP(FILTER_OP_CAST_TO_S64):
			/* Generic cast must have been specialized away. */
			printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
				(unsigned int) *(filter_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		OP(FILTER_OP_CAST_DOUBLE_TO_S64):
		{
			BUG_ON(1);
			PO;
		}

		OP(FILTER_OP_CAST_NOP):
		{
			/* No-op cast: only advance the program counter. */
			next_pc += sizeof(struct cast_op);
			PO;
		}
1390
		/* get context ref */
		OP(FILTER_OP_GET_CONTEXT_REF_STRING):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;
			struct lttng_ctx_field *ctx_field;
			union lttng_ctx_value v;

			dbg_printk("get context ref offset %u type string\n",
				ref->offset);
			/* Resolve the context field and fetch its current value. */
			ctx_field = &lttng_static_ctx->fields[ref->offset];
			ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
			estack_push(stack, top, ax, bx, ax_t, bx_t);
			estack_ax(stack, top)->u.s.str = v.str;
			if (unlikely(!estack_ax(stack, top)->u.s.str)) {
				dbg_printk("Filter warning: loading a NULL string.\n");
				ret = -EINVAL;
				goto end;
			}
			estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
			estack_ax(stack, top)->u.s.literal_type =
				ESTACK_STRING_LITERAL_TYPE_NONE;
			estack_ax(stack, top)->u.s.user = 0;
			estack_ax(stack, top)->type = REG_STRING;
			dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}

		OP(FILTER_OP_GET_CONTEXT_REF_S64):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;
			struct lttng_ctx_field *ctx_field;
			union lttng_ctx_value v;

			dbg_printk("get context ref offset %u type s64\n",
				ref->offset);
			ctx_field = &lttng_static_ctx->fields[ref->offset];
			ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
			estack_push(stack, top, ax, bx, ax_t, bx_t);
			estack_ax_v = v.s64;
			estack_ax_t = REG_S64;
			dbg_printk("ref get context s64 %lld\n",
				(long long) estack_ax_v);
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}

		OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE):
		{
			/* Doubles are not supported by the kernel interpreter. */
			BUG_ON(1);
			PO;
		}
1445
1446 /* load userspace field ref */
1447 OP(FILTER_OP_LOAD_FIELD_REF_USER_STRING):
1448 {
1449 struct load_op *insn = (struct load_op *) pc;
1450 struct field_ref *ref = (struct field_ref *) insn->data;
1451
1452 dbg_printk("load field ref offset %u type user string\n",
1453 ref->offset);
1454 estack_push(stack, top, ax, bx, ax_t, bx_t);
1455 estack_ax(stack, top)->u.s.user_str =
1456 *(const char * const *) &interpreter_stack_data[ref->offset];
1457 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1458 dbg_printk("Filter warning: loading a NULL string.\n");
1459 ret = -EINVAL;
1460 goto end;
1461 }
1462 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1463 estack_ax(stack, top)->u.s.literal_type =
1464 ESTACK_STRING_LITERAL_TYPE_NONE;
1465 estack_ax(stack, top)->u.s.user = 1;
1466 estack_ax(stack, top)->type = REG_STRING;
1467 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1468 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1469 PO;
1470 }
1471
1472 OP(FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1473 {
1474 struct load_op *insn = (struct load_op *) pc;
1475 struct field_ref *ref = (struct field_ref *) insn->data;
1476
1477 dbg_printk("load field ref offset %u type user sequence\n",
1478 ref->offset);
1479 estack_push(stack, top, ax, bx, ax_t, bx_t);
1480 estack_ax(stack, top)->u.s.seq_len =
1481 *(unsigned long *) &interpreter_stack_data[ref->offset];
1482 estack_ax(stack, top)->u.s.user_str =
1483 *(const char **) (&interpreter_stack_data[ref->offset
1484 + sizeof(unsigned long)]);
1485 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1486 dbg_printk("Filter warning: loading a NULL sequence.\n");
1487 ret = -EINVAL;
1488 goto end;
1489 }
1490 estack_ax(stack, top)->u.s.literal_type =
1491 ESTACK_STRING_LITERAL_TYPE_NONE;
1492 estack_ax(stack, top)->u.s.user = 1;
1493 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1494 PO;
1495 }
1496
		OP(FILTER_OP_GET_CONTEXT_ROOT):
		{
			dbg_printk("op get context root\n");
			estack_push(stack, top, ax, bx, ax_t, bx_t);
			estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
			/* "field" only needed for variants. */
			estack_ax(stack, top)->u.ptr.field = NULL;
			estack_ax(stack, top)->type = REG_PTR;
			next_pc += sizeof(struct load_op);
			PO;
		}

		OP(FILTER_OP_GET_APP_CONTEXT_ROOT):
		{
			/* Not supported here: reaching this opcode is a bug. */
			BUG_ON(1);
			PO;
		}

		OP(FILTER_OP_GET_PAYLOAD_ROOT):
		{
			dbg_printk("op get app payload root\n");
			estack_push(stack, top, ax, bx, ax_t, bx_t);
			estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
			/* Payload traversal starts at the event's stack data. */
			estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
			/* "field" only needed for variants. */
			estack_ax(stack, top)->u.ptr.field = NULL;
			estack_ax(stack, top)->type = REG_PTR;
			next_pc += sizeof(struct load_op);
			PO;
		}

		OP(FILTER_OP_GET_SYMBOL):
		{
			dbg_printk("op get symbol\n");
			switch (estack_ax(stack, top)->u.ptr.type) {
			case LOAD_OBJECT:
				printk(KERN_WARNING "LTTng: filter: Nested fields not implemented yet.\n");
				ret = -EINVAL;
				goto end;
			case LOAD_ROOT_CONTEXT:
			case LOAD_ROOT_APP_CONTEXT:
			case LOAD_ROOT_PAYLOAD:
				/*
				 * symbol lookup is performed by
				 * specialization.
				 */
				ret = -EINVAL;
				goto end;
			}
			next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
			PO;
		}

		OP(FILTER_OP_GET_SYMBOL_FIELD):
		{
			/*
			 * Used for first variant encountered in a
			 * traversal. Variants are not implemented yet.
			 */
			ret = -EINVAL;
			goto end;
		}
1559
		OP(FILTER_OP_GET_INDEX_U16):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u16 *index = (struct get_index_u16 *) insn->data;

			dbg_printk("op get index u16\n");
			ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
			if (ret)
				goto end;
			/* Mirror the updated top-of-stack entry into the cached ax regs. */
			estack_ax_v = estack_ax(stack, top)->u.v;
			estack_ax_t = estack_ax(stack, top)->type;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
			PO;
		}

		OP(FILTER_OP_GET_INDEX_U64):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u64 *index = (struct get_index_u64 *) insn->data;

			dbg_printk("op get index u64\n");
			ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
			if (ret)
				goto end;
			estack_ax_v = estack_ax(stack, top)->u.v;
			estack_ax_t = estack_ax(stack, top)->type;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
			PO;
		}

		OP(FILTER_OP_LOAD_FIELD):
		{
			/* Dynamically-typed field load: dispatches on the object type. */
			dbg_printk("op load field\n");
			ret = dynamic_load_field(estack_ax(stack, top));
			if (ret)
				goto end;
			estack_ax_v = estack_ax(stack, top)->u.v;
			estack_ax_t = estack_ax(stack, top)->type;
			next_pc += sizeof(struct load_op);
			PO;
		}
1601
		OP(FILTER_OP_LOAD_FIELD_S8):
		{
			dbg_printk("op load field s8\n");

			/* Sign-extended by the int8_t -> int64_t conversion. */
			estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
			estack_ax_t = REG_S64;
			next_pc += sizeof(struct load_op);
			PO;
		}
		OP(FILTER_OP_LOAD_FIELD_S16):
		{
			dbg_printk("op load field s16\n");

			estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
			estack_ax_t = REG_S64;
			next_pc += sizeof(struct load_op);
			PO;
		}
		OP(FILTER_OP_LOAD_FIELD_S32):
		{
			dbg_printk("op load field s32\n");

			estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
			estack_ax_t = REG_S64;
			next_pc += sizeof(struct load_op);
			PO;
		}
		OP(FILTER_OP_LOAD_FIELD_S64):
		{
			dbg_printk("op load field s64\n");

			estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
			estack_ax_t = REG_S64;
			next_pc += sizeof(struct load_op);
			PO;
		}
		OP(FILTER_OP_LOAD_FIELD_U8):
		{
			dbg_printk("op load field u8\n");

			/* Zero-extended by the uint8_t -> int64_t conversion. */
			estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
			estack_ax_t = REG_S64;
			next_pc += sizeof(struct load_op);
			PO;
		}
		OP(FILTER_OP_LOAD_FIELD_U16):
		{
			dbg_printk("op load field u16\n");

			estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
			estack_ax_t = REG_S64;
			next_pc += sizeof(struct load_op);
			PO;
		}
		OP(FILTER_OP_LOAD_FIELD_U32):
		{
			dbg_printk("op load field u32\n");

			estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
			estack_ax_t = REG_S64;
			next_pc += sizeof(struct load_op);
			PO;
		}
		OP(FILTER_OP_LOAD_FIELD_U64):
		{
			dbg_printk("op load field u64\n");

			/*
			 * NOTE(review): the u64 load is typed REG_S64, so
			 * values above INT64_MAX will compare as negative --
			 * confirm whether REG_U64 (as used by the bitwise
			 * operators) was intended here.
			 */
			estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
			estack_ax_t = REG_S64;
			next_pc += sizeof(struct load_op);
			PO;
		}
		OP(FILTER_OP_LOAD_FIELD_DOUBLE):
		{
			/* Doubles are rejected at runtime rather than BUG_ON here. */
			ret = -EINVAL;
			goto end;
		}
1679
		OP(FILTER_OP_LOAD_FIELD_STRING):
		{
			const char *str;

			dbg_printk("op load field string\n");
			str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
			estack_ax(stack, top)->u.s.str = str;
			if (unlikely(!estack_ax(stack, top)->u.s.str)) {
				dbg_printk("Filter warning: loading a NULL string.\n");
				ret = -EINVAL;
				goto end;
			}
			estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
			estack_ax(stack, top)->u.s.literal_type =
				ESTACK_STRING_LITERAL_TYPE_NONE;
			/*
			 * NOTE(review): u.s.user is not initialized here,
			 * unlike the field-ref load paths -- confirm the
			 * dynamic-load path guarantees a kernel-space string.
			 */
			estack_ax(stack, top)->type = REG_STRING;
			next_pc += sizeof(struct load_op);
			PO;
		}

		OP(FILTER_OP_LOAD_FIELD_SEQUENCE):
		{
			const char *ptr;

			dbg_printk("op load field string sequence\n");
			ptr = estack_ax(stack, top)->u.ptr.ptr;
			/* Sequence object layout: length word followed by data pointer. */
			estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
			estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
			if (unlikely(!estack_ax(stack, top)->u.s.str)) {
				dbg_printk("Filter warning: loading a NULL sequence.\n");
				ret = -EINVAL;
				goto end;
			}
			estack_ax(stack, top)->u.s.literal_type =
				ESTACK_STRING_LITERAL_TYPE_NONE;
			estack_ax(stack, top)->type = REG_STRING;
			next_pc += sizeof(struct load_op);
			PO;
		}
1719
	END_OP
end:
	/* Return _DISCARD on error. */
	if (ret)
		return LTTNG_FILTER_DISCARD;

	if (output) {
		/* Capture mode: format the top-of-stack entry into "output". */
		return lttng_bytecode_interpret_format_output(
			estack_ax(stack, top), output);
	}

	return retval;
}
/*
 * NOTE(review): the OP/PO dispatch presumably uses computed gotos, which
 * objtool's stack validation cannot follow -- hence this annotation.
 */
LTTNG_STACK_FRAME_NON_STANDARD(bytecode_interpret);
1734
1735 uint64_t lttng_filter_interpret_bytecode(void *filter_data,
1736 struct lttng_probe_ctx *lttng_probe_ctx,
1737 const char *filter_stack_data)
1738 {
1739 return bytecode_interpret(filter_data, lttng_probe_ctx,
1740 filter_stack_data, NULL);
1741 }
1742
1743 #undef START_OP
1744 #undef OP
1745 #undef PO
1746 #undef END_OP