7c05c24f97786127fbdd8fffc94ae83fcbcd8c80
[lttng-modules.git] / src / lttng-filter-interpreter.c
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-filter-interpreter.c
4 *
5 * LTTng modules filter interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
14
15 #include <lttng/filter.h>
16 #include <lttng/string-utils.h>
17
18 LTTNG_STACK_FRAME_NON_STANDARD(lttng_filter_interpret_bytecode);
19
20 /*
21 * get_char should be called with page fault handler disabled if it is expected
22 * to handle user-space read.
23 */
24 static
25 char get_char(struct estack_entry *reg, size_t offset)
26 {
27 if (unlikely(offset >= reg->u.s.seq_len))
28 return '\0';
29 if (reg->u.s.user) {
30 char c;
31
32 /* Handle invalid access as end of string. */
33 if (unlikely(!lttng_access_ok(VERIFY_READ,
34 reg->u.s.user_str + offset,
35 sizeof(c))))
36 return '\0';
37 /* Handle fault (nonzero return value) as end of string. */
38 if (unlikely(__copy_from_user_inatomic(&c,
39 reg->u.s.user_str + offset,
40 sizeof(c))))
41 return '\0';
42 return c;
43 } else {
44 return reg->u.s.str[offset];
45 }
46 }
47
48 /*
49 * -1: wildcard found.
50 * -2: unknown escape char.
51 * 0: normal char.
52 */
53 static
54 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
55 {
56 switch (*c) {
57 case '\\':
58 (*offset)++;
59 *c = get_char(reg, *offset);
60 switch (*c) {
61 case '\\':
62 case '*':
63 return 0;
64 default:
65 return -2;
66 }
67 case '*':
68 return -1;
69 default:
70 return 0;
71 }
72 }
73
74 static
75 char get_char_at_cb(size_t at, void *data)
76 {
77 return get_char(data, at);
78 }
79
80 static
81 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
82 {
83 bool has_user = false;
84 int result;
85 struct estack_entry *pattern_reg;
86 struct estack_entry *candidate_reg;
87
88 /* Disable the page fault handler when reading from userspace. */
89 if (estack_bx(stack, top)->u.s.user
90 || estack_ax(stack, top)->u.s.user) {
91 has_user = true;
92 pagefault_disable();
93 }
94
95 /* Find out which side is the pattern vs. the candidate. */
96 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
97 pattern_reg = estack_ax(stack, top);
98 candidate_reg = estack_bx(stack, top);
99 } else {
100 pattern_reg = estack_bx(stack, top);
101 candidate_reg = estack_ax(stack, top);
102 }
103
104 /* Perform the match operation. */
105 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
106 pattern_reg, get_char_at_cb, candidate_reg);
107 if (has_user)
108 pagefault_enable();
109
110 return result;
111 }
112
/*
 * Lexicographically compare the two string registers at the top of the
 * estack (bx against ax), honoring '*' wildcards and '\' escapes in
 * registers holding plain string literals.  Returns <0, 0 or >0 like
 * strcmp().  A wildcard on either side makes the strings compare equal
 * from that point on.  cmp_type names the operator being evaluated
 * (e.g. "==") and is only useful for debugging.
 */
static
int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
{
	size_t offset_bx = 0, offset_ax = 0;
	int diff, has_user = 0;

	/* Disable the page fault handler when reading from userspace. */
	if (estack_bx(stack, top)->u.s.user
	|| estack_ax(stack, top)->u.s.user) {
		has_user = 1;
		pagefault_disable();
	}

	for (;;) {
		int ret;
		/* Set when the bx character came from a '\' escape. */
		int escaped_r0 = 0;
		char char_bx, char_ax;

		char_bx = get_char(estack_bx(stack, top), offset_bx);
		char_ax = get_char(estack_ax(stack, top), offset_ax);

		/* bx exhausted: equal if ax is too, or if ax is a wildcard. */
		if (unlikely(char_bx == '\0')) {
			if (char_ax == '\0') {
				diff = 0;
				break;
			} else {
				if (estack_ax(stack, top)->u.s.literal_type ==
						ESTACK_STRING_LITERAL_TYPE_PLAIN) {
					ret = parse_char(estack_ax(stack, top),
						&char_ax, &offset_ax);
					if (ret == -1) {
						/* '*' matches the empty tail. */
						diff = 0;
						break;
					}
				}
				diff = -1;
				break;
			}
		}
		/* ax exhausted (bx is not): bx greater unless bx is a wildcard. */
		if (unlikely(char_ax == '\0')) {
			if (estack_bx(stack, top)->u.s.literal_type ==
					ESTACK_STRING_LITERAL_TYPE_PLAIN) {
				ret = parse_char(estack_bx(stack, top),
					&char_bx, &offset_bx);
				if (ret == -1) {
					/* '*' matches the empty tail. */
					diff = 0;
					break;
				}
			}
			diff = 1;
			break;
		}
		/* Decode escapes/wildcards on the bx side. */
		if (estack_bx(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_bx(stack, top),
				&char_bx, &offset_bx);
			if (ret == -1) {
				diff = 0;
				break;
			} else if (ret == -2) {
				escaped_r0 = 1;
			}
			/* else compare both char */
		}
		/* Decode escapes/wildcards on the ax side. */
		if (estack_ax(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_ax(stack, top),
				&char_ax, &offset_ax);
			if (ret == -1) {
				diff = 0;
				break;
			} else if (ret == -2) {
				/*
				 * Unknown escape on ax only: treat the sides
				 * as unequal (unless bx escaped too).
				 */
				if (!escaped_r0) {
					diff = -1;
					break;
				}
			} else {
				if (escaped_r0) {
					diff = 1;
					break;
				}
			}
		} else {
			/* Unknown escape on bx only: sides differ. */
			if (escaped_r0) {
				diff = 1;
				break;
			}
		}
		diff = char_bx - char_ax;
		if (diff != 0)
			break;
		offset_bx++;
		offset_ax++;
	}
	if (has_user)
		pagefault_enable();

	return diff;
}
211
212 uint64_t lttng_filter_false(void *filter_data,
213 struct lttng_probe_ctx *lttng_probe_ctx,
214 const char *filter_stack_data)
215 {
216 return LTTNG_FILTER_DISCARD;
217 }
218
#ifdef INTERPRETER_USE_SWITCH

/*
 * Fallback for compilers that do not support taking address of labels.
 *
 * NOTE(review): this fallback previously read &bytecode->data[0] while
 * the dispatch-table variant below reads &bytecode->code[0]; both paths
 * must start from the same bytecode member, so the fallback now uses
 * ->code as well.
 */

#define START_OP						\
	start_pc = &bytecode->code[0];				\
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
			pc = next_pc) {				\
		dbg_printk("LTTng: Executing op %s (%u)\n",	\
			lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
			(unsigned int) *(filter_opcode_t *) pc); \
		switch (*(filter_opcode_t *) pc) {

#define OP(name)	case name

#define PO		break

#define END_OP		}					\
	}

#else

/*
 * Dispatch-table based interpreter (computed goto).
 */

#define START_OP						\
	start_pc = &bytecode->code[0];				\
	pc = next_pc = start_pc;				\
	if (unlikely(pc - start_pc >= bytecode->len))		\
		goto end;					\
	goto *dispatch[*(filter_opcode_t *) pc];

#define OP(name)						\
LABEL_##name

#define PO							\
		pc = next_pc;					\
		goto *dispatch[*(filter_opcode_t *) pc];

#define END_OP

#endif
264
/*
 * True when the estack register type holds an integer (signed or
 * unsigned 64-bit).  The argument is parenthesized so the macro stays
 * correct for compound expressions.
 */
#define IS_INTEGER_REGISTER(reg_type) \
	((reg_type) == REG_S64 || (reg_type) == REG_U64)
267
/*
 * Load the value of static context field @idx into @ptr as a
 * LOAD_OBJECT.  Integer and enum fields are copied by value into
 * ptr->u and ptr->ptr is pointed at that copy; string-typed fields
 * (string, string array, string sequence) keep a pointer to the
 * context-provided string.  Returns 0 on success, -EINVAL for field
 * types that cannot be loaded (struct, variant, non-string
 * arrays/sequences, unknown).
 */
static int context_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
		struct load_ptr *ptr,
		uint32_t idx)
{

	struct lttng_ctx_field *ctx_field;
	struct lttng_event_field *field;
	union lttng_ctx_value v;

	ctx_field = &lttng_static_ctx->fields[idx];
	field = &ctx_field->event_field;
	ptr->type = LOAD_OBJECT;
	/* field is only used for types nested within variants. */
	ptr->field = NULL;

	switch (field->type.atype) {
	case atype_integer:
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		if (field->type.u.integer.signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	case atype_enum_nestable:
	{
		/* Enums load as their integer container type. */
		const struct lttng_integer_type *itype =
			&field->type.u.enum_nestable.container_type->u.integer;

		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		if (itype->signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	}
	case atype_array_nestable:
		/* Only arrays of text (string) elements are loadable. */
		if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: filter: Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
			printk(KERN_WARNING "LTTng: filter: Only string arrays are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_sequence_nestable:
		/* Only sequences of text (string) elements are loadable. */
		if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: filter: Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
			printk(KERN_WARNING "LTTng: filter: Only string sequences are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_string:
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_struct_nestable:
		printk(KERN_WARNING "LTTng: filter: Structure type cannot be loaded.\n");
		return -EINVAL;
	case atype_variant_nestable:
		printk(KERN_WARNING "LTTng: filter: Variant type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "LTTng: filter: Unknown type: %d", (int) field->type.atype);
		return -EINVAL;
	}
	return 0;
}
356
/*
 * Apply a "get index" bytecode operation to the object reference at
 * @stack_top, using the pre-validated get_index data stored at
 * runtime->data[@index].  Depending on what the stack top currently
 * references, this either indexes into an array/sequence element,
 * resolves a context field, or offsets into the event payload.
 * Returns 0 on success, -EINVAL on out-of-bound or unsupported access.
 */
static int dynamic_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
		struct bytecode_runtime *runtime,
		uint64_t index, struct estack_entry *stack_top)
{
	int ret;
	const struct filter_get_index_data *gid;

	/*
	 * Types nested within variants need to perform dynamic lookup
	 * based on the field descriptions. LTTng-UST does not implement
	 * variants for now.
	 */
	if (stack_top->u.ptr.field)
		return -EINVAL;
	gid = (const struct filter_get_index_data *) &runtime->data[index];
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		switch (stack_top->u.ptr.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const char *ptr;

			/* Offset was validated against the static array length. */
			WARN_ON_ONCE(gid->offset >= gid->array_len);
			/* Skip count (unsigned long) */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			/* field is only used for types nested within variants. */
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const char *ptr;
			size_t ptr_seq_len;

			/* Layout: element count (unsigned long) then data pointer. */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
			/* Sequence length is dynamic: bound-check at runtime. */
			if (gid->offset >= gid->elem.len * ptr_seq_len) {
				ret = -EINVAL;
				goto end;
			}
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			/* field is only used for types nested within variants. */
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			printk(KERN_WARNING "LTTng: filter: Nested structures are not supported yet.\n");
			ret = -EINVAL;
			goto end;
		case OBJECT_TYPE_VARIANT:
		default:
			printk(KERN_WARNING "LTTng: filter: Unexpected get index type %d",
				(int) stack_top->u.ptr.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:	/* Fall-through */
	{
		/* Indexing a root context resolves a context field. */
		ret = context_get_index(lttng_probe_ctx,
				&stack_top->u.ptr,
				gid->ctx_index);
		if (ret) {
			goto end;
		}
		break;
	}
	case LOAD_ROOT_PAYLOAD:
		/* Payload fields live at a fixed offset in the stack data. */
		stack_top->u.ptr.ptr += gid->offset;
		if (gid->elem.type == OBJECT_TYPE_STRING)
			stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
		stack_top->u.ptr.object_type = gid->elem.type;
		stack_top->u.ptr.type = LOAD_OBJECT;
		/* field is only used for types nested within variants. */
		stack_top->u.ptr.field = NULL;
		break;
	}
	return 0;

end:
	return ret;
}
447
/*
 * Dereference the LOAD_OBJECT reference at @stack_top and replace it in
 * place with the loaded value: integers become REG_S64/REG_U64 (with
 * byte-swapping when the object's byte order is reversed), strings and
 * string sequences become REG_STRING.  Returns 0 on success, -EINVAL
 * when the referenced object cannot be loaded.
 */
static int dynamic_load_field(struct estack_entry *stack_top)
{
	int ret;

	/* Only fully-resolved objects can be loaded; roots need a field name. */
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		dbg_printk("Filter warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->u.ptr.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printk("op load field s8\n");
		/* Single byte: no byte-swap needed. */
		stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_S64;
		break;
	case OBJECT_TYPE_S16:
	{
		int16_t tmp;

		dbg_printk("op load field s16\n");
		tmp = *(int16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S32:
	{
		int32_t tmp;

		dbg_printk("op load field s32\n");
		tmp = *(int32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S64:
	{
		int64_t tmp;

		dbg_printk("op load field s64\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_U8:
		dbg_printk("op load field u8\n");
		/* Single byte: no byte-swap needed. */
		stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_U64;
		break;
	case OBJECT_TYPE_U16:
	{
		uint16_t tmp;

		dbg_printk("op load field u16\n");
		tmp = *(uint16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U32:
	{
		uint32_t tmp;

		dbg_printk("op load field u32\n");
		tmp = *(uint32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U64:
	{
		uint64_t tmp;

		dbg_printk("op load field u64\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_STRING:
	{
		const char *str;

		dbg_printk("op load field string\n");
		str = (const char *) stack_top->u.ptr.ptr;
		stack_top->u.s.str = str;
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		/* NUL-terminated: length bounded only by the terminator. */
		stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_STRING_SEQUENCE:
	{
		const char *ptr;

		dbg_printk("op load field string sequence\n");
		/* Layout: length (unsigned long) followed by data pointer. */
		ptr = stack_top->u.ptr.ptr;
		stack_top->u.s.seq_len = *(unsigned long *) ptr;
		stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL sequence.\n");
			ret = -EINVAL;
			goto end;
		}
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_DYNAMIC:
		/*
		 * Dynamic types in context are looked up
		 * by context get index.
		 */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_DOUBLE:
		/* Doubles are not supported by the kernel interpreter. */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		printk(KERN_WARNING "LTTng: filter: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
605
606 /*
607 * Return 0 (discard), or raise the 0x1 flag (log event).
608 * Currently, other flags are kept for future extensions and have no
609 * effect.
610 */
611 uint64_t lttng_filter_interpret_bytecode(void *filter_data,
612 struct lttng_probe_ctx *lttng_probe_ctx,
613 const char *filter_stack_data)
614 {
615 struct bytecode_runtime *bytecode = filter_data;
616 void *pc, *next_pc, *start_pc;
617 int ret = -EINVAL;
618 uint64_t retval = 0;
619 struct estack _stack;
620 struct estack *stack = &_stack;
621 register int64_t ax = 0, bx = 0;
622 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
623 register int top = FILTER_STACK_EMPTY;
624 #ifndef INTERPRETER_USE_SWITCH
625 static void *dispatch[NR_FILTER_OPS] = {
626 [ FILTER_OP_UNKNOWN ] = &&LABEL_FILTER_OP_UNKNOWN,
627
628 [ FILTER_OP_RETURN ] = &&LABEL_FILTER_OP_RETURN,
629
630 /* binary */
631 [ FILTER_OP_MUL ] = &&LABEL_FILTER_OP_MUL,
632 [ FILTER_OP_DIV ] = &&LABEL_FILTER_OP_DIV,
633 [ FILTER_OP_MOD ] = &&LABEL_FILTER_OP_MOD,
634 [ FILTER_OP_PLUS ] = &&LABEL_FILTER_OP_PLUS,
635 [ FILTER_OP_MINUS ] = &&LABEL_FILTER_OP_MINUS,
636 [ FILTER_OP_BIT_RSHIFT ] = &&LABEL_FILTER_OP_BIT_RSHIFT,
637 [ FILTER_OP_BIT_LSHIFT ] = &&LABEL_FILTER_OP_BIT_LSHIFT,
638 [ FILTER_OP_BIT_AND ] = &&LABEL_FILTER_OP_BIT_AND,
639 [ FILTER_OP_BIT_OR ] = &&LABEL_FILTER_OP_BIT_OR,
640 [ FILTER_OP_BIT_XOR ] = &&LABEL_FILTER_OP_BIT_XOR,
641
642 /* binary comparators */
643 [ FILTER_OP_EQ ] = &&LABEL_FILTER_OP_EQ,
644 [ FILTER_OP_NE ] = &&LABEL_FILTER_OP_NE,
645 [ FILTER_OP_GT ] = &&LABEL_FILTER_OP_GT,
646 [ FILTER_OP_LT ] = &&LABEL_FILTER_OP_LT,
647 [ FILTER_OP_GE ] = &&LABEL_FILTER_OP_GE,
648 [ FILTER_OP_LE ] = &&LABEL_FILTER_OP_LE,
649
650 /* string binary comparator */
651 [ FILTER_OP_EQ_STRING ] = &&LABEL_FILTER_OP_EQ_STRING,
652 [ FILTER_OP_NE_STRING ] = &&LABEL_FILTER_OP_NE_STRING,
653 [ FILTER_OP_GT_STRING ] = &&LABEL_FILTER_OP_GT_STRING,
654 [ FILTER_OP_LT_STRING ] = &&LABEL_FILTER_OP_LT_STRING,
655 [ FILTER_OP_GE_STRING ] = &&LABEL_FILTER_OP_GE_STRING,
656 [ FILTER_OP_LE_STRING ] = &&LABEL_FILTER_OP_LE_STRING,
657
658 /* globbing pattern binary comparator */
659 [ FILTER_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING,
660 [ FILTER_OP_NE_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING,
661
662 /* s64 binary comparator */
663 [ FILTER_OP_EQ_S64 ] = &&LABEL_FILTER_OP_EQ_S64,
664 [ FILTER_OP_NE_S64 ] = &&LABEL_FILTER_OP_NE_S64,
665 [ FILTER_OP_GT_S64 ] = &&LABEL_FILTER_OP_GT_S64,
666 [ FILTER_OP_LT_S64 ] = &&LABEL_FILTER_OP_LT_S64,
667 [ FILTER_OP_GE_S64 ] = &&LABEL_FILTER_OP_GE_S64,
668 [ FILTER_OP_LE_S64 ] = &&LABEL_FILTER_OP_LE_S64,
669
670 /* double binary comparator */
671 [ FILTER_OP_EQ_DOUBLE ] = &&LABEL_FILTER_OP_EQ_DOUBLE,
672 [ FILTER_OP_NE_DOUBLE ] = &&LABEL_FILTER_OP_NE_DOUBLE,
673 [ FILTER_OP_GT_DOUBLE ] = &&LABEL_FILTER_OP_GT_DOUBLE,
674 [ FILTER_OP_LT_DOUBLE ] = &&LABEL_FILTER_OP_LT_DOUBLE,
675 [ FILTER_OP_GE_DOUBLE ] = &&LABEL_FILTER_OP_GE_DOUBLE,
676 [ FILTER_OP_LE_DOUBLE ] = &&LABEL_FILTER_OP_LE_DOUBLE,
677
678 /* Mixed S64-double binary comparators */
679 [ FILTER_OP_EQ_DOUBLE_S64 ] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64,
680 [ FILTER_OP_NE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_NE_DOUBLE_S64,
681 [ FILTER_OP_GT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GT_DOUBLE_S64,
682 [ FILTER_OP_LT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LT_DOUBLE_S64,
683 [ FILTER_OP_GE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GE_DOUBLE_S64,
684 [ FILTER_OP_LE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LE_DOUBLE_S64,
685
686 [ FILTER_OP_EQ_S64_DOUBLE ] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE,
687 [ FILTER_OP_NE_S64_DOUBLE ] = &&LABEL_FILTER_OP_NE_S64_DOUBLE,
688 [ FILTER_OP_GT_S64_DOUBLE ] = &&LABEL_FILTER_OP_GT_S64_DOUBLE,
689 [ FILTER_OP_LT_S64_DOUBLE ] = &&LABEL_FILTER_OP_LT_S64_DOUBLE,
690 [ FILTER_OP_GE_S64_DOUBLE ] = &&LABEL_FILTER_OP_GE_S64_DOUBLE,
691 [ FILTER_OP_LE_S64_DOUBLE ] = &&LABEL_FILTER_OP_LE_S64_DOUBLE,
692
693 /* unary */
694 [ FILTER_OP_UNARY_PLUS ] = &&LABEL_FILTER_OP_UNARY_PLUS,
695 [ FILTER_OP_UNARY_MINUS ] = &&LABEL_FILTER_OP_UNARY_MINUS,
696 [ FILTER_OP_UNARY_NOT ] = &&LABEL_FILTER_OP_UNARY_NOT,
697 [ FILTER_OP_UNARY_PLUS_S64 ] = &&LABEL_FILTER_OP_UNARY_PLUS_S64,
698 [ FILTER_OP_UNARY_MINUS_S64 ] = &&LABEL_FILTER_OP_UNARY_MINUS_S64,
699 [ FILTER_OP_UNARY_NOT_S64 ] = &&LABEL_FILTER_OP_UNARY_NOT_S64,
700 [ FILTER_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE,
701 [ FILTER_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE,
702 [ FILTER_OP_UNARY_NOT_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE,
703
704 /* logical */
705 [ FILTER_OP_AND ] = &&LABEL_FILTER_OP_AND,
706 [ FILTER_OP_OR ] = &&LABEL_FILTER_OP_OR,
707
708 /* load field ref */
709 [ FILTER_OP_LOAD_FIELD_REF ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF,
710 [ FILTER_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING,
711 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE,
712 [ FILTER_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64,
713 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE,
714
715 /* load from immediate operand */
716 [ FILTER_OP_LOAD_STRING ] = &&LABEL_FILTER_OP_LOAD_STRING,
717 [ FILTER_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING,
718 [ FILTER_OP_LOAD_S64 ] = &&LABEL_FILTER_OP_LOAD_S64,
719 [ FILTER_OP_LOAD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_DOUBLE,
720
721 /* cast */
722 [ FILTER_OP_CAST_TO_S64 ] = &&LABEL_FILTER_OP_CAST_TO_S64,
723 [ FILTER_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64,
724 [ FILTER_OP_CAST_NOP ] = &&LABEL_FILTER_OP_CAST_NOP,
725
726 /* get context ref */
727 [ FILTER_OP_GET_CONTEXT_REF ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF,
728 [ FILTER_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING,
729 [ FILTER_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64,
730 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE,
731
732 /* load userspace field ref */
733 [ FILTER_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_STRING,
734 [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE,
735
736 /* Instructions for recursive traversal through composed types. */
737 [ FILTER_OP_GET_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_CONTEXT_ROOT,
738 [ FILTER_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_APP_CONTEXT_ROOT,
739 [ FILTER_OP_GET_PAYLOAD_ROOT ] = &&LABEL_FILTER_OP_GET_PAYLOAD_ROOT,
740
741 [ FILTER_OP_GET_SYMBOL ] = &&LABEL_FILTER_OP_GET_SYMBOL,
742 [ FILTER_OP_GET_SYMBOL_FIELD ] = &&LABEL_FILTER_OP_GET_SYMBOL_FIELD,
743 [ FILTER_OP_GET_INDEX_U16 ] = &&LABEL_FILTER_OP_GET_INDEX_U16,
744 [ FILTER_OP_GET_INDEX_U64 ] = &&LABEL_FILTER_OP_GET_INDEX_U64,
745
746 [ FILTER_OP_LOAD_FIELD ] = &&LABEL_FILTER_OP_LOAD_FIELD,
747 [ FILTER_OP_LOAD_FIELD_S8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S8,
748 [ FILTER_OP_LOAD_FIELD_S16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S16,
749 [ FILTER_OP_LOAD_FIELD_S32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S32,
750 [ FILTER_OP_LOAD_FIELD_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S64,
751 [ FILTER_OP_LOAD_FIELD_U8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U8,
752 [ FILTER_OP_LOAD_FIELD_U16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U16,
753 [ FILTER_OP_LOAD_FIELD_U32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U32,
754 [ FILTER_OP_LOAD_FIELD_U64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U64,
755 [ FILTER_OP_LOAD_FIELD_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_STRING,
756 [ FILTER_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_SEQUENCE,
757 [ FILTER_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_DOUBLE,
758
759 [ FILTER_OP_UNARY_BIT_NOT ] = &&LABEL_FILTER_OP_UNARY_BIT_NOT,
760
761 [ FILTER_OP_RETURN_S64 ] = &&LABEL_FILTER_OP_RETURN_S64,
762 };
763 #endif /* #ifndef INTERPRETER_USE_SWITCH */
764
765 START_OP
766
767 OP(FILTER_OP_UNKNOWN):
768 OP(FILTER_OP_LOAD_FIELD_REF):
769 OP(FILTER_OP_GET_CONTEXT_REF):
770 #ifdef INTERPRETER_USE_SWITCH
771 default:
772 #endif /* INTERPRETER_USE_SWITCH */
773 printk(KERN_WARNING "LTTng: filter: unknown bytecode op %u\n",
774 (unsigned int) *(filter_opcode_t *) pc);
775 ret = -EINVAL;
776 goto end;
777
778 OP(FILTER_OP_RETURN):
779 OP(FILTER_OP_RETURN_S64):
780 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
781 switch (estack_ax_t) {
782 case REG_S64:
783 case REG_U64:
784 retval = !!estack_ax_v;
785 break;
786 case REG_DOUBLE:
787 case REG_STRING:
788 case REG_PTR:
789 case REG_STAR_GLOB_STRING:
790 case REG_TYPE_UNKNOWN:
791 ret = -EINVAL;
792 goto end;
793 }
794 ret = 0;
795 goto end;
796
797 /* binary */
798 OP(FILTER_OP_MUL):
799 OP(FILTER_OP_DIV):
800 OP(FILTER_OP_MOD):
801 OP(FILTER_OP_PLUS):
802 OP(FILTER_OP_MINUS):
803 printk(KERN_WARNING "LTTng: filter: unsupported bytecode op %u\n",
804 (unsigned int) *(filter_opcode_t *) pc);
805 ret = -EINVAL;
806 goto end;
807
808 OP(FILTER_OP_EQ):
809 OP(FILTER_OP_NE):
810 OP(FILTER_OP_GT):
811 OP(FILTER_OP_LT):
812 OP(FILTER_OP_GE):
813 OP(FILTER_OP_LE):
814 printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
815 (unsigned int) *(filter_opcode_t *) pc);
816 ret = -EINVAL;
817 goto end;
818
819 OP(FILTER_OP_EQ_STRING):
820 {
821 int res;
822
823 res = (stack_strcmp(stack, top, "==") == 0);
824 estack_pop(stack, top, ax, bx, ax_t, bx_t);
825 estack_ax_v = res;
826 estack_ax_t = REG_S64;
827 next_pc += sizeof(struct binary_op);
828 PO;
829 }
830 OP(FILTER_OP_NE_STRING):
831 {
832 int res;
833
834 res = (stack_strcmp(stack, top, "!=") != 0);
835 estack_pop(stack, top, ax, bx, ax_t, bx_t);
836 estack_ax_v = res;
837 estack_ax_t = REG_S64;
838 next_pc += sizeof(struct binary_op);
839 PO;
840 }
841 OP(FILTER_OP_GT_STRING):
842 {
843 int res;
844
845 res = (stack_strcmp(stack, top, ">") > 0);
846 estack_pop(stack, top, ax, bx, ax_t, bx_t);
847 estack_ax_v = res;
848 estack_ax_t = REG_S64;
849 next_pc += sizeof(struct binary_op);
850 PO;
851 }
852 OP(FILTER_OP_LT_STRING):
853 {
854 int res;
855
856 res = (stack_strcmp(stack, top, "<") < 0);
857 estack_pop(stack, top, ax, bx, ax_t, bx_t);
858 estack_ax_v = res;
859 estack_ax_t = REG_S64;
860 next_pc += sizeof(struct binary_op);
861 PO;
862 }
863 OP(FILTER_OP_GE_STRING):
864 {
865 int res;
866
867 res = (stack_strcmp(stack, top, ">=") >= 0);
868 estack_pop(stack, top, ax, bx, ax_t, bx_t);
869 estack_ax_v = res;
870 estack_ax_t = REG_S64;
871 next_pc += sizeof(struct binary_op);
872 PO;
873 }
874 OP(FILTER_OP_LE_STRING):
875 {
876 int res;
877
878 res = (stack_strcmp(stack, top, "<=") <= 0);
879 estack_pop(stack, top, ax, bx, ax_t, bx_t);
880 estack_ax_v = res;
881 estack_ax_t = REG_S64;
882 next_pc += sizeof(struct binary_op);
883 PO;
884 }
885
886 OP(FILTER_OP_EQ_STAR_GLOB_STRING):
887 {
888 int res;
889
890 res = (stack_star_glob_match(stack, top, "==") == 0);
891 estack_pop(stack, top, ax, bx, ax_t, bx_t);
892 estack_ax_v = res;
893 estack_ax_t = REG_S64;
894 next_pc += sizeof(struct binary_op);
895 PO;
896 }
897 OP(FILTER_OP_NE_STAR_GLOB_STRING):
898 {
899 int res;
900
901 res = (stack_star_glob_match(stack, top, "!=") != 0);
902 estack_pop(stack, top, ax, bx, ax_t, bx_t);
903 estack_ax_v = res;
904 estack_ax_t = REG_S64;
905 next_pc += sizeof(struct binary_op);
906 PO;
907 }
908
909 OP(FILTER_OP_EQ_S64):
910 {
911 int res;
912
913 res = (estack_bx_v == estack_ax_v);
914 estack_pop(stack, top, ax, bx, ax_t, bx_t);
915 estack_ax_v = res;
916 estack_ax_t = REG_S64;
917 next_pc += sizeof(struct binary_op);
918 PO;
919 }
920 OP(FILTER_OP_NE_S64):
921 {
922 int res;
923
924 res = (estack_bx_v != estack_ax_v);
925 estack_pop(stack, top, ax, bx, ax_t, bx_t);
926 estack_ax_v = res;
927 estack_ax_t = REG_S64;
928 next_pc += sizeof(struct binary_op);
929 PO;
930 }
931 OP(FILTER_OP_GT_S64):
932 {
933 int res;
934
935 res = (estack_bx_v > estack_ax_v);
936 estack_pop(stack, top, ax, bx, ax_t, bx_t);
937 estack_ax_v = res;
938 estack_ax_t = REG_S64;
939 next_pc += sizeof(struct binary_op);
940 PO;
941 }
942 OP(FILTER_OP_LT_S64):
943 {
944 int res;
945
946 res = (estack_bx_v < estack_ax_v);
947 estack_pop(stack, top, ax, bx, ax_t, bx_t);
948 estack_ax_v = res;
949 estack_ax_t = REG_S64;
950 next_pc += sizeof(struct binary_op);
951 PO;
952 }
953 OP(FILTER_OP_GE_S64):
954 {
955 int res;
956
957 res = (estack_bx_v >= estack_ax_v);
958 estack_pop(stack, top, ax, bx, ax_t, bx_t);
959 estack_ax_v = res;
960 estack_ax_t = REG_S64;
961 next_pc += sizeof(struct binary_op);
962 PO;
963 }
964 OP(FILTER_OP_LE_S64):
965 {
966 int res;
967
968 res = (estack_bx_v <= estack_ax_v);
969 estack_pop(stack, top, ax, bx, ax_t, bx_t);
970 estack_ax_v = res;
971 estack_ax_t = REG_S64;
972 next_pc += sizeof(struct binary_op);
973 PO;
974 }
975
976 OP(FILTER_OP_EQ_DOUBLE):
977 OP(FILTER_OP_NE_DOUBLE):
978 OP(FILTER_OP_GT_DOUBLE):
979 OP(FILTER_OP_LT_DOUBLE):
980 OP(FILTER_OP_GE_DOUBLE):
981 OP(FILTER_OP_LE_DOUBLE):
982 {
983 BUG_ON(1);
984 PO;
985 }
986
987 /* Mixed S64-double binary comparators */
988 OP(FILTER_OP_EQ_DOUBLE_S64):
989 OP(FILTER_OP_NE_DOUBLE_S64):
990 OP(FILTER_OP_GT_DOUBLE_S64):
991 OP(FILTER_OP_LT_DOUBLE_S64):
992 OP(FILTER_OP_GE_DOUBLE_S64):
993 OP(FILTER_OP_LE_DOUBLE_S64):
994 OP(FILTER_OP_EQ_S64_DOUBLE):
995 OP(FILTER_OP_NE_S64_DOUBLE):
996 OP(FILTER_OP_GT_S64_DOUBLE):
997 OP(FILTER_OP_LT_S64_DOUBLE):
998 OP(FILTER_OP_GE_S64_DOUBLE):
999 OP(FILTER_OP_LE_S64_DOUBLE):
1000 {
1001 BUG_ON(1);
1002 PO;
1003 }
1004 OP(FILTER_OP_BIT_RSHIFT):
1005 {
1006 int64_t res;
1007
1008 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1009 ret = -EINVAL;
1010 goto end;
1011 }
1012
1013 /* Catch undefined behavior. */
1014 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1015 ret = -EINVAL;
1016 goto end;
1017 }
1018 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1019 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1020 estack_ax_v = res;
1021 estack_ax_t = REG_U64;
1022 next_pc += sizeof(struct binary_op);
1023 PO;
1024 }
1025 OP(FILTER_OP_BIT_LSHIFT):
1026 {
1027 int64_t res;
1028
1029 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1030 ret = -EINVAL;
1031 goto end;
1032 }
1033
1034 /* Catch undefined behavior. */
1035 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1036 ret = -EINVAL;
1037 goto end;
1038 }
1039 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1040 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1041 estack_ax_v = res;
1042 estack_ax_t = REG_U64;
1043 next_pc += sizeof(struct binary_op);
1044 PO;
1045 }
1046 OP(FILTER_OP_BIT_AND):
1047 {
1048 int64_t res;
1049
1050 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1051 ret = -EINVAL;
1052 goto end;
1053 }
1054
1055 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1056 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1057 estack_ax_v = res;
1058 estack_ax_t = REG_U64;
1059 next_pc += sizeof(struct binary_op);
1060 PO;
1061 }
1062 OP(FILTER_OP_BIT_OR):
1063 {
1064 int64_t res;
1065
1066 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1067 ret = -EINVAL;
1068 goto end;
1069 }
1070
1071 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1072 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1073 estack_ax_v = res;
1074 estack_ax_t = REG_U64;
1075 next_pc += sizeof(struct binary_op);
1076 PO;
1077 }
1078 OP(FILTER_OP_BIT_XOR):
1079 {
1080 int64_t res;
1081
1082 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1083 ret = -EINVAL;
1084 goto end;
1085 }
1086
1087 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1088 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1089 estack_ax_v = res;
1090 estack_ax_t = REG_U64;
1091 next_pc += sizeof(struct binary_op);
1092 PO;
1093 }
1094
1095 /* unary */
1096 OP(FILTER_OP_UNARY_PLUS):
1097 OP(FILTER_OP_UNARY_MINUS):
1098 OP(FILTER_OP_UNARY_NOT):
1099 printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
1100 (unsigned int) *(filter_opcode_t *) pc);
1101 ret = -EINVAL;
1102 goto end;
1103
1104
1105 OP(FILTER_OP_UNARY_BIT_NOT):
1106 {
1107 estack_ax_v = ~(uint64_t) estack_ax_v;
1108 estack_ax_t = REG_S64;
1109 next_pc += sizeof(struct unary_op);
1110 PO;
1111 }
1112
1113 OP(FILTER_OP_UNARY_PLUS_S64):
1114 {
1115 next_pc += sizeof(struct unary_op);
1116 PO;
1117 }
1118 OP(FILTER_OP_UNARY_MINUS_S64):
1119 {
1120 estack_ax_v = -estack_ax_v;
1121 estack_ax_t = REG_S64;
1122 next_pc += sizeof(struct unary_op);
1123 PO;
1124 }
1125 OP(FILTER_OP_UNARY_PLUS_DOUBLE):
1126 OP(FILTER_OP_UNARY_MINUS_DOUBLE):
1127 {
1128 BUG_ON(1);
1129 PO;
1130 }
1131 OP(FILTER_OP_UNARY_NOT_S64):
1132 {
1133 estack_ax_v = !estack_ax_v;
1134 estack_ax_t = REG_S64;
1135 next_pc += sizeof(struct unary_op);
1136 PO;
1137 }
1138 OP(FILTER_OP_UNARY_NOT_DOUBLE):
1139 {
1140 BUG_ON(1);
1141 PO;
1142 }
1143
1144 /* logical */
1145 OP(FILTER_OP_AND):
1146 {
1147 struct logical_op *insn = (struct logical_op *) pc;
1148
1149 /* If AX is 0, skip and evaluate to 0 */
1150 if (unlikely(estack_ax_v == 0)) {
1151 dbg_printk("Jumping to bytecode offset %u\n",
1152 (unsigned int) insn->skip_offset);
1153 next_pc = start_pc + insn->skip_offset;
1154 } else {
1155 /* Pop 1 when jump not taken */
1156 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1157 next_pc += sizeof(struct logical_op);
1158 }
1159 PO;
1160 }
1161 OP(FILTER_OP_OR):
1162 {
1163 struct logical_op *insn = (struct logical_op *) pc;
1164
1165 /* If AX is nonzero, skip and evaluate to 1 */
1166
1167 if (unlikely(estack_ax_v != 0)) {
1168 estack_ax_v = 1;
1169 dbg_printk("Jumping to bytecode offset %u\n",
1170 (unsigned int) insn->skip_offset);
1171 next_pc = start_pc + insn->skip_offset;
1172 } else {
1173 /* Pop 1 when jump not taken */
1174 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1175 next_pc += sizeof(struct logical_op);
1176 }
1177 PO;
1178 }
1179
1180
1181 /* load field ref */
1182 OP(FILTER_OP_LOAD_FIELD_REF_STRING):
1183 {
1184 struct load_op *insn = (struct load_op *) pc;
1185 struct field_ref *ref = (struct field_ref *) insn->data;
1186
1187 dbg_printk("load field ref offset %u type string\n",
1188 ref->offset);
1189 estack_push(stack, top, ax, bx, ax_t, bx_t);
1190 estack_ax(stack, top)->u.s.str =
1191 *(const char * const *) &filter_stack_data[ref->offset];
1192 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1193 dbg_printk("Filter warning: loading a NULL string.\n");
1194 ret = -EINVAL;
1195 goto end;
1196 }
1197 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1198 estack_ax(stack, top)->u.s.literal_type =
1199 ESTACK_STRING_LITERAL_TYPE_NONE;
1200 estack_ax(stack, top)->u.s.user = 0;
1201 estack_ax(stack, top)->type = REG_STRING;
1202 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1203 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1204 PO;
1205 }
1206
1207 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE):
1208 {
1209 struct load_op *insn = (struct load_op *) pc;
1210 struct field_ref *ref = (struct field_ref *) insn->data;
1211
1212 dbg_printk("load field ref offset %u type sequence\n",
1213 ref->offset);
1214 estack_push(stack, top, ax, bx, ax_t, bx_t);
1215 estack_ax(stack, top)->u.s.seq_len =
1216 *(unsigned long *) &filter_stack_data[ref->offset];
1217 estack_ax(stack, top)->u.s.str =
1218 *(const char **) (&filter_stack_data[ref->offset
1219 + sizeof(unsigned long)]);
1220 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1221 dbg_printk("Filter warning: loading a NULL sequence.\n");
1222 ret = -EINVAL;
1223 goto end;
1224 }
1225 estack_ax(stack, top)->u.s.literal_type =
1226 ESTACK_STRING_LITERAL_TYPE_NONE;
1227 estack_ax(stack, top)->u.s.user = 0;
1228 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1229 PO;
1230 }
1231
1232 OP(FILTER_OP_LOAD_FIELD_REF_S64):
1233 {
1234 struct load_op *insn = (struct load_op *) pc;
1235 struct field_ref *ref = (struct field_ref *) insn->data;
1236
1237 dbg_printk("load field ref offset %u type s64\n",
1238 ref->offset);
1239 estack_push(stack, top, ax, bx, ax_t, bx_t);
1240 estack_ax_v =
1241 ((struct literal_numeric *) &filter_stack_data[ref->offset])->v;
1242 estack_ax_t = REG_S64;
1243 dbg_printk("ref load s64 %lld\n",
1244 (long long) estack_ax_v);
1245 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1246 PO;
1247 }
1248
1249 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE):
1250 {
1251 BUG_ON(1);
1252 PO;
1253 }
1254
1255 /* load from immediate operand */
1256 OP(FILTER_OP_LOAD_STRING):
1257 {
1258 struct load_op *insn = (struct load_op *) pc;
1259
1260 dbg_printk("load string %s\n", insn->data);
1261 estack_push(stack, top, ax, bx, ax_t, bx_t);
1262 estack_ax(stack, top)->u.s.str = insn->data;
1263 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1264 estack_ax(stack, top)->u.s.literal_type =
1265 ESTACK_STRING_LITERAL_TYPE_PLAIN;
1266 estack_ax(stack, top)->u.s.user = 0;
1267 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1268 PO;
1269 }
1270
1271 OP(FILTER_OP_LOAD_STAR_GLOB_STRING):
1272 {
1273 struct load_op *insn = (struct load_op *) pc;
1274
1275 dbg_printk("load globbing pattern %s\n", insn->data);
1276 estack_push(stack, top, ax, bx, ax_t, bx_t);
1277 estack_ax(stack, top)->u.s.str = insn->data;
1278 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1279 estack_ax(stack, top)->u.s.literal_type =
1280 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
1281 estack_ax(stack, top)->u.s.user = 0;
1282 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1283 PO;
1284 }
1285
1286 OP(FILTER_OP_LOAD_S64):
1287 {
1288 struct load_op *insn = (struct load_op *) pc;
1289
1290 estack_push(stack, top, ax, bx, ax_t, bx_t);
1291 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
1292 estack_ax_t = REG_S64;
1293 dbg_printk("load s64 %lld\n",
1294 (long long) estack_ax_v);
1295 next_pc += sizeof(struct load_op)
1296 + sizeof(struct literal_numeric);
1297 PO;
1298 }
1299
1300 OP(FILTER_OP_LOAD_DOUBLE):
1301 {
1302 BUG_ON(1);
1303 PO;
1304 }
1305
1306 /* cast */
1307 OP(FILTER_OP_CAST_TO_S64):
1308 printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
1309 (unsigned int) *(filter_opcode_t *) pc);
1310 ret = -EINVAL;
1311 goto end;
1312
1313 OP(FILTER_OP_CAST_DOUBLE_TO_S64):
1314 {
1315 BUG_ON(1);
1316 PO;
1317 }
1318
1319 OP(FILTER_OP_CAST_NOP):
1320 {
1321 next_pc += sizeof(struct cast_op);
1322 PO;
1323 }
1324
1325 /* get context ref */
1326 OP(FILTER_OP_GET_CONTEXT_REF_STRING):
1327 {
1328 struct load_op *insn = (struct load_op *) pc;
1329 struct field_ref *ref = (struct field_ref *) insn->data;
1330 struct lttng_ctx_field *ctx_field;
1331 union lttng_ctx_value v;
1332
1333 dbg_printk("get context ref offset %u type string\n",
1334 ref->offset);
1335 ctx_field = &lttng_static_ctx->fields[ref->offset];
1336 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1337 estack_push(stack, top, ax, bx, ax_t, bx_t);
1338 estack_ax(stack, top)->u.s.str = v.str;
1339 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1340 dbg_printk("Filter warning: loading a NULL string.\n");
1341 ret = -EINVAL;
1342 goto end;
1343 }
1344 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1345 estack_ax(stack, top)->u.s.literal_type =
1346 ESTACK_STRING_LITERAL_TYPE_NONE;
1347 estack_ax(stack, top)->u.s.user = 0;
1348 estack_ax(stack, top)->type = REG_STRING;
1349 dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
1350 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1351 PO;
1352 }
1353
1354 OP(FILTER_OP_GET_CONTEXT_REF_S64):
1355 {
1356 struct load_op *insn = (struct load_op *) pc;
1357 struct field_ref *ref = (struct field_ref *) insn->data;
1358 struct lttng_ctx_field *ctx_field;
1359 union lttng_ctx_value v;
1360
1361 dbg_printk("get context ref offset %u type s64\n",
1362 ref->offset);
1363 ctx_field = &lttng_static_ctx->fields[ref->offset];
1364 ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
1365 estack_push(stack, top, ax, bx, ax_t, bx_t);
1366 estack_ax_v = v.s64;
1367 estack_ax_t = REG_S64;
1368 dbg_printk("ref get context s64 %lld\n",
1369 (long long) estack_ax_v);
1370 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1371 PO;
1372 }
1373
1374 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE):
1375 {
1376 BUG_ON(1);
1377 PO;
1378 }
1379
1380 /* load userspace field ref */
1381 OP(FILTER_OP_LOAD_FIELD_REF_USER_STRING):
1382 {
1383 struct load_op *insn = (struct load_op *) pc;
1384 struct field_ref *ref = (struct field_ref *) insn->data;
1385
1386 dbg_printk("load field ref offset %u type user string\n",
1387 ref->offset);
1388 estack_push(stack, top, ax, bx, ax_t, bx_t);
1389 estack_ax(stack, top)->u.s.user_str =
1390 *(const char * const *) &filter_stack_data[ref->offset];
1391 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1392 dbg_printk("Filter warning: loading a NULL string.\n");
1393 ret = -EINVAL;
1394 goto end;
1395 }
1396 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1397 estack_ax(stack, top)->u.s.literal_type =
1398 ESTACK_STRING_LITERAL_TYPE_NONE;
1399 estack_ax(stack, top)->u.s.user = 1;
1400 estack_ax(stack, top)->type = REG_STRING;
1401 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1402 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1403 PO;
1404 }
1405
1406 OP(FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1407 {
1408 struct load_op *insn = (struct load_op *) pc;
1409 struct field_ref *ref = (struct field_ref *) insn->data;
1410
1411 dbg_printk("load field ref offset %u type user sequence\n",
1412 ref->offset);
1413 estack_push(stack, top, ax, bx, ax_t, bx_t);
1414 estack_ax(stack, top)->u.s.seq_len =
1415 *(unsigned long *) &filter_stack_data[ref->offset];
1416 estack_ax(stack, top)->u.s.user_str =
1417 *(const char **) (&filter_stack_data[ref->offset
1418 + sizeof(unsigned long)]);
1419 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1420 dbg_printk("Filter warning: loading a NULL sequence.\n");
1421 ret = -EINVAL;
1422 goto end;
1423 }
1424 estack_ax(stack, top)->u.s.literal_type =
1425 ESTACK_STRING_LITERAL_TYPE_NONE;
1426 estack_ax(stack, top)->u.s.user = 1;
1427 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1428 PO;
1429 }
1430
1431 OP(FILTER_OP_GET_CONTEXT_ROOT):
1432 {
1433 dbg_printk("op get context root\n");
1434 estack_push(stack, top, ax, bx, ax_t, bx_t);
1435 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
1436 /* "field" only needed for variants. */
1437 estack_ax(stack, top)->u.ptr.field = NULL;
1438 estack_ax(stack, top)->type = REG_PTR;
1439 next_pc += sizeof(struct load_op);
1440 PO;
1441 }
1442
1443 OP(FILTER_OP_GET_APP_CONTEXT_ROOT):
1444 {
1445 BUG_ON(1);
1446 PO;
1447 }
1448
1449 OP(FILTER_OP_GET_PAYLOAD_ROOT):
1450 {
1451 dbg_printk("op get app payload root\n");
1452 estack_push(stack, top, ax, bx, ax_t, bx_t);
1453 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
1454 estack_ax(stack, top)->u.ptr.ptr = filter_stack_data;
1455 /* "field" only needed for variants. */
1456 estack_ax(stack, top)->u.ptr.field = NULL;
1457 estack_ax(stack, top)->type = REG_PTR;
1458 next_pc += sizeof(struct load_op);
1459 PO;
1460 }
1461
1462 OP(FILTER_OP_GET_SYMBOL):
1463 {
1464 dbg_printk("op get symbol\n");
1465 switch (estack_ax(stack, top)->u.ptr.type) {
1466 case LOAD_OBJECT:
1467 printk(KERN_WARNING "LTTng: filter: Nested fields not implemented yet.\n");
1468 ret = -EINVAL;
1469 goto end;
1470 case LOAD_ROOT_CONTEXT:
1471 case LOAD_ROOT_APP_CONTEXT:
1472 case LOAD_ROOT_PAYLOAD:
1473 /*
1474 * symbol lookup is performed by
1475 * specialization.
1476 */
1477 ret = -EINVAL;
1478 goto end;
1479 }
1480 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1481 PO;
1482 }
1483
1484 OP(FILTER_OP_GET_SYMBOL_FIELD):
1485 {
1486 /*
1487 * Used for first variant encountered in a
1488 * traversal. Variants are not implemented yet.
1489 */
1490 ret = -EINVAL;
1491 goto end;
1492 }
1493
1494 OP(FILTER_OP_GET_INDEX_U16):
1495 {
1496 struct load_op *insn = (struct load_op *) pc;
1497 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1498
1499 dbg_printk("op get index u16\n");
1500 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1501 if (ret)
1502 goto end;
1503 estack_ax_v = estack_ax(stack, top)->u.v;
1504 estack_ax_t = estack_ax(stack, top)->type;
1505 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1506 PO;
1507 }
1508
1509 OP(FILTER_OP_GET_INDEX_U64):
1510 {
1511 struct load_op *insn = (struct load_op *) pc;
1512 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1513
1514 dbg_printk("op get index u64\n");
1515 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1516 if (ret)
1517 goto end;
1518 estack_ax_v = estack_ax(stack, top)->u.v;
1519 estack_ax_t = estack_ax(stack, top)->type;
1520 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1521 PO;
1522 }
1523
1524 OP(FILTER_OP_LOAD_FIELD):
1525 {
1526 dbg_printk("op load field\n");
1527 ret = dynamic_load_field(estack_ax(stack, top));
1528 if (ret)
1529 goto end;
1530 estack_ax_v = estack_ax(stack, top)->u.v;
1531 estack_ax_t = estack_ax(stack, top)->type;
1532 next_pc += sizeof(struct load_op);
1533 PO;
1534 }
1535
1536 OP(FILTER_OP_LOAD_FIELD_S8):
1537 {
1538 dbg_printk("op load field s8\n");
1539
1540 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1541 estack_ax_t = REG_S64;
1542 next_pc += sizeof(struct load_op);
1543 PO;
1544 }
1545 OP(FILTER_OP_LOAD_FIELD_S16):
1546 {
1547 dbg_printk("op load field s16\n");
1548
1549 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1550 estack_ax_t = REG_S64;
1551 next_pc += sizeof(struct load_op);
1552 PO;
1553 }
1554 OP(FILTER_OP_LOAD_FIELD_S32):
1555 {
1556 dbg_printk("op load field s32\n");
1557
1558 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1559 estack_ax_t = REG_S64;
1560 next_pc += sizeof(struct load_op);
1561 PO;
1562 }
1563 OP(FILTER_OP_LOAD_FIELD_S64):
1564 {
1565 dbg_printk("op load field s64\n");
1566
1567 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1568 estack_ax_t = REG_S64;
1569 next_pc += sizeof(struct load_op);
1570 PO;
1571 }
1572 OP(FILTER_OP_LOAD_FIELD_U8):
1573 {
1574 dbg_printk("op load field u8\n");
1575
1576 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1577 estack_ax_t = REG_S64;
1578 next_pc += sizeof(struct load_op);
1579 PO;
1580 }
1581 OP(FILTER_OP_LOAD_FIELD_U16):
1582 {
1583 dbg_printk("op load field u16\n");
1584
1585 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1586 estack_ax_t = REG_S64;
1587 next_pc += sizeof(struct load_op);
1588 PO;
1589 }
1590 OP(FILTER_OP_LOAD_FIELD_U32):
1591 {
1592 dbg_printk("op load field u32\n");
1593
1594 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1595 estack_ax_t = REG_S64;
1596 next_pc += sizeof(struct load_op);
1597 PO;
1598 }
1599 OP(FILTER_OP_LOAD_FIELD_U64):
1600 {
1601 dbg_printk("op load field u64\n");
1602
1603 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1604 estack_ax_t = REG_S64;
1605 next_pc += sizeof(struct load_op);
1606 PO;
1607 }
1608 OP(FILTER_OP_LOAD_FIELD_DOUBLE):
1609 {
1610 ret = -EINVAL;
1611 goto end;
1612 }
1613
1614 OP(FILTER_OP_LOAD_FIELD_STRING):
1615 {
1616 const char *str;
1617
1618 dbg_printk("op load field string\n");
1619 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
1620 estack_ax(stack, top)->u.s.str = str;
1621 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1622 dbg_printk("Filter warning: loading a NULL string.\n");
1623 ret = -EINVAL;
1624 goto end;
1625 }
1626 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1627 estack_ax(stack, top)->u.s.literal_type =
1628 ESTACK_STRING_LITERAL_TYPE_NONE;
1629 estack_ax(stack, top)->type = REG_STRING;
1630 next_pc += sizeof(struct load_op);
1631 PO;
1632 }
1633
1634 OP(FILTER_OP_LOAD_FIELD_SEQUENCE):
1635 {
1636 const char *ptr;
1637
1638 dbg_printk("op load field string sequence\n");
1639 ptr = estack_ax(stack, top)->u.ptr.ptr;
1640 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
1641 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
1642 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1643 dbg_printk("Filter warning: loading a NULL sequence.\n");
1644 ret = -EINVAL;
1645 goto end;
1646 }
1647 estack_ax(stack, top)->u.s.literal_type =
1648 ESTACK_STRING_LITERAL_TYPE_NONE;
1649 estack_ax(stack, top)->type = REG_STRING;
1650 next_pc += sizeof(struct load_op);
1651 PO;
1652 }
1653
1654 END_OP
1655 end:
1656 /* Return _DISCARD on error. */
1657 if (ret)
1658 return LTTNG_FILTER_DISCARD;
1659 return retval;
1660 }
1661
/*
 * Undefine the interpreter dispatch helper macros (defined earlier in
 * this file for the bytecode loop) so they cannot leak into any code
 * that follows in the translation unit.
 */
#undef START_OP
#undef OP
#undef PO
#undef END_OP