Fix: bytecode: Validate register type for instructions expecting unknown type
[lttng-modules.git] / src / lttng-filter-interpreter.c
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-filter-interpreter.c
4 *
5 * LTTng modules filter interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
14
15 #include <lttng/filter.h>
16 #include <lttng/string-utils.h>
17
18 LTTNG_STACK_FRAME_NON_STANDARD(lttng_filter_interpret_bytecode);
19
20 /*
21 * get_char should be called with page fault handler disabled if it is expected
22 * to handle user-space read.
23 */
24 static
25 char get_char(struct estack_entry *reg, size_t offset)
26 {
27 if (unlikely(offset >= reg->u.s.seq_len))
28 return '\0';
29 if (reg->u.s.user) {
30 char c;
31
32 /* Handle invalid access as end of string. */
33 if (unlikely(!lttng_access_ok(VERIFY_READ,
34 reg->u.s.user_str + offset,
35 sizeof(c))))
36 return '\0';
37 /* Handle fault (nonzero return value) as end of string. */
38 if (unlikely(__copy_from_user_inatomic(&c,
39 reg->u.s.user_str + offset,
40 sizeof(c))))
41 return '\0';
42 return c;
43 } else {
44 return reg->u.s.str[offset];
45 }
46 }
47
48 /*
49 * -1: wildcard found.
50 * -2: unknown escape char.
51 * 0: normal char.
52 */
53 static
54 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
55 {
56 switch (*c) {
57 case '\\':
58 (*offset)++;
59 *c = get_char(reg, *offset);
60 switch (*c) {
61 case '\\':
62 case '*':
63 return 0;
64 default:
65 return -2;
66 }
67 case '*':
68 return -1;
69 default:
70 return 0;
71 }
72 }
73
74 static
75 char get_char_at_cb(size_t at, void *data)
76 {
77 return get_char(data, at);
78 }
79
80 static
81 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
82 {
83 bool has_user = false;
84 int result;
85 struct estack_entry *pattern_reg;
86 struct estack_entry *candidate_reg;
87
88 /* Disable the page fault handler when reading from userspace. */
89 if (estack_bx(stack, top)->u.s.user
90 || estack_ax(stack, top)->u.s.user) {
91 has_user = true;
92 pagefault_disable();
93 }
94
95 /* Find out which side is the pattern vs. the candidate. */
96 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
97 pattern_reg = estack_ax(stack, top);
98 candidate_reg = estack_bx(stack, top);
99 } else {
100 pattern_reg = estack_bx(stack, top);
101 candidate_reg = estack_ax(stack, top);
102 }
103
104 /* Perform the match operation. */
105 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
106 pattern_reg, get_char_at_cb, candidate_reg);
107 if (has_user)
108 pagefault_enable();
109
110 return result;
111 }
112
/*
 * strcmp()-like comparison of the string registers at stack positions
 * bx and ax, honoring '*' wildcards and '\' escapes in plain string
 * literals.  Returns <0, 0 or >0 (bx relative to ax).  @cmp_type is
 * descriptive only (the operator being implemented).
 */
static
int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
{
	size_t offset_bx = 0, offset_ax = 0;
	int diff, has_user = 0;

	/* User-space reads require the page fault handler disabled. */
	if (estack_bx(stack, top)->u.s.user
			|| estack_ax(stack, top)->u.s.user) {
		has_user = 1;
		pagefault_disable();
	}

	for (;;) {
		int ret;
		/* Set when the bx character came from an unknown escape. */
		int escaped_r0 = 0;
		char char_bx, char_ax;

		char_bx = get_char(estack_bx(stack, top), offset_bx);
		char_ax = get_char(estack_ax(stack, top), offset_ax);

		if (unlikely(char_bx == '\0')) {
			if (char_ax == '\0') {
				/* Both strings end here: equal. */
				diff = 0;
				break;
			} else {
				if (estack_ax(stack, top)->u.s.literal_type ==
						ESTACK_STRING_LITERAL_TYPE_PLAIN) {
					/* A '*' in ax matches the empty tail of bx. */
					ret = parse_char(estack_ax(stack, top),
						&char_ax, &offset_ax);
					if (ret == -1) {
						diff = 0;
						break;
					}
				}
				diff = -1;
				break;
			}
		}
		if (unlikely(char_ax == '\0')) {
			if (estack_bx(stack, top)->u.s.literal_type ==
					ESTACK_STRING_LITERAL_TYPE_PLAIN) {
				/* A '*' in bx matches the empty tail of ax. */
				ret = parse_char(estack_bx(stack, top),
					&char_bx, &offset_bx);
				if (ret == -1) {
					diff = 0;
					break;
				}
			}
			diff = 1;
			break;
		}
		if (estack_bx(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_bx(stack, top),
				&char_bx, &offset_bx);
			if (ret == -1) {
				/* Wildcard in bx: compare equal. */
				diff = 0;
				break;
			} else if (ret == -2) {
				escaped_r0 = 1;
			}
			/* else compare both char */
		}
		if (estack_ax(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_ax(stack, top),
				&char_ax, &offset_ax);
			if (ret == -1) {
				/* Wildcard in ax: compare equal. */
				diff = 0;
				break;
			} else if (ret == -2) {
				/*
				 * Unknown escape on ax only orders it
				 * after bx; on both sides, fall through
				 * to the character comparison.
				 */
				if (!escaped_r0) {
					diff = -1;
					break;
				}
			} else {
				if (escaped_r0) {
					diff = 1;
					break;
				}
			}
		} else {
			if (escaped_r0) {
				diff = 1;
				break;
			}
		}
		diff = char_bx - char_ax;
		if (diff != 0)
			break;
		offset_bx++;
		offset_ax++;
	}
	if (has_user)
		pagefault_enable();

	return diff;
}
211
/*
 * Trivial filter callback: unconditionally discard the event.  All
 * parameters are ignored; the signature matches the generic filter
 * callback type.
 */
uint64_t lttng_filter_false(void *filter_data,
		struct lttng_probe_ctx *lttng_probe_ctx,
		const char *filter_stack_data)
{
	return LTTNG_FILTER_DISCARD;
}
218
#ifdef INTERPRETER_USE_SWITCH

/*
 * Fallback for compilers that do not support taking address of labels.
 *
 * Fix: the bytecode instructions live in bytecode->code[]; the data[]
 * array holds auxiliary specialization data (see the uses of
 * runtime->data[] for get-index records).  The fallback previously
 * initialized the program counter from &bytecode->data[0], diverging
 * from the dispatch-table variant below.
 */

#define START_OP							\
		start_pc = &bytecode->code[0];				\
		for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
				pc = next_pc) {				\
			dbg_printk("LTTng: Executing op %s (%u)\n",	\
				lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
				(unsigned int) *(filter_opcode_t *) pc); \
			switch (*(filter_opcode_t *) pc) {

#define OP(name)	case name

#define PO		break

#define END_OP		}						\
	}

#else

/*
 * Dispatch-table based interpreter (computed goto).
 */

#define START_OP							\
		start_pc = &bytecode->code[0];				\
		pc = next_pc = start_pc;				\
		if (unlikely(pc - start_pc >= bytecode->len))		\
			goto end;					\
		goto *dispatch[*(filter_opcode_t *) pc];

#define OP(name)							\
LABEL_##name

#define PO								\
		pc = next_pc;						\
		goto *dispatch[*(filter_opcode_t *) pc];

#define END_OP

#endif
264
/*
 * True when the register type holds a 64-bit signed integer value.
 * The argument is parenthesized so expressions expand correctly.
 */
#define IS_INTEGER_REGISTER(reg_type) ((reg_type) == REG_S64)
266
/*
 * Populate @ptr from the static context field at index @idx, reading
 * its current value through the field's get_value() callback.
 * Integer and enum fields are materialized into @ptr's embedded s64/u64
 * storage; string-like fields keep a pointer to the value.
 * Returns 0 on success, -EINVAL for unsupported field types.
 */
static int context_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
		struct load_ptr *ptr,
		uint32_t idx)
{

	struct lttng_ctx_field *ctx_field;
	struct lttng_event_field *field;
	union lttng_ctx_value v;

	ctx_field = &lttng_static_ctx->fields[idx];
	field = &ctx_field->event_field;
	ptr->type = LOAD_OBJECT;
	/* field is only used for types nested within variants. */
	ptr->field = NULL;

	switch (field->type.atype) {
	case atype_integer:
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		if (field->type.u.integer.signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	case atype_enum_nestable:
	{
		/* Enums are loaded as their integer container type. */
		const struct lttng_integer_type *itype =
			&field->type.u.enum_nestable.container_type->u.integer;

		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		if (itype->signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	}
	case atype_array_nestable:
		/* Only string-encoded integer arrays (strings) are usable. */
		if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: filter: Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
			printk(KERN_WARNING "LTTng: filter: Only string arrays are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_sequence_nestable:
		/* Same constraint as arrays: must be a string sequence. */
		if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: filter: Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
			printk(KERN_WARNING "LTTng: filter: Only string sequences are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_string:
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_struct_nestable:
		printk(KERN_WARNING "LTTng: filter: Structure type cannot be loaded.\n");
		return -EINVAL;
	case atype_variant_nestable:
		printk(KERN_WARNING "LTTng: filter: Variant type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "LTTng: filter: Unknown type: %d", (int) field->type.atype);
		return -EINVAL;
	}
	return 0;
}
355
/*
 * Apply a get-index operation (described by the filter_get_index_data
 * record at @index in runtime->data) to the object pointer held in
 * @stack_top, descending one level into an array/sequence element, a
 * context field, or the event payload.  Returns 0 on success, negative
 * errno on unsupported or out-of-bound access.
 */
static int dynamic_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
		struct bytecode_runtime *runtime,
		uint64_t index, struct estack_entry *stack_top)
{
	int ret;
	const struct filter_get_index_data *gid;

	/*
	 * Types nested within variants need to perform dynamic lookup
	 * based on the field descriptions. LTTng-UST does not implement
	 * variants for now.
	 */
	if (stack_top->u.ptr.field)
		return -EINVAL;
	gid = (const struct filter_get_index_data *) &runtime->data[index];
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		switch (stack_top->u.ptr.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const char *ptr;

			/* Offset was validated against the static length. */
			WARN_ON_ONCE(gid->offset >= gid->array_len);
			/* Skip count (unsigned long) */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			/* field is only used for types nested within variants. */
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const char *ptr;
			size_t ptr_seq_len;

			/* Sequence layout: length (unsigned long) then data pointer. */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
			/* Runtime bound check against the actual sequence length. */
			if (gid->offset >= gid->elem.len * ptr_seq_len) {
				ret = -EINVAL;
				goto end;
			}
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			/* field is only used for types nested within variants. */
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			printk(KERN_WARNING "LTTng: filter: Nested structures are not supported yet.\n");
			ret = -EINVAL;
			goto end;
		case OBJECT_TYPE_VARIANT:
		default:
			printk(KERN_WARNING "LTTng: filter: Unexpected get index type %d",
				(int) stack_top->u.ptr.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:	/* Fall-through */
	{
		/* Context roots resolve through the static context table. */
		ret = context_get_index(lttng_probe_ctx,
				&stack_top->u.ptr,
				gid->ctx_index);
		if (ret) {
			goto end;
		}
		break;
	}
	case LOAD_ROOT_PAYLOAD:
		stack_top->u.ptr.ptr += gid->offset;
		/* Strings in the payload are stored as a pointer; dereference it. */
		if (gid->elem.type == OBJECT_TYPE_STRING)
			stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
		stack_top->u.ptr.object_type = gid->elem.type;
		stack_top->u.ptr.type = LOAD_OBJECT;
		/* field is only used for types nested within variants. */
		stack_top->u.ptr.field = NULL;
		break;
	}
	return 0;

end:
	return ret;
}
446
/*
 * Load the object referenced by @stack_top (must be a LOAD_OBJECT
 * pointer) into a typed stack register value: integers become REG_S64
 * (byte-swapped when rev_bo is set), strings and string sequences
 * become REG_STRING.  Returns 0 on success, -EINVAL for objects that
 * cannot be loaded as a value.
 */
static int dynamic_load_field(struct estack_entry *stack_top)
{
	int ret;

	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		/* Roots must be resolved with a get-index/get-symbol first. */
		dbg_printk("Filter warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->u.ptr.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printk("op load field s8\n");
		stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_S64;
		break;
	case OBJECT_TYPE_S16:
	{
		int16_t tmp;

		dbg_printk("op load field s16\n");
		tmp = *(int16_t *) stack_top->u.ptr.ptr;
		/* rev_bo: stored in reverse byte order, swap to native. */
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S32:
	{
		int32_t tmp;

		dbg_printk("op load field s32\n");
		tmp = *(int32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S64:
	{
		int64_t tmp;

		dbg_printk("op load field s64\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_U8:
		dbg_printk("op load field u8\n");
		stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_S64;
		break;
	case OBJECT_TYPE_U16:
	{
		uint16_t tmp;

		dbg_printk("op load field u16\n");
		tmp = *(uint16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_U32:
	{
		uint32_t tmp;

		dbg_printk("op load field u32\n");
		tmp = *(uint32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_U64:
	{
		uint64_t tmp;

		dbg_printk("op load field u64\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_STRING:
	{
		const char *str;

		dbg_printk("op load field string\n");
		str = (const char *) stack_top->u.ptr.ptr;
		stack_top->u.s.str = str;
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		/* NUL-terminated: no explicit sequence length. */
		stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_STRING_SEQUENCE:
	{
		const char *ptr;

		dbg_printk("op load field string sequence\n");
		/* Sequence layout: length (unsigned long) then data pointer. */
		ptr = stack_top->u.ptr.ptr;
		stack_top->u.s.seq_len = *(unsigned long *) ptr;
		stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL sequence.\n");
			ret = -EINVAL;
			goto end;
		}
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_DYNAMIC:
		/*
		 * Dynamic types in context are looked up
		 * by context get index.
		 */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_DOUBLE:
		/* Doubles are not supported by the kernel interpreter. */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		printk(KERN_WARNING "LTTng: filter: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
604
605 /*
606 * Return 0 (discard), or raise the 0x1 flag (log event).
607 * Currently, other flags are kept for future extensions and have no
608 * effect.
609 */
610 uint64_t lttng_filter_interpret_bytecode(void *filter_data,
611 struct lttng_probe_ctx *lttng_probe_ctx,
612 const char *filter_stack_data)
613 {
614 struct bytecode_runtime *bytecode = filter_data;
615 void *pc, *next_pc, *start_pc;
616 int ret = -EINVAL;
617 uint64_t retval = 0;
618 struct estack _stack;
619 struct estack *stack = &_stack;
620 register int64_t ax = 0, bx = 0;
621 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
622 register int top = FILTER_STACK_EMPTY;
623 #ifndef INTERPRETER_USE_SWITCH
624 static void *dispatch[NR_FILTER_OPS] = {
625 [ FILTER_OP_UNKNOWN ] = &&LABEL_FILTER_OP_UNKNOWN,
626
627 [ FILTER_OP_RETURN ] = &&LABEL_FILTER_OP_RETURN,
628
629 /* binary */
630 [ FILTER_OP_MUL ] = &&LABEL_FILTER_OP_MUL,
631 [ FILTER_OP_DIV ] = &&LABEL_FILTER_OP_DIV,
632 [ FILTER_OP_MOD ] = &&LABEL_FILTER_OP_MOD,
633 [ FILTER_OP_PLUS ] = &&LABEL_FILTER_OP_PLUS,
634 [ FILTER_OP_MINUS ] = &&LABEL_FILTER_OP_MINUS,
635 [ FILTER_OP_BIT_RSHIFT ] = &&LABEL_FILTER_OP_BIT_RSHIFT,
636 [ FILTER_OP_BIT_LSHIFT ] = &&LABEL_FILTER_OP_BIT_LSHIFT,
637 [ FILTER_OP_BIT_AND ] = &&LABEL_FILTER_OP_BIT_AND,
638 [ FILTER_OP_BIT_OR ] = &&LABEL_FILTER_OP_BIT_OR,
639 [ FILTER_OP_BIT_XOR ] = &&LABEL_FILTER_OP_BIT_XOR,
640
641 /* binary comparators */
642 [ FILTER_OP_EQ ] = &&LABEL_FILTER_OP_EQ,
643 [ FILTER_OP_NE ] = &&LABEL_FILTER_OP_NE,
644 [ FILTER_OP_GT ] = &&LABEL_FILTER_OP_GT,
645 [ FILTER_OP_LT ] = &&LABEL_FILTER_OP_LT,
646 [ FILTER_OP_GE ] = &&LABEL_FILTER_OP_GE,
647 [ FILTER_OP_LE ] = &&LABEL_FILTER_OP_LE,
648
649 /* string binary comparator */
650 [ FILTER_OP_EQ_STRING ] = &&LABEL_FILTER_OP_EQ_STRING,
651 [ FILTER_OP_NE_STRING ] = &&LABEL_FILTER_OP_NE_STRING,
652 [ FILTER_OP_GT_STRING ] = &&LABEL_FILTER_OP_GT_STRING,
653 [ FILTER_OP_LT_STRING ] = &&LABEL_FILTER_OP_LT_STRING,
654 [ FILTER_OP_GE_STRING ] = &&LABEL_FILTER_OP_GE_STRING,
655 [ FILTER_OP_LE_STRING ] = &&LABEL_FILTER_OP_LE_STRING,
656
657 /* globbing pattern binary comparator */
658 [ FILTER_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING,
659 [ FILTER_OP_NE_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING,
660
661 /* s64 binary comparator */
662 [ FILTER_OP_EQ_S64 ] = &&LABEL_FILTER_OP_EQ_S64,
663 [ FILTER_OP_NE_S64 ] = &&LABEL_FILTER_OP_NE_S64,
664 [ FILTER_OP_GT_S64 ] = &&LABEL_FILTER_OP_GT_S64,
665 [ FILTER_OP_LT_S64 ] = &&LABEL_FILTER_OP_LT_S64,
666 [ FILTER_OP_GE_S64 ] = &&LABEL_FILTER_OP_GE_S64,
667 [ FILTER_OP_LE_S64 ] = &&LABEL_FILTER_OP_LE_S64,
668
669 /* double binary comparator */
670 [ FILTER_OP_EQ_DOUBLE ] = &&LABEL_FILTER_OP_EQ_DOUBLE,
671 [ FILTER_OP_NE_DOUBLE ] = &&LABEL_FILTER_OP_NE_DOUBLE,
672 [ FILTER_OP_GT_DOUBLE ] = &&LABEL_FILTER_OP_GT_DOUBLE,
673 [ FILTER_OP_LT_DOUBLE ] = &&LABEL_FILTER_OP_LT_DOUBLE,
674 [ FILTER_OP_GE_DOUBLE ] = &&LABEL_FILTER_OP_GE_DOUBLE,
675 [ FILTER_OP_LE_DOUBLE ] = &&LABEL_FILTER_OP_LE_DOUBLE,
676
677 /* Mixed S64-double binary comparators */
678 [ FILTER_OP_EQ_DOUBLE_S64 ] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64,
679 [ FILTER_OP_NE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_NE_DOUBLE_S64,
680 [ FILTER_OP_GT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GT_DOUBLE_S64,
681 [ FILTER_OP_LT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LT_DOUBLE_S64,
682 [ FILTER_OP_GE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GE_DOUBLE_S64,
683 [ FILTER_OP_LE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LE_DOUBLE_S64,
684
685 [ FILTER_OP_EQ_S64_DOUBLE ] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE,
686 [ FILTER_OP_NE_S64_DOUBLE ] = &&LABEL_FILTER_OP_NE_S64_DOUBLE,
687 [ FILTER_OP_GT_S64_DOUBLE ] = &&LABEL_FILTER_OP_GT_S64_DOUBLE,
688 [ FILTER_OP_LT_S64_DOUBLE ] = &&LABEL_FILTER_OP_LT_S64_DOUBLE,
689 [ FILTER_OP_GE_S64_DOUBLE ] = &&LABEL_FILTER_OP_GE_S64_DOUBLE,
690 [ FILTER_OP_LE_S64_DOUBLE ] = &&LABEL_FILTER_OP_LE_S64_DOUBLE,
691
692 /* unary */
693 [ FILTER_OP_UNARY_PLUS ] = &&LABEL_FILTER_OP_UNARY_PLUS,
694 [ FILTER_OP_UNARY_MINUS ] = &&LABEL_FILTER_OP_UNARY_MINUS,
695 [ FILTER_OP_UNARY_NOT ] = &&LABEL_FILTER_OP_UNARY_NOT,
696 [ FILTER_OP_UNARY_PLUS_S64 ] = &&LABEL_FILTER_OP_UNARY_PLUS_S64,
697 [ FILTER_OP_UNARY_MINUS_S64 ] = &&LABEL_FILTER_OP_UNARY_MINUS_S64,
698 [ FILTER_OP_UNARY_NOT_S64 ] = &&LABEL_FILTER_OP_UNARY_NOT_S64,
699 [ FILTER_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE,
700 [ FILTER_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE,
701 [ FILTER_OP_UNARY_NOT_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE,
702
703 /* logical */
704 [ FILTER_OP_AND ] = &&LABEL_FILTER_OP_AND,
705 [ FILTER_OP_OR ] = &&LABEL_FILTER_OP_OR,
706
707 /* load field ref */
708 [ FILTER_OP_LOAD_FIELD_REF ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF,
709 [ FILTER_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING,
710 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE,
711 [ FILTER_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64,
712 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE,
713
714 /* load from immediate operand */
715 [ FILTER_OP_LOAD_STRING ] = &&LABEL_FILTER_OP_LOAD_STRING,
716 [ FILTER_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING,
717 [ FILTER_OP_LOAD_S64 ] = &&LABEL_FILTER_OP_LOAD_S64,
718 [ FILTER_OP_LOAD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_DOUBLE,
719
720 /* cast */
721 [ FILTER_OP_CAST_TO_S64 ] = &&LABEL_FILTER_OP_CAST_TO_S64,
722 [ FILTER_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64,
723 [ FILTER_OP_CAST_NOP ] = &&LABEL_FILTER_OP_CAST_NOP,
724
725 /* get context ref */
726 [ FILTER_OP_GET_CONTEXT_REF ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF,
727 [ FILTER_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING,
728 [ FILTER_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64,
729 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE,
730
731 /* load userspace field ref */
732 [ FILTER_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_STRING,
733 [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE,
734
735 /* Instructions for recursive traversal through composed types. */
736 [ FILTER_OP_GET_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_CONTEXT_ROOT,
737 [ FILTER_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_APP_CONTEXT_ROOT,
738 [ FILTER_OP_GET_PAYLOAD_ROOT ] = &&LABEL_FILTER_OP_GET_PAYLOAD_ROOT,
739
740 [ FILTER_OP_GET_SYMBOL ] = &&LABEL_FILTER_OP_GET_SYMBOL,
741 [ FILTER_OP_GET_SYMBOL_FIELD ] = &&LABEL_FILTER_OP_GET_SYMBOL_FIELD,
742 [ FILTER_OP_GET_INDEX_U16 ] = &&LABEL_FILTER_OP_GET_INDEX_U16,
743 [ FILTER_OP_GET_INDEX_U64 ] = &&LABEL_FILTER_OP_GET_INDEX_U64,
744
745 [ FILTER_OP_LOAD_FIELD ] = &&LABEL_FILTER_OP_LOAD_FIELD,
746 [ FILTER_OP_LOAD_FIELD_S8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S8,
747 [ FILTER_OP_LOAD_FIELD_S16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S16,
748 [ FILTER_OP_LOAD_FIELD_S32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S32,
749 [ FILTER_OP_LOAD_FIELD_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S64,
750 [ FILTER_OP_LOAD_FIELD_U8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U8,
751 [ FILTER_OP_LOAD_FIELD_U16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U16,
752 [ FILTER_OP_LOAD_FIELD_U32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U32,
753 [ FILTER_OP_LOAD_FIELD_U64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U64,
754 [ FILTER_OP_LOAD_FIELD_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_STRING,
755 [ FILTER_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_SEQUENCE,
756 [ FILTER_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_DOUBLE,
757
758 [ FILTER_OP_UNARY_BIT_NOT ] = &&LABEL_FILTER_OP_UNARY_BIT_NOT,
759
760 [ FILTER_OP_RETURN_S64 ] = &&LABEL_FILTER_OP_RETURN_S64,
761 };
762 #endif /* #ifndef INTERPRETER_USE_SWITCH */
763
764 START_OP
765
766 OP(FILTER_OP_UNKNOWN):
767 OP(FILTER_OP_LOAD_FIELD_REF):
768 OP(FILTER_OP_GET_CONTEXT_REF):
769 #ifdef INTERPRETER_USE_SWITCH
770 default:
771 #endif /* INTERPRETER_USE_SWITCH */
772 printk(KERN_WARNING "LTTng: filter: unknown bytecode op %u\n",
773 (unsigned int) *(filter_opcode_t *) pc);
774 ret = -EINVAL;
775 goto end;
776
777 OP(FILTER_OP_RETURN):
778 OP(FILTER_OP_RETURN_S64):
779 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
780 switch (estack_ax_t) {
781 case REG_S64:
782 retval = !!estack_ax_v;
783 break;
784 case REG_DOUBLE:
785 case REG_STRING:
786 case REG_PTR:
787 case REG_STAR_GLOB_STRING:
788 case REG_TYPE_UNKNOWN:
789 ret = -EINVAL;
790 goto end;
791 }
792 ret = 0;
793 goto end;
794
795 /* binary */
796 OP(FILTER_OP_MUL):
797 OP(FILTER_OP_DIV):
798 OP(FILTER_OP_MOD):
799 OP(FILTER_OP_PLUS):
800 OP(FILTER_OP_MINUS):
801 printk(KERN_WARNING "LTTng: filter: unsupported bytecode op %u\n",
802 (unsigned int) *(filter_opcode_t *) pc);
803 ret = -EINVAL;
804 goto end;
805
806 OP(FILTER_OP_EQ):
807 OP(FILTER_OP_NE):
808 OP(FILTER_OP_GT):
809 OP(FILTER_OP_LT):
810 OP(FILTER_OP_GE):
811 OP(FILTER_OP_LE):
812 printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
813 (unsigned int) *(filter_opcode_t *) pc);
814 ret = -EINVAL;
815 goto end;
816
817 OP(FILTER_OP_EQ_STRING):
818 {
819 int res;
820
821 res = (stack_strcmp(stack, top, "==") == 0);
822 estack_pop(stack, top, ax, bx, ax_t, bx_t);
823 estack_ax_v = res;
824 estack_ax_t = REG_S64;
825 next_pc += sizeof(struct binary_op);
826 PO;
827 }
828 OP(FILTER_OP_NE_STRING):
829 {
830 int res;
831
832 res = (stack_strcmp(stack, top, "!=") != 0);
833 estack_pop(stack, top, ax, bx, ax_t, bx_t);
834 estack_ax_v = res;
835 estack_ax_t = REG_S64;
836 next_pc += sizeof(struct binary_op);
837 PO;
838 }
839 OP(FILTER_OP_GT_STRING):
840 {
841 int res;
842
843 res = (stack_strcmp(stack, top, ">") > 0);
844 estack_pop(stack, top, ax, bx, ax_t, bx_t);
845 estack_ax_v = res;
846 estack_ax_t = REG_S64;
847 next_pc += sizeof(struct binary_op);
848 PO;
849 }
850 OP(FILTER_OP_LT_STRING):
851 {
852 int res;
853
854 res = (stack_strcmp(stack, top, "<") < 0);
855 estack_pop(stack, top, ax, bx, ax_t, bx_t);
856 estack_ax_v = res;
857 estack_ax_t = REG_S64;
858 next_pc += sizeof(struct binary_op);
859 PO;
860 }
861 OP(FILTER_OP_GE_STRING):
862 {
863 int res;
864
865 res = (stack_strcmp(stack, top, ">=") >= 0);
866 estack_pop(stack, top, ax, bx, ax_t, bx_t);
867 estack_ax_v = res;
868 estack_ax_t = REG_S64;
869 next_pc += sizeof(struct binary_op);
870 PO;
871 }
872 OP(FILTER_OP_LE_STRING):
873 {
874 int res;
875
876 res = (stack_strcmp(stack, top, "<=") <= 0);
877 estack_pop(stack, top, ax, bx, ax_t, bx_t);
878 estack_ax_v = res;
879 estack_ax_t = REG_S64;
880 next_pc += sizeof(struct binary_op);
881 PO;
882 }
883
884 OP(FILTER_OP_EQ_STAR_GLOB_STRING):
885 {
886 int res;
887
888 res = (stack_star_glob_match(stack, top, "==") == 0);
889 estack_pop(stack, top, ax, bx, ax_t, bx_t);
890 estack_ax_v = res;
891 estack_ax_t = REG_S64;
892 next_pc += sizeof(struct binary_op);
893 PO;
894 }
895 OP(FILTER_OP_NE_STAR_GLOB_STRING):
896 {
897 int res;
898
899 res = (stack_star_glob_match(stack, top, "!=") != 0);
900 estack_pop(stack, top, ax, bx, ax_t, bx_t);
901 estack_ax_v = res;
902 estack_ax_t = REG_S64;
903 next_pc += sizeof(struct binary_op);
904 PO;
905 }
906
907 OP(FILTER_OP_EQ_S64):
908 {
909 int res;
910
911 res = (estack_bx_v == estack_ax_v);
912 estack_pop(stack, top, ax, bx, ax_t, bx_t);
913 estack_ax_v = res;
914 estack_ax_t = REG_S64;
915 next_pc += sizeof(struct binary_op);
916 PO;
917 }
918 OP(FILTER_OP_NE_S64):
919 {
920 int res;
921
922 res = (estack_bx_v != estack_ax_v);
923 estack_pop(stack, top, ax, bx, ax_t, bx_t);
924 estack_ax_v = res;
925 estack_ax_t = REG_S64;
926 next_pc += sizeof(struct binary_op);
927 PO;
928 }
929 OP(FILTER_OP_GT_S64):
930 {
931 int res;
932
933 res = (estack_bx_v > estack_ax_v);
934 estack_pop(stack, top, ax, bx, ax_t, bx_t);
935 estack_ax_v = res;
936 estack_ax_t = REG_S64;
937 next_pc += sizeof(struct binary_op);
938 PO;
939 }
940 OP(FILTER_OP_LT_S64):
941 {
942 int res;
943
944 res = (estack_bx_v < estack_ax_v);
945 estack_pop(stack, top, ax, bx, ax_t, bx_t);
946 estack_ax_v = res;
947 estack_ax_t = REG_S64;
948 next_pc += sizeof(struct binary_op);
949 PO;
950 }
951 OP(FILTER_OP_GE_S64):
952 {
953 int res;
954
955 res = (estack_bx_v >= estack_ax_v);
956 estack_pop(stack, top, ax, bx, ax_t, bx_t);
957 estack_ax_v = res;
958 estack_ax_t = REG_S64;
959 next_pc += sizeof(struct binary_op);
960 PO;
961 }
962 OP(FILTER_OP_LE_S64):
963 {
964 int res;
965
966 res = (estack_bx_v <= estack_ax_v);
967 estack_pop(stack, top, ax, bx, ax_t, bx_t);
968 estack_ax_v = res;
969 estack_ax_t = REG_S64;
970 next_pc += sizeof(struct binary_op);
971 PO;
972 }
973
974 OP(FILTER_OP_EQ_DOUBLE):
975 OP(FILTER_OP_NE_DOUBLE):
976 OP(FILTER_OP_GT_DOUBLE):
977 OP(FILTER_OP_LT_DOUBLE):
978 OP(FILTER_OP_GE_DOUBLE):
979 OP(FILTER_OP_LE_DOUBLE):
980 {
981 BUG_ON(1);
982 PO;
983 }
984
985 /* Mixed S64-double binary comparators */
986 OP(FILTER_OP_EQ_DOUBLE_S64):
987 OP(FILTER_OP_NE_DOUBLE_S64):
988 OP(FILTER_OP_GT_DOUBLE_S64):
989 OP(FILTER_OP_LT_DOUBLE_S64):
990 OP(FILTER_OP_GE_DOUBLE_S64):
991 OP(FILTER_OP_LE_DOUBLE_S64):
992 OP(FILTER_OP_EQ_S64_DOUBLE):
993 OP(FILTER_OP_NE_S64_DOUBLE):
994 OP(FILTER_OP_GT_S64_DOUBLE):
995 OP(FILTER_OP_LT_S64_DOUBLE):
996 OP(FILTER_OP_GE_S64_DOUBLE):
997 OP(FILTER_OP_LE_S64_DOUBLE):
998 {
999 BUG_ON(1);
1000 PO;
1001 }
1002 OP(FILTER_OP_BIT_RSHIFT):
1003 {
1004 int64_t res;
1005
1006 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1007 ret = -EINVAL;
1008 goto end;
1009 }
1010
1011 /* Catch undefined behavior. */
1012 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1013 ret = -EINVAL;
1014 goto end;
1015 }
1016 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1017 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1018 estack_ax_v = res;
1019 estack_ax_t = REG_S64;
1020 next_pc += sizeof(struct binary_op);
1021 PO;
1022 }
1023 OP(FILTER_OP_BIT_LSHIFT):
1024 {
1025 int64_t res;
1026
1027 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1028 ret = -EINVAL;
1029 goto end;
1030 }
1031
1032 /* Catch undefined behavior. */
1033 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1034 ret = -EINVAL;
1035 goto end;
1036 }
1037 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1038 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1039 estack_ax_v = res;
1040 estack_ax_t = REG_S64;
1041 next_pc += sizeof(struct binary_op);
1042 PO;
1043 }
1044 OP(FILTER_OP_BIT_AND):
1045 {
1046 int64_t res;
1047
1048 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1049 ret = -EINVAL;
1050 goto end;
1051 }
1052
1053 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1054 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1055 estack_ax_v = res;
1056 estack_ax_t = REG_S64;
1057 next_pc += sizeof(struct binary_op);
1058 PO;
1059 }
1060 OP(FILTER_OP_BIT_OR):
1061 {
1062 int64_t res;
1063
1064 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1065 ret = -EINVAL;
1066 goto end;
1067 }
1068
1069 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1070 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1071 estack_ax_v = res;
1072 estack_ax_t = REG_S64;
1073 next_pc += sizeof(struct binary_op);
1074 PO;
1075 }
1076 OP(FILTER_OP_BIT_XOR):
1077 {
1078 int64_t res;
1079
1080 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1081 ret = -EINVAL;
1082 goto end;
1083 }
1084
1085 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1086 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1087 estack_ax_v = res;
1088 estack_ax_t = REG_S64;
1089 next_pc += sizeof(struct binary_op);
1090 PO;
1091 }
1092
1093 /* unary */
1094 OP(FILTER_OP_UNARY_PLUS):
1095 OP(FILTER_OP_UNARY_MINUS):
1096 OP(FILTER_OP_UNARY_NOT):
1097 printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
1098 (unsigned int) *(filter_opcode_t *) pc);
1099 ret = -EINVAL;
1100 goto end;
1101
1102
1103 OP(FILTER_OP_UNARY_BIT_NOT):
1104 {
1105 estack_ax_v = ~(uint64_t) estack_ax_v;
1106 estack_ax_t = REG_S64;
1107 next_pc += sizeof(struct unary_op);
1108 PO;
1109 }
1110
	OP(FILTER_OP_UNARY_PLUS_S64):
	{
		/* Unary plus on a s64 register is a no-op: just advance. */
		next_pc += sizeof(struct unary_op);
		PO;
	}
	OP(FILTER_OP_UNARY_MINUS_S64):
	{
		/* Arithmetic negation of the top-of-stack value. */
		estack_ax_v = -estack_ax_v;
		estack_ax_t = REG_S64;
		next_pc += sizeof(struct unary_op);
		PO;
	}
	OP(FILTER_OP_UNARY_PLUS_DOUBLE):
	OP(FILTER_OP_UNARY_MINUS_DOUBLE):
	{
		/*
		 * Double variants are treated as unreachable in this
		 * interpreter (no floating point in kernel context).
		 */
		BUG_ON(1);
		PO;
	}
	OP(FILTER_OP_UNARY_NOT_S64):
	{
		/* Logical negation: result is 0 or 1. */
		estack_ax_v = !estack_ax_v;
		estack_ax_t = REG_S64;
		next_pc += sizeof(struct unary_op);
		PO;
	}
	OP(FILTER_OP_UNARY_NOT_DOUBLE):
	{
		/* Unreachable: double registers are not produced here. */
		BUG_ON(1);
		PO;
	}
1141
1142 /* logical */
1143 OP(FILTER_OP_AND):
1144 {
1145 struct logical_op *insn = (struct logical_op *) pc;
1146
1147 /* If AX is 0, skip and evaluate to 0 */
1148 if (unlikely(estack_ax_v == 0)) {
1149 dbg_printk("Jumping to bytecode offset %u\n",
1150 (unsigned int) insn->skip_offset);
1151 next_pc = start_pc + insn->skip_offset;
1152 } else {
1153 /* Pop 1 when jump not taken */
1154 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1155 next_pc += sizeof(struct logical_op);
1156 }
1157 PO;
1158 }
1159 OP(FILTER_OP_OR):
1160 {
1161 struct logical_op *insn = (struct logical_op *) pc;
1162
1163 /* If AX is nonzero, skip and evaluate to 1 */
1164
1165 if (unlikely(estack_ax_v != 0)) {
1166 estack_ax_v = 1;
1167 dbg_printk("Jumping to bytecode offset %u\n",
1168 (unsigned int) insn->skip_offset);
1169 next_pc = start_pc + insn->skip_offset;
1170 } else {
1171 /* Pop 1 when jump not taken */
1172 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1173 next_pc += sizeof(struct logical_op);
1174 }
1175 PO;
1176 }
1177
1178
	/* load field ref */
	OP(FILTER_OP_LOAD_FIELD_REF_STRING):
	{
		struct load_op *insn = (struct load_op *) pc;
		struct field_ref *ref = (struct field_ref *) insn->data;

		dbg_printk("load field ref offset %u type string\n",
			ref->offset);
		/* Push a string register loaded from the event payload. */
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax(stack, top)->u.s.str =
			*(const char * const *) &filter_stack_data[ref->offset];
		if (unlikely(!estack_ax(stack, top)->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		/*
		 * LTTNG_SIZE_MAX means "no length bound": the string is
		 * NUL-terminated (see get_char()'s seq_len handling).
		 */
		estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
		estack_ax(stack, top)->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		estack_ax(stack, top)->u.s.user = 0;
		estack_ax(stack, top)->type = REG_STRING;
		dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
		next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
		PO;
	}
1204
	OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE):
	{
		struct load_op *insn = (struct load_op *) pc;
		struct field_ref *ref = (struct field_ref *) insn->data;

		dbg_printk("load field ref offset %u type sequence\n",
			ref->offset);
		/*
		 * Sequence layout in the stack data: an unsigned long length
		 * followed by a pointer to the characters.
		 */
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax(stack, top)->u.s.seq_len =
			*(unsigned long *) &filter_stack_data[ref->offset];
		estack_ax(stack, top)->u.s.str =
			*(const char **) (&filter_stack_data[ref->offset
							+ sizeof(unsigned long)]);
		if (unlikely(!estack_ax(stack, top)->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL sequence.\n");
			ret = -EINVAL;
			goto end;
		}
		estack_ax(stack, top)->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		estack_ax(stack, top)->u.s.user = 0;
		/*
		 * NOTE(review): unlike the string variant above, ->type is
		 * not set to REG_STRING here, so the register keeps a stale
		 * type tag — confirm whether this is intentional.
		 */
		next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
		PO;
	}
1229
	OP(FILTER_OP_LOAD_FIELD_REF_S64):
	{
		struct load_op *insn = (struct load_op *) pc;
		struct field_ref *ref = (struct field_ref *) insn->data;

		dbg_printk("load field ref offset %u type s64\n",
			ref->offset);
		/* Push the 64-bit integer read from the event payload. */
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax_v =
			((struct literal_numeric *) &filter_stack_data[ref->offset])->v;
		estack_ax_t = REG_S64;
		dbg_printk("ref load s64 %lld\n",
			(long long) estack_ax_v);
		next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
		PO;
	}

	OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE):
	{
		/* Unreachable: double field refs are not generated here. */
		BUG_ON(1);
		PO;
	}
1252
	/* load from immediate operand */
	OP(FILTER_OP_LOAD_STRING):
	{
		struct load_op *insn = (struct load_op *) pc;

		dbg_printk("load string %s\n", insn->data);
		/* Push a plain string literal embedded in the bytecode. */
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax(stack, top)->u.s.str = insn->data;
		estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
		estack_ax(stack, top)->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_PLAIN;
		estack_ax(stack, top)->u.s.user = 0;
		/*
		 * NOTE(review): ->type is not set to REG_STRING here;
		 * presumably literal_type is what later consumers check —
		 * verify.
		 */
		next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
		PO;
	}

	OP(FILTER_OP_LOAD_STAR_GLOB_STRING):
	{
		struct load_op *insn = (struct load_op *) pc;

		dbg_printk("load globbing pattern %s\n", insn->data);
		/* Push a star-glob pattern literal (supports '*' wildcards). */
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax(stack, top)->u.s.str = insn->data;
		estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
		estack_ax(stack, top)->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
		estack_ax(stack, top)->u.s.user = 0;
		next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
		PO;
	}

	OP(FILTER_OP_LOAD_S64):
	{
		struct load_op *insn = (struct load_op *) pc;

		/* Push a 64-bit integer literal embedded in the bytecode. */
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax_v = ((struct literal_numeric *) insn->data)->v;
		estack_ax_t = REG_S64;
		dbg_printk("load s64 %lld\n",
			(long long) estack_ax_v);
		next_pc += sizeof(struct load_op)
				+ sizeof(struct literal_numeric);
		PO;
	}

	OP(FILTER_OP_LOAD_DOUBLE):
	{
		/* Unreachable: double literals are not generated here. */
		BUG_ON(1);
		PO;
	}
1303
	/* cast */
	OP(FILTER_OP_CAST_TO_S64):
		/*
		 * Untyped cast must be specialized into CAST_DOUBLE_TO_S64 or
		 * CAST_NOP before interpretation; reject otherwise.
		 */
		printk(KERN_WARNING "LTTng: filter: unsupported non-specialized bytecode op %u\n",
			(unsigned int) *(filter_opcode_t *) pc);
		ret = -EINVAL;
		goto end;

	OP(FILTER_OP_CAST_DOUBLE_TO_S64):
	{
		/* Unreachable: double registers are not produced here. */
		BUG_ON(1);
		PO;
	}

	OP(FILTER_OP_CAST_NOP):
	{
		/* No-op cast: just skip the instruction. */
		next_pc += sizeof(struct cast_op);
		PO;
	}
1322
	/* get context ref */
	OP(FILTER_OP_GET_CONTEXT_REF_STRING):
	{
		struct load_op *insn = (struct load_op *) pc;
		struct field_ref *ref = (struct field_ref *) insn->data;
		struct lttng_ctx_field *ctx_field;
		union lttng_ctx_value v;

		dbg_printk("get context ref offset %u type string\n",
			ref->offset);
		/* Evaluate the context field callback, push result as string. */
		ctx_field = &lttng_static_ctx->fields[ref->offset];
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax(stack, top)->u.s.str = v.str;
		if (unlikely(!estack_ax(stack, top)->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		/* Unbounded NUL-terminated kernel string. */
		estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
		estack_ax(stack, top)->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		estack_ax(stack, top)->u.s.user = 0;
		estack_ax(stack, top)->type = REG_STRING;
		dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
		next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
		PO;
	}

	OP(FILTER_OP_GET_CONTEXT_REF_S64):
	{
		struct load_op *insn = (struct load_op *) pc;
		struct field_ref *ref = (struct field_ref *) insn->data;
		struct lttng_ctx_field *ctx_field;
		union lttng_ctx_value v;

		dbg_printk("get context ref offset %u type s64\n",
			ref->offset);
		/* Evaluate the context field callback, push result as s64. */
		ctx_field = &lttng_static_ctx->fields[ref->offset];
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax_v = v.s64;
		estack_ax_t = REG_S64;
		dbg_printk("ref get context s64 %lld\n",
			(long long) estack_ax_v);
		next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
		PO;
	}

	OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE):
	{
		/* Unreachable: double context refs are not generated here. */
		BUG_ON(1);
		PO;
	}
1377
	/* load userspace field ref */
	OP(FILTER_OP_LOAD_FIELD_REF_USER_STRING):
	{
		struct load_op *insn = (struct load_op *) pc;
		struct field_ref *ref = (struct field_ref *) insn->data;

		dbg_printk("load field ref offset %u type user string\n",
			ref->offset);
		/*
		 * Push a user-space string register; get_char() will perform
		 * the fault-tolerant reads (u.s.user == 1).
		 */
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax(stack, top)->u.s.user_str =
			*(const char * const *) &filter_stack_data[ref->offset];
		/*
		 * NOTE(review): the NULL check reads u.s.str after u.s.user_str
		 * was assigned — confirm these members alias (union) in
		 * struct estack_entry, otherwise this tests stale data.
		 */
		if (unlikely(!estack_ax(stack, top)->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
		estack_ax(stack, top)->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		estack_ax(stack, top)->u.s.user = 1;
		estack_ax(stack, top)->type = REG_STRING;
		dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
		next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
		PO;
	}

	OP(FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE):
	{
		struct load_op *insn = (struct load_op *) pc;
		struct field_ref *ref = (struct field_ref *) insn->data;

		dbg_printk("load field ref offset %u type user sequence\n",
			ref->offset);
		/* Length-prefixed user-space sequence: len then pointer. */
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax(stack, top)->u.s.seq_len =
			*(unsigned long *) &filter_stack_data[ref->offset];
		estack_ax(stack, top)->u.s.user_str =
			*(const char **) (&filter_stack_data[ref->offset
							+ sizeof(unsigned long)]);
		/* NOTE(review): same u.s.str vs u.s.user_str aliasing caveat. */
		if (unlikely(!estack_ax(stack, top)->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL sequence.\n");
			ret = -EINVAL;
			goto end;
		}
		estack_ax(stack, top)->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		estack_ax(stack, top)->u.s.user = 1;
		next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
		PO;
	}
1428
	OP(FILTER_OP_GET_CONTEXT_ROOT):
	{
		dbg_printk("op get context root\n");
		/* Push a pointer register rooted at the context namespace. */
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
		/* "field" only needed for variants. */
		estack_ax(stack, top)->u.ptr.field = NULL;
		estack_ax(stack, top)->type = REG_PTR;
		next_pc += sizeof(struct load_op);
		PO;
	}

	OP(FILTER_OP_GET_APP_CONTEXT_ROOT):
	{
		/*
		 * Treated as unreachable here; presumably app contexts are a
		 * user-space (UST) concept not emitted for this interpreter.
		 */
		BUG_ON(1);
		PO;
	}

	OP(FILTER_OP_GET_PAYLOAD_ROOT):
	{
		dbg_printk("op get app payload root\n");
		/* Push a pointer register rooted at the event payload data. */
		estack_push(stack, top, ax, bx, ax_t, bx_t);
		estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
		estack_ax(stack, top)->u.ptr.ptr = filter_stack_data;
		/* "field" only needed for variants. */
		estack_ax(stack, top)->u.ptr.field = NULL;
		estack_ax(stack, top)->type = REG_PTR;
		next_pc += sizeof(struct load_op);
		PO;
	}
1459
	OP(FILTER_OP_GET_SYMBOL):
	{
		dbg_printk("op get symbol\n");
		/*
		 * Symbol lookups are resolved at specialization time; every
		 * case below therefore rejects the non-specialized form.
		 */
		switch (estack_ax(stack, top)->u.ptr.type) {
		case LOAD_OBJECT:
			printk(KERN_WARNING "LTTng: filter: Nested fields not implemented yet.\n");
			ret = -EINVAL;
			goto end;
		case LOAD_ROOT_CONTEXT:
		case LOAD_ROOT_APP_CONTEXT:
		case LOAD_ROOT_PAYLOAD:
			/*
			 * symbol lookup is performed by
			 * specialization.
			 */
			ret = -EINVAL;
			goto end;
		}
		next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
		PO;
	}

	OP(FILTER_OP_GET_SYMBOL_FIELD):
	{
		/*
		 * Used for first variant encountered in a
		 * traversal. Variants are not implemented yet.
		 */
		ret = -EINVAL;
		goto end;
	}
1491
1492 OP(FILTER_OP_GET_INDEX_U16):
1493 {
1494 struct load_op *insn = (struct load_op *) pc;
1495 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1496
1497 dbg_printk("op get index u16\n");
1498 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1499 if (ret)
1500 goto end;
1501 estack_ax_v = estack_ax(stack, top)->u.v;
1502 estack_ax_t = estack_ax(stack, top)->type;
1503 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1504 PO;
1505 }
1506
1507 OP(FILTER_OP_GET_INDEX_U64):
1508 {
1509 struct load_op *insn = (struct load_op *) pc;
1510 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1511
1512 dbg_printk("op get index u64\n");
1513 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1514 if (ret)
1515 goto end;
1516 estack_ax_v = estack_ax(stack, top)->u.v;
1517 estack_ax_t = estack_ax(stack, top)->type;
1518 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1519 PO;
1520 }
1521
1522 OP(FILTER_OP_LOAD_FIELD):
1523 {
1524 dbg_printk("op load field\n");
1525 ret = dynamic_load_field(estack_ax(stack, top));
1526 if (ret)
1527 goto end;
1528 estack_ax_v = estack_ax(stack, top)->u.v;
1529 estack_ax_t = estack_ax(stack, top)->type;
1530 next_pc += sizeof(struct load_op);
1531 PO;
1532 }
1533
	/*
	 * Specialized field loads: read a fixed-width integer through the
	 * register's u.ptr.ptr and widen it into a s64 register. The pointer
	 * is presumably validated by the specializer — these ops dereference
	 * it unchecked.
	 */
	OP(FILTER_OP_LOAD_FIELD_S8):
	{
		dbg_printk("op load field s8\n");

		estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
		estack_ax_t = REG_S64;
		next_pc += sizeof(struct load_op);
		PO;
	}
	OP(FILTER_OP_LOAD_FIELD_S16):
	{
		dbg_printk("op load field s16\n");

		estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
		estack_ax_t = REG_S64;
		next_pc += sizeof(struct load_op);
		PO;
	}
	OP(FILTER_OP_LOAD_FIELD_S32):
	{
		dbg_printk("op load field s32\n");

		estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
		estack_ax_t = REG_S64;
		next_pc += sizeof(struct load_op);
		PO;
	}
	OP(FILTER_OP_LOAD_FIELD_S64):
	{
		dbg_printk("op load field s64\n");

		estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
		estack_ax_t = REG_S64;
		next_pc += sizeof(struct load_op);
		PO;
	}
	OP(FILTER_OP_LOAD_FIELD_U8):
	{
		dbg_printk("op load field u8\n");

		estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
		estack_ax_t = REG_S64;
		next_pc += sizeof(struct load_op);
		PO;
	}
	OP(FILTER_OP_LOAD_FIELD_U16):
	{
		dbg_printk("op load field u16\n");

		estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
		estack_ax_t = REG_S64;
		next_pc += sizeof(struct load_op);
		PO;
	}
	OP(FILTER_OP_LOAD_FIELD_U32):
	{
		dbg_printk("op load field u32\n");

		estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
		estack_ax_t = REG_S64;
		next_pc += sizeof(struct load_op);
		PO;
	}
	OP(FILTER_OP_LOAD_FIELD_U64):
	{
		dbg_printk("op load field u64\n");

		estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
		estack_ax_t = REG_S64;
		next_pc += sizeof(struct load_op);
		PO;
	}
	OP(FILTER_OP_LOAD_FIELD_DOUBLE):
	{
		/* Double fields are not supported by this interpreter. */
		ret = -EINVAL;
		goto end;
	}
1611
	OP(FILTER_OP_LOAD_FIELD_STRING):
	{
		const char *str;

		dbg_printk("op load field string\n");
		/* Convert the pointer register in-place into a string register. */
		str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
		estack_ax(stack, top)->u.s.str = str;
		if (unlikely(!estack_ax(stack, top)->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		/* Unbounded NUL-terminated string. */
		estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
		estack_ax(stack, top)->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		estack_ax(stack, top)->type = REG_STRING;
		next_pc += sizeof(struct load_op);
		PO;
	}

	OP(FILTER_OP_LOAD_FIELD_SEQUENCE):
	{
		const char *ptr;

		dbg_printk("op load field string sequence\n");
		/* Sequence layout: unsigned long length, then char pointer. */
		ptr = estack_ax(stack, top)->u.ptr.ptr;
		estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
		estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
		if (unlikely(!estack_ax(stack, top)->u.s.str)) {
			dbg_printk("Filter warning: loading a NULL sequence.\n");
			ret = -EINVAL;
			goto end;
		}
		estack_ax(stack, top)->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		estack_ax(stack, top)->type = REG_STRING;
		next_pc += sizeof(struct load_op);
		PO;
	}
1651
1652 END_OP
1653 end:
1654 /* Return _DISCARD on error. */
1655 if (ret)
1656 return LTTNG_FILTER_DISCARD;
1657 return retval;
1658 }
1659
1660 #undef START_OP
1661 #undef OP
1662 #undef PO
1663 #undef END_OP