/*
 * filter-visitor-generate-bytecode.c
 *
 * LTTng filter bytecode generation
 *
 * Copyright 2012 - Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License, version 2.1 only,
 * as published by the Free Software Foundation.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this library; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <common/align.h>
#include <common/compat/string.h>
#include <common/macros.h>

#include "filter-bytecode.h"
#include "filter-ir.h"
#include "filter-ast.h"
/* Type-safe-ish maximum; classic double-evaluation caveat applies to macro args. */
#define max_t(type, a, b)	((type) ((a) > (b) ? (a) : (b)))

/* Initial payload capacity (bytes) granted by bytecode_init(). */
#define INIT_ALLOC_SIZE		4

/* Forward declaration: the visitor recurses through visit_node_*(). */
static
int recursive_visit_gen_bytecode(struct filter_parser_ctx *ctx,
		struct ir_op *node);
/*
 * Return the log2 order of count, rounded up: 1U << get_count_order(x)
 * is the smallest power of two >= x.
 * NOTE(review): relies on lttng_fls() (find-last-set, 1-based) from
 * common/compat/string.h — confirm its contract for count == 0.
 */
static inline int get_count_order(unsigned int count)
{
	int order;

	order = lttng_fls(count) - 1;
	/* Not already a power of two: round up to the next order. */
	if (count & (count - 1))
		order++;
	return order;
}
55 int bytecode_init(struct lttng_filter_bytecode_alloc
**fb
)
59 alloc_len
= sizeof(struct lttng_filter_bytecode_alloc
) + INIT_ALLOC_SIZE
;
60 *fb
= calloc(alloc_len
, 1);
64 (*fb
)->alloc_len
= alloc_len
;
70 int32_t bytecode_reserve(struct lttng_filter_bytecode_alloc
**fb
, uint32_t align
, uint32_t len
)
73 uint32_t padding
= offset_align((*fb
)->b
.len
, align
);
74 uint32_t new_len
= (*fb
)->b
.len
+ padding
+ len
;
75 uint32_t new_alloc_len
= sizeof(struct lttng_filter_bytecode_alloc
) + new_len
;
76 uint32_t old_alloc_len
= (*fb
)->alloc_len
;
78 if (new_len
> LTTNG_FILTER_MAX_LEN
)
81 if (new_alloc_len
> old_alloc_len
) {
82 struct lttng_filter_bytecode_alloc
*newptr
;
85 max_t(uint32_t, 1U << get_count_order(new_alloc_len
), old_alloc_len
<< 1);
86 newptr
= realloc(*fb
, new_alloc_len
);
90 /* We zero directly the memory from start of allocation. */
91 memset(&((char *) *fb
)[old_alloc_len
], 0, new_alloc_len
- old_alloc_len
);
92 (*fb
)->alloc_len
= new_alloc_len
;
94 (*fb
)->b
.len
+= padding
;
101 int bytecode_push(struct lttng_filter_bytecode_alloc
**fb
, const void *data
,
102 uint32_t align
, uint32_t len
)
106 offset
= bytecode_reserve(fb
, align
, len
);
109 memcpy(&(*fb
)->b
.data
[offset
], data
, len
);
114 int bytecode_push_logical(struct lttng_filter_bytecode_alloc
**fb
,
115 struct logical_op
*data
,
116 uint32_t align
, uint32_t len
,
117 uint16_t *skip_offset
)
121 offset
= bytecode_reserve(fb
, align
, len
);
124 memcpy(&(*fb
)->b
.data
[offset
], data
, len
);
126 (void *) &((struct logical_op
*) &(*fb
)->b
.data
[offset
])->skip_offset
127 - (void *) &(*fb
)->b
.data
[0];
132 int bytecode_patch(struct lttng_filter_bytecode_alloc
**fb
,
137 if (offset
>= (*fb
)->b
.len
) {
140 memcpy(&(*fb
)->b
.data
[offset
], data
, len
);
145 int visit_node_root(struct filter_parser_ctx
*ctx
, struct ir_op
*node
)
148 struct return_op insn
;
151 ret
= recursive_visit_gen_bytecode(ctx
, node
->u
.root
.child
);
155 /* Generate end of bytecode instruction */
156 insn
.op
= FILTER_OP_RETURN
;
157 return bytecode_push(&ctx
->bytecode
, &insn
, 1, sizeof(insn
));
/*
 * Append `append` to the heap-allocated string *s, reallocating it.
 * *s may be NULL (treated as the empty string).  On success the old
 * buffer is freed and *s points to the new concatenation (caller frees).
 * Returns 0 on success, -1 on allocation failure (*s left untouched).
 */
static
int append_str(char **s, const char *append)
{
	char *old = *s;
	char *new;
	size_t oldlen = (old == NULL) ? 0 : strlen(old);
	size_t appendlen = strlen(append);

	new = calloc(oldlen + appendlen + 1, 1);
	if (!new) {
		return -1;
	}
	if (oldlen) {
		strcpy(new, old);
	}
	strcpy(&new[oldlen], append);
	*s = new;
	free(old);
	return 0;
}
187 int load_expression_legacy_match(const struct ir_load_expression
*exp
,
188 enum filter_op
*op_type
,
191 const struct ir_load_expression_op
*op
;
192 bool need_dot
= false;
196 case IR_LOAD_EXPRESSION_GET_CONTEXT_ROOT
:
197 *op_type
= FILTER_OP_GET_CONTEXT_REF
;
198 if (append_str(symbol
, "$ctx.")) {
203 case IR_LOAD_EXPRESSION_GET_APP_CONTEXT_ROOT
:
204 *op_type
= FILTER_OP_GET_CONTEXT_REF
;
205 if (append_str(symbol
, "$app.")) {
210 case IR_LOAD_EXPRESSION_GET_PAYLOAD_ROOT
:
211 *op_type
= FILTER_OP_LOAD_FIELD_REF
;
215 case IR_LOAD_EXPRESSION_GET_SYMBOL
:
216 case IR_LOAD_EXPRESSION_GET_INDEX
:
217 case IR_LOAD_EXPRESSION_LOAD_FIELD
:
219 return 0; /* no match */
225 return 0; /* no match */
228 case IR_LOAD_EXPRESSION_LOAD_FIELD
:
230 case IR_LOAD_EXPRESSION_GET_SYMBOL
:
231 if (need_dot
&& append_str(symbol
, ".")) {
234 if (append_str(symbol
, op
->u
.symbol
)) {
239 return 0; /* no match */
244 return 1; /* Legacy match */
253 int visit_node_load_expression_legacy(struct filter_parser_ctx
*ctx
,
254 const struct ir_load_expression
*exp
,
255 const struct ir_load_expression_op
*op
)
257 struct load_op
*insn
= NULL
;
258 uint32_t insn_len
= sizeof(struct load_op
)
259 + sizeof(struct field_ref
);
260 struct field_ref ref_offset
;
261 uint32_t reloc_offset_u32
;
262 uint16_t reloc_offset
;
263 enum filter_op op_type
;
267 ret
= load_expression_legacy_match(exp
, &op_type
, &symbol
);
271 insn
= calloc(insn_len
, 1);
277 ref_offset
.offset
= (uint16_t) -1U;
278 memcpy(insn
->data
, &ref_offset
, sizeof(ref_offset
));
279 /* reloc_offset points to struct load_op */
280 reloc_offset_u32
= bytecode_get_len(&ctx
->bytecode
->b
);
281 if (reloc_offset_u32
> LTTNG_FILTER_MAX_LEN
- 1) {
285 reloc_offset
= (uint16_t) reloc_offset_u32
;
286 ret
= bytecode_push(&ctx
->bytecode
, insn
, 1, insn_len
);
291 ret
= bytecode_push(&ctx
->bytecode_reloc
, &reloc_offset
,
292 1, sizeof(reloc_offset
));
296 ret
= bytecode_push(&ctx
->bytecode_reloc
, symbol
,
297 1, strlen(symbol
) + 1);
301 ret
= 1; /* legacy */
309 int visit_node_load_expression(struct filter_parser_ctx
*ctx
,
310 const struct ir_op
*node
)
312 struct ir_load_expression
*exp
;
313 struct ir_load_expression_op
*op
;
316 exp
= node
->u
.load
.u
.expression
;
326 * TODO: if we remove legacy load for application contexts, we
327 * need to update session bytecode parser as well.
329 ret
= visit_node_load_expression_legacy(ctx
, exp
, op
);
334 return 0; /* legacy */
337 for (; op
!= NULL
; op
= op
->next
) {
339 case IR_LOAD_EXPRESSION_GET_CONTEXT_ROOT
:
341 struct load_op
*insn
;
342 uint32_t insn_len
= sizeof(struct load_op
);
345 insn
= calloc(insn_len
, 1);
348 insn
->op
= FILTER_OP_GET_CONTEXT_ROOT
;
349 ret
= bytecode_push(&ctx
->bytecode
, insn
, 1, insn_len
);
356 case IR_LOAD_EXPRESSION_GET_APP_CONTEXT_ROOT
:
358 struct load_op
*insn
;
359 uint32_t insn_len
= sizeof(struct load_op
);
362 insn
= calloc(insn_len
, 1);
365 insn
->op
= FILTER_OP_GET_APP_CONTEXT_ROOT
;
366 ret
= bytecode_push(&ctx
->bytecode
, insn
, 1, insn_len
);
373 case IR_LOAD_EXPRESSION_GET_PAYLOAD_ROOT
:
375 struct load_op
*insn
;
376 uint32_t insn_len
= sizeof(struct load_op
);
379 insn
= calloc(insn_len
, 1);
382 insn
->op
= FILTER_OP_GET_PAYLOAD_ROOT
;
383 ret
= bytecode_push(&ctx
->bytecode
, insn
, 1, insn_len
);
390 case IR_LOAD_EXPRESSION_GET_SYMBOL
:
392 struct load_op
*insn
;
393 uint32_t insn_len
= sizeof(struct load_op
)
394 + sizeof(struct get_symbol
);
395 struct get_symbol symbol_offset
;
396 uint32_t reloc_offset_u32
;
397 uint16_t reloc_offset
;
398 uint32_t bytecode_reloc_offset_u32
;
401 insn
= calloc(insn_len
, 1);
404 insn
->op
= FILTER_OP_GET_SYMBOL
;
405 bytecode_reloc_offset_u32
=
406 bytecode_get_len(&ctx
->bytecode_reloc
->b
)
407 + sizeof(reloc_offset
);
408 symbol_offset
.offset
=
409 (uint16_t) bytecode_reloc_offset_u32
;
410 memcpy(insn
->data
, &symbol_offset
,
411 sizeof(symbol_offset
));
412 /* reloc_offset points to struct load_op */
413 reloc_offset_u32
= bytecode_get_len(&ctx
->bytecode
->b
);
414 if (reloc_offset_u32
> LTTNG_FILTER_MAX_LEN
- 1) {
418 reloc_offset
= (uint16_t) reloc_offset_u32
;
419 ret
= bytecode_push(&ctx
->bytecode
, insn
, 1, insn_len
);
425 ret
= bytecode_push(&ctx
->bytecode_reloc
, &reloc_offset
,
426 1, sizeof(reloc_offset
));
431 ret
= bytecode_push(&ctx
->bytecode_reloc
,
433 1, strlen(op
->u
.symbol
) + 1);
440 case IR_LOAD_EXPRESSION_GET_INDEX
:
442 struct load_op
*insn
;
443 uint32_t insn_len
= sizeof(struct load_op
)
444 + sizeof(struct get_index_u64
);
445 struct get_index_u64 index
;
448 insn
= calloc(insn_len
, 1);
451 insn
->op
= FILTER_OP_GET_INDEX_U64
;
452 index
.index
= op
->u
.index
;
453 memcpy(insn
->data
, &index
, sizeof(index
));
454 ret
= bytecode_push(&ctx
->bytecode
, insn
, 1, insn_len
);
461 case IR_LOAD_EXPRESSION_LOAD_FIELD
:
463 struct load_op
*insn
;
464 uint32_t insn_len
= sizeof(struct load_op
);
467 insn
= calloc(insn_len
, 1);
470 insn
->op
= FILTER_OP_LOAD_FIELD
;
471 ret
= bytecode_push(&ctx
->bytecode
, insn
, 1, insn_len
);
484 int visit_node_load(struct filter_parser_ctx
*ctx
, struct ir_op
*node
)
488 switch (node
->data_type
) {
489 case IR_DATA_UNKNOWN
:
491 fprintf(stderr
, "[error] Unknown data type in %s\n",
497 struct load_op
*insn
;
498 uint32_t insn_len
= sizeof(struct load_op
)
499 + strlen(node
->u
.load
.u
.string
.value
) + 1;
501 insn
= calloc(insn_len
, 1);
505 switch (node
->u
.load
.u
.string
.type
) {
506 case IR_LOAD_STRING_TYPE_GLOB_STAR
:
508 * We explicitly tell the interpreter here that
509 * this load is a full star globbing pattern so
510 * that the appropriate matching function can be
511 * called. Also, see comment below.
513 insn
->op
= FILTER_OP_LOAD_STAR_GLOB_STRING
;
517 * This is the "legacy" string, which includes
518 * star globbing patterns with a star only at
519 * the end. Both "plain" and "star at the end"
520 * literal strings are handled at the same place
521 * by the tracer's filter bytecode interpreter,
522 * whereas full star globbing patterns (stars
523 * can be anywhere in the string) is a special
526 insn
->op
= FILTER_OP_LOAD_STRING
;
530 strcpy(insn
->data
, node
->u
.load
.u
.string
.value
);
531 ret
= bytecode_push(&ctx
->bytecode
, insn
, 1, insn_len
);
535 case IR_DATA_NUMERIC
:
537 struct load_op
*insn
;
538 uint32_t insn_len
= sizeof(struct load_op
)
539 + sizeof(struct literal_numeric
);
541 insn
= calloc(insn_len
, 1);
544 insn
->op
= FILTER_OP_LOAD_S64
;
545 memcpy(insn
->data
, &node
->u
.load
.u
.num
, sizeof(int64_t));
546 ret
= bytecode_push(&ctx
->bytecode
, insn
, 1, insn_len
);
552 struct load_op
*insn
;
553 uint32_t insn_len
= sizeof(struct load_op
)
554 + sizeof(struct literal_double
);
556 insn
= calloc(insn_len
, 1);
559 insn
->op
= FILTER_OP_LOAD_DOUBLE
;
560 memcpy(insn
->data
, &node
->u
.load
.u
.flt
, sizeof(double));
561 ret
= bytecode_push(&ctx
->bytecode
, insn
, 1, insn_len
);
565 case IR_DATA_EXPRESSION
:
566 return visit_node_load_expression(ctx
, node
);
571 int visit_node_unary(struct filter_parser_ctx
*ctx
, struct ir_op
*node
)
574 struct unary_op insn
;
577 ret
= recursive_visit_gen_bytecode(ctx
, node
->u
.unary
.child
);
581 /* Generate end of bytecode instruction */
582 switch (node
->u
.unary
.type
) {
583 case AST_UNARY_UNKNOWN
:
585 fprintf(stderr
, "[error] Unknown unary node type in %s\n",
591 case AST_UNARY_MINUS
:
592 insn
.op
= FILTER_OP_UNARY_MINUS
;
593 return bytecode_push(&ctx
->bytecode
, &insn
, 1, sizeof(insn
));
595 insn
.op
= FILTER_OP_UNARY_NOT
;
596 return bytecode_push(&ctx
->bytecode
, &insn
, 1, sizeof(insn
));
597 case AST_UNARY_BIT_NOT
:
598 insn
.op
= FILTER_OP_UNARY_BIT_NOT
;
599 return bytecode_push(&ctx
->bytecode
, &insn
, 1, sizeof(insn
));
604 * Binary comparator nesting is disallowed. This allows fitting into
608 int visit_node_binary(struct filter_parser_ctx
*ctx
, struct ir_op
*node
)
611 struct binary_op insn
;
614 ret
= recursive_visit_gen_bytecode(ctx
, node
->u
.binary
.left
);
617 ret
= recursive_visit_gen_bytecode(ctx
, node
->u
.binary
.right
);
621 switch (node
->u
.binary
.type
) {
624 fprintf(stderr
, "[error] Unknown unary node type in %s\n",
630 fprintf(stderr
, "[error] Unexpected logical node type in %s\n",
635 insn
.op
= FILTER_OP_MUL
;
638 insn
.op
= FILTER_OP_DIV
;
641 insn
.op
= FILTER_OP_MOD
;
644 insn
.op
= FILTER_OP_PLUS
;
647 insn
.op
= FILTER_OP_MINUS
;
649 case AST_OP_BIT_RSHIFT
:
650 insn
.op
= FILTER_OP_BIT_RSHIFT
;
652 case AST_OP_BIT_LSHIFT
:
653 insn
.op
= FILTER_OP_BIT_LSHIFT
;
656 insn
.op
= FILTER_OP_BIT_AND
;
659 insn
.op
= FILTER_OP_BIT_OR
;
662 insn
.op
= FILTER_OP_BIT_XOR
;
666 insn
.op
= FILTER_OP_EQ
;
669 insn
.op
= FILTER_OP_NE
;
672 insn
.op
= FILTER_OP_GT
;
675 insn
.op
= FILTER_OP_LT
;
678 insn
.op
= FILTER_OP_GE
;
681 insn
.op
= FILTER_OP_LE
;
684 return bytecode_push(&ctx
->bytecode
, &insn
, 1, sizeof(insn
));
688 * A logical op always return a s64 (1 or 0).
691 int visit_node_logical(struct filter_parser_ctx
*ctx
, struct ir_op
*node
)
694 struct logical_op insn
;
695 uint16_t skip_offset_loc
;
698 /* Visit left child */
699 ret
= recursive_visit_gen_bytecode(ctx
, node
->u
.binary
.left
);
702 /* Cast to s64 if float or field ref */
703 if ((node
->u
.binary
.left
->data_type
== IR_DATA_FIELD_REF
704 || node
->u
.binary
.left
->data_type
== IR_DATA_GET_CONTEXT_REF
705 || node
->u
.binary
.left
->data_type
== IR_DATA_EXPRESSION
)
706 || node
->u
.binary
.left
->data_type
== IR_DATA_FLOAT
) {
707 struct cast_op cast_insn
;
709 if (node
->u
.binary
.left
->data_type
== IR_DATA_FIELD_REF
710 || node
->u
.binary
.left
->data_type
== IR_DATA_GET_CONTEXT_REF
711 || node
->u
.binary
.left
->data_type
== IR_DATA_EXPRESSION
) {
712 cast_insn
.op
= FILTER_OP_CAST_TO_S64
;
714 cast_insn
.op
= FILTER_OP_CAST_DOUBLE_TO_S64
;
716 ret
= bytecode_push(&ctx
->bytecode
, &cast_insn
,
717 1, sizeof(cast_insn
));
721 switch (node
->u
.logical
.type
) {
723 fprintf(stderr
, "[error] Unknown node type in %s\n",
728 insn
.op
= FILTER_OP_AND
;
731 insn
.op
= FILTER_OP_OR
;
734 insn
.skip_offset
= (uint16_t) -1UL; /* Temporary */
735 ret
= bytecode_push_logical(&ctx
->bytecode
, &insn
, 1, sizeof(insn
),
739 /* Visit right child */
740 ret
= recursive_visit_gen_bytecode(ctx
, node
->u
.binary
.right
);
743 /* Cast to s64 if float or field ref */
744 if ((node
->u
.binary
.right
->data_type
== IR_DATA_FIELD_REF
745 || node
->u
.binary
.right
->data_type
== IR_DATA_GET_CONTEXT_REF
746 || node
->u
.binary
.right
->data_type
== IR_DATA_EXPRESSION
)
747 || node
->u
.binary
.right
->data_type
== IR_DATA_FLOAT
) {
748 struct cast_op cast_insn
;
750 if (node
->u
.binary
.right
->data_type
== IR_DATA_FIELD_REF
751 || node
->u
.binary
.right
->data_type
== IR_DATA_GET_CONTEXT_REF
752 || node
->u
.binary
.right
->data_type
== IR_DATA_EXPRESSION
) {
753 cast_insn
.op
= FILTER_OP_CAST_TO_S64
;
755 cast_insn
.op
= FILTER_OP_CAST_DOUBLE_TO_S64
;
757 ret
= bytecode_push(&ctx
->bytecode
, &cast_insn
,
758 1, sizeof(cast_insn
));
762 /* We now know where the logical op can skip. */
763 target_loc
= (uint16_t) bytecode_get_len(&ctx
->bytecode
->b
);
764 ret
= bytecode_patch(&ctx
->bytecode
,
765 &target_loc
, /* Offset to jump to */
766 skip_offset_loc
, /* Where to patch */
772 * Postorder traversal of the tree. We need the children result before
773 * we can evaluate the parent.
776 int recursive_visit_gen_bytecode(struct filter_parser_ctx
*ctx
,
782 fprintf(stderr
, "[error] Unknown node type in %s\n",
787 return visit_node_root(ctx
, node
);
789 return visit_node_load(ctx
, node
);
791 return visit_node_unary(ctx
, node
);
793 return visit_node_binary(ctx
, node
);
795 return visit_node_logical(ctx
, node
);
800 void filter_bytecode_free(struct filter_parser_ctx
*ctx
)
808 ctx
->bytecode
= NULL
;
811 if (ctx
->bytecode_reloc
) {
812 free(ctx
->bytecode_reloc
);
813 ctx
->bytecode_reloc
= NULL
;
818 int filter_visitor_bytecode_generate(struct filter_parser_ctx
*ctx
)
822 ret
= bytecode_init(&ctx
->bytecode
);
825 ret
= bytecode_init(&ctx
->bytecode_reloc
);
828 ret
= recursive_visit_gen_bytecode(ctx
, ctx
->ir_root
);
832 /* Finally, append symbol table to bytecode */
833 ctx
->bytecode
->b
.reloc_table_offset
= bytecode_get_len(&ctx
->bytecode
->b
);
834 return bytecode_push(&ctx
->bytecode
, ctx
->bytecode_reloc
->b
.data
,
835 1, bytecode_get_len(&ctx
->bytecode_reloc
->b
));
838 filter_bytecode_free(ctx
);