Clean-up: filter: fix variable shadowing in visit_node_load_expression
lttng-tools.git: src/common/filter/filter-visitor-generate-bytecode.c
/*
 * filter-visitor-generate-bytecode.c
 *
 * LTTng filter bytecode generation
 *
 * Copyright 2012 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * SPDX-License-Identifier: LGPL-2.1-only
 *
 */

#include <stdlib.h>
#include <string.h>
#include <common/align.h>
#include <common/compat/errno.h>
#include <common/compat/string.h>

#include "common/align.h"
#include "common/bytecode/bytecode.h"
#include "common/compat/string.h"
#include "common/macros.h"
#include "filter-ast.h"
#include "filter-ir.h"

#ifndef max_t
#define max_t(type, a, b)	((type) ((a) > (b) ? (a) : (b)))
#endif

static
int recursive_visit_gen_bytecode(struct filter_parser_ctx *ctx,
		struct ir_op *node);

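/*
 * Overwrite `len` bytes at `offset` in the already-emitted bytecode.
 * Used to back-patch placeholder values (e.g. jump targets) once their
 * final value is known.
 */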
static
int bytecode_patch(struct lttng_bytecode_alloc **fb,
		const void *data,
		uint16_t offset,
		uint32_t len)
{
	if (offset >= (*fb)->b.len) {
		return -EINVAL;
	}
	memcpy(&(*fb)->b.data[offset], data, len);
	return 0;
}

static
int visit_node_root(struct filter_parser_ctx *ctx, struct ir_op *node)
{
	int ret;
	struct return_op insn;

	/* Visit child */
	ret = recursive_visit_gen_bytecode(ctx, node->u.root.child);
	if (ret)
		return ret;

	/* Generate end of bytecode instruction */
	insn.op = BYTECODE_OP_RETURN;
	return bytecode_push(&ctx->bytecode, &insn, 1, sizeof(insn));
}

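/*
 * Append `append` to the string at `*s`, reallocating the buffer.
 * On success, `*s` points to the new buffer and the old one is freed.
 * Returns 0 on success, -ENOMEM on allocation failure.
 */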
static
int append_str(char **s, const char *append)
{
	char *old = *s;
	char *new;
	size_t oldlen = (old == NULL) ? 0 : strlen(old);
	size_t appendlen = strlen(append);

	new = calloc(oldlen + appendlen + 1, 1);
	if (!new) {
		return -ENOMEM;
	}
	if (oldlen) {
		strcpy(new, old);
	}
	strcat(new, append);
	*s = new;
	free(old);
	return 0;
}

/*
 * 1: match
 * 0: no match
 * < 0: error
 */
static
int load_expression_legacy_match(const struct ir_load_expression *exp,
		enum bytecode_op *op_type,
		char **symbol)
{
	const struct ir_load_expression_op *op;
	bool need_dot = false;

	op = exp->child;
	switch (op->type) {
	case IR_LOAD_EXPRESSION_GET_CONTEXT_ROOT:
		*op_type = BYTECODE_OP_GET_CONTEXT_REF;
		if (append_str(symbol, "$ctx.")) {
			return -ENOMEM;
		}
		need_dot = false;
		break;
	case IR_LOAD_EXPRESSION_GET_APP_CONTEXT_ROOT:
		*op_type = BYTECODE_OP_GET_CONTEXT_REF;
		if (append_str(symbol, "$app.")) {
			return -ENOMEM;
		}
		need_dot = false;
		break;
	case IR_LOAD_EXPRESSION_GET_PAYLOAD_ROOT:
		*op_type = BYTECODE_OP_LOAD_FIELD_REF;
		need_dot = false;
		break;

	case IR_LOAD_EXPRESSION_GET_SYMBOL:
	case IR_LOAD_EXPRESSION_GET_INDEX:
	case IR_LOAD_EXPRESSION_LOAD_FIELD:
	default:
		return 0;	/* no match */
	}

	for (;;) {
		op = op->next;
		if (!op) {
			return 0;	/* no match */
		}
		switch (op->type) {
		case IR_LOAD_EXPRESSION_LOAD_FIELD:
			goto end;
		case IR_LOAD_EXPRESSION_GET_SYMBOL:
			if (need_dot && append_str(symbol, ".")) {
				return -ENOMEM;
			}
			if (append_str(symbol, op->u.symbol)) {
				return -ENOMEM;
			}
			break;
		default:
			return 0;	/* no match */
		}
		need_dot = true;
	}
end:
	return 1;	/* Legacy match */
}

/*
 * 1: legacy match
 * 0: no legacy match
 * < 0: error
 */
static
int visit_node_load_expression_legacy(struct filter_parser_ctx *ctx,
		const struct ir_load_expression *exp,
		const struct ir_load_expression_op *op)
{
	struct load_op *insn = NULL;
	uint32_t insn_len = sizeof(struct load_op)
		+ sizeof(struct field_ref);
	struct field_ref ref_offset;
	uint32_t reloc_offset_u32;
	uint16_t reloc_offset;
	enum bytecode_op op_type;
	char *symbol = NULL;
	int ret;

	ret = load_expression_legacy_match(exp, &op_type, &symbol);
	if (ret <= 0) {
		goto end;
	}
	insn = calloc(insn_len, 1);
	if (!insn) {
		ret = -ENOMEM;
		goto end;
	}
	insn->op = op_type;
	ref_offset.offset = (uint16_t) -1U;
	memcpy(insn->data, &ref_offset, sizeof(ref_offset));
	/* reloc_offset points to struct load_op */
	reloc_offset_u32 = bytecode_get_len(&ctx->bytecode->b);
	if (reloc_offset_u32 > LTTNG_FILTER_MAX_LEN - 1) {
		ret = -EINVAL;
		goto end;
	}
	reloc_offset = (uint16_t) reloc_offset_u32;
	ret = bytecode_push(&ctx->bytecode, insn, 1, insn_len);
	if (ret) {
		goto end;
	}
	/* append reloc */
	ret = bytecode_push(&ctx->bytecode_reloc, &reloc_offset,
			1, sizeof(reloc_offset));
	if (ret) {
		goto end;
	}
	ret = bytecode_push(&ctx->bytecode_reloc, symbol,
			1, strlen(symbol) + 1);
	if (ret) {
		goto end;
	}
	ret = 1; /* legacy */
end:
	free(insn);
	free(symbol);
	return ret;
}

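/*
 * Emit the bytecode for a load expression. The legacy field-ref form is
 * tried first; if it does not apply, the expression is emitted as a
 * sequence of get-root/get-symbol/get-index/load-field instructions.
 */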
static
int visit_node_load_expression(struct filter_parser_ctx *ctx,
		const struct ir_op *node)
{
	struct ir_load_expression *exp;
	struct ir_load_expression_op *op;
	int ret;

	exp = node->u.load.u.expression;
	if (!exp) {
		return -EINVAL;
	}
	op = exp->child;
	if (!op) {
		return -EINVAL;
	}

	/*
	 * TODO: if we remove legacy load for application contexts, we
	 * need to update session bytecode parser as well.
	 */
	ret = visit_node_load_expression_legacy(ctx, exp, op);
	if (ret < 0) {
		return ret;
	}
	if (ret > 0) {
		return 0;	/* legacy */
	}

	for (; op != NULL; op = op->next) {
		switch (op->type) {
		case IR_LOAD_EXPRESSION_GET_CONTEXT_ROOT:
		{
			ret = bytecode_push_get_context_root(&ctx->bytecode);

			if (ret) {
				return ret;
			}

			break;
		}
		case IR_LOAD_EXPRESSION_GET_APP_CONTEXT_ROOT:
		{
			ret = bytecode_push_get_app_context_root(
					&ctx->bytecode);

			if (ret) {
				return ret;
			}

			break;
		}
		case IR_LOAD_EXPRESSION_GET_PAYLOAD_ROOT:
		{
			ret = bytecode_push_get_payload_root(&ctx->bytecode);

			if (ret) {
				return ret;
			}

			break;
		}
		case IR_LOAD_EXPRESSION_GET_SYMBOL:
		{
			ret = bytecode_push_get_symbol(&ctx->bytecode,
					&ctx->bytecode_reloc, op->u.symbol);

			if (ret) {
				return ret;
			}

			break;
		}
		case IR_LOAD_EXPRESSION_GET_INDEX:
		{
			ret = bytecode_push_get_index_u64(
					&ctx->bytecode, op->u.index);

			if (ret) {
				return ret;
			}

			break;
		}
		case IR_LOAD_EXPRESSION_LOAD_FIELD:
		{
			struct load_op *insn;
			uint32_t insn_len = sizeof(struct load_op);

			insn = calloc(insn_len, 1);
			if (!insn)
				return -ENOMEM;
			insn->op = BYTECODE_OP_LOAD_FIELD;
			ret = bytecode_push(&ctx->bytecode, insn, 1, insn_len);
			free(insn);
			if (ret) {
				return ret;
			}
			break;
		}
		}
	}
	return 0;
}

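/*
 * Emit a load instruction for a leaf node: string, numeric and floating
 * point literals become LOAD_* instructions; expressions are delegated
 * to visit_node_load_expression().
 */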
static
int visit_node_load(struct filter_parser_ctx *ctx, struct ir_op *node)
{
	int ret;

	switch (node->data_type) {
	case IR_DATA_UNKNOWN:
	default:
		fprintf(stderr, "[error] Unknown data type in %s\n",
				__func__);
		return -EINVAL;

	case IR_DATA_STRING:
	{
		struct load_op *insn;
		uint32_t insn_len = sizeof(struct load_op)
			+ strlen(node->u.load.u.string.value) + 1;

		insn = calloc(insn_len, 1);
		if (!insn)
			return -ENOMEM;

		switch (node->u.load.u.string.type) {
		case IR_LOAD_STRING_TYPE_GLOB_STAR:
			/*
			 * We explicitly tell the interpreter here that
			 * this load is a full star globbing pattern so
			 * that the appropriate matching function can be
			 * called. Also, see comment below.
			 */
			insn->op = BYTECODE_OP_LOAD_STAR_GLOB_STRING;
			break;
		default:
			/*
			 * This is the "legacy" string, which includes
			 * star globbing patterns with a star only at
			 * the end. Both "plain" and "star at the end"
			 * literal strings are handled at the same place
			 * by the tracer's filter bytecode interpreter,
			 * whereas full star globbing patterns (stars
			 * can be anywhere in the string) is a special
			 * case.
			 */
			insn->op = BYTECODE_OP_LOAD_STRING;
			break;
		}

		strcpy(insn->data, node->u.load.u.string.value);
		ret = bytecode_push(&ctx->bytecode, insn, 1, insn_len);
		free(insn);
		return ret;
	}
	case IR_DATA_NUMERIC:
	{
		struct load_op *insn;
		uint32_t insn_len = sizeof(struct load_op)
			+ sizeof(struct literal_numeric);

		insn = calloc(insn_len, 1);
		if (!insn)
			return -ENOMEM;
		insn->op = BYTECODE_OP_LOAD_S64;
		memcpy(insn->data, &node->u.load.u.num, sizeof(int64_t));
		ret = bytecode_push(&ctx->bytecode, insn, 1, insn_len);
		free(insn);
		return ret;
	}
	case IR_DATA_FLOAT:
	{
		struct load_op *insn;
		uint32_t insn_len = sizeof(struct load_op)
			+ sizeof(struct literal_double);

		insn = calloc(insn_len, 1);
		if (!insn)
			return -ENOMEM;
		insn->op = BYTECODE_OP_LOAD_DOUBLE;
		memcpy(insn->data, &node->u.load.u.flt, sizeof(double));
		ret = bytecode_push(&ctx->bytecode, insn, 1, insn_len);
		free(insn);
		return ret;
	}
	case IR_DATA_EXPRESSION:
		return visit_node_load_expression(ctx, node);
	}
}

static
int visit_node_unary(struct filter_parser_ctx *ctx, struct ir_op *node)
{
	int ret;
	struct unary_op insn;

	/* Visit child */
	ret = recursive_visit_gen_bytecode(ctx, node->u.unary.child);
	if (ret)
		return ret;

	/* Generate end of bytecode instruction */
	switch (node->u.unary.type) {
	case AST_UNARY_UNKNOWN:
	default:
		fprintf(stderr, "[error] Unknown unary node type in %s\n",
				__func__);
		return -EINVAL;
	case AST_UNARY_PLUS:
		/* Nothing to do. */
		return 0;
	case AST_UNARY_MINUS:
		insn.op = BYTECODE_OP_UNARY_MINUS;
		return bytecode_push(&ctx->bytecode, &insn, 1, sizeof(insn));
	case AST_UNARY_NOT:
		insn.op = BYTECODE_OP_UNARY_NOT;
		return bytecode_push(&ctx->bytecode, &insn, 1, sizeof(insn));
	case AST_UNARY_BIT_NOT:
		insn.op = BYTECODE_OP_UNARY_BIT_NOT;
		return bytecode_push(&ctx->bytecode, &insn, 1, sizeof(insn));
	}
}

/*
 * Binary comparator nesting is disallowed. This allows fitting into
 * only 2 registers.
 */
static
int visit_node_binary(struct filter_parser_ctx *ctx, struct ir_op *node)
{
	int ret;
	struct binary_op insn;

	/* Visit child */
	ret = recursive_visit_gen_bytecode(ctx, node->u.binary.left);
	if (ret)
		return ret;
	ret = recursive_visit_gen_bytecode(ctx, node->u.binary.right);
	if (ret)
		return ret;

	switch (node->u.binary.type) {
	case AST_OP_UNKNOWN:
	default:
		fprintf(stderr, "[error] Unknown binary node type in %s\n",
				__func__);
		return -EINVAL;

	case AST_OP_AND:
	case AST_OP_OR:
		fprintf(stderr, "[error] Unexpected logical node type in %s\n",
				__func__);
		return -EINVAL;

	case AST_OP_MUL:
		insn.op = BYTECODE_OP_MUL;
		break;
	case AST_OP_DIV:
		insn.op = BYTECODE_OP_DIV;
		break;
	case AST_OP_MOD:
		insn.op = BYTECODE_OP_MOD;
		break;
	case AST_OP_PLUS:
		insn.op = BYTECODE_OP_PLUS;
		break;
	case AST_OP_MINUS:
		insn.op = BYTECODE_OP_MINUS;
		break;
	case AST_OP_BIT_RSHIFT:
		insn.op = BYTECODE_OP_BIT_RSHIFT;
		break;
	case AST_OP_BIT_LSHIFT:
		insn.op = BYTECODE_OP_BIT_LSHIFT;
		break;
	case AST_OP_BIT_AND:
		insn.op = BYTECODE_OP_BIT_AND;
		break;
	case AST_OP_BIT_OR:
		insn.op = BYTECODE_OP_BIT_OR;
		break;
	case AST_OP_BIT_XOR:
		insn.op = BYTECODE_OP_BIT_XOR;
		break;

	case AST_OP_EQ:
		insn.op = BYTECODE_OP_EQ;
		break;
	case AST_OP_NE:
		insn.op = BYTECODE_OP_NE;
		break;
	case AST_OP_GT:
		insn.op = BYTECODE_OP_GT;
		break;
	case AST_OP_LT:
		insn.op = BYTECODE_OP_LT;
		break;
	case AST_OP_GE:
		insn.op = BYTECODE_OP_GE;
		break;
	case AST_OP_LE:
		insn.op = BYTECODE_OP_LE;
		break;
	}
	return bytecode_push(&ctx->bytecode, &insn, 1, sizeof(insn));
}

/*
 * A logical op always returns a s64 (1 or 0).
 */
static
int visit_node_logical(struct filter_parser_ctx *ctx, struct ir_op *node)
{
	int ret;
	struct logical_op insn;
	uint16_t skip_offset_loc;
	uint16_t target_loc;

	/* Visit left child */
	ret = recursive_visit_gen_bytecode(ctx, node->u.binary.left);
	if (ret)
		return ret;
	/* Cast to s64 if float or field ref */
	if ((node->u.binary.left->data_type == IR_DATA_FIELD_REF
			|| node->u.binary.left->data_type == IR_DATA_GET_CONTEXT_REF
			|| node->u.binary.left->data_type == IR_DATA_EXPRESSION)
			|| node->u.binary.left->data_type == IR_DATA_FLOAT) {
		struct cast_op cast_insn;

		if (node->u.binary.left->data_type == IR_DATA_FIELD_REF
				|| node->u.binary.left->data_type == IR_DATA_GET_CONTEXT_REF
				|| node->u.binary.left->data_type == IR_DATA_EXPRESSION) {
			cast_insn.op = BYTECODE_OP_CAST_TO_S64;
		} else {
			cast_insn.op = BYTECODE_OP_CAST_DOUBLE_TO_S64;
		}
		ret = bytecode_push(&ctx->bytecode, &cast_insn,
				1, sizeof(cast_insn));
		if (ret)
			return ret;
	}
	switch (node->u.logical.type) {
	default:
		fprintf(stderr, "[error] Unknown node type in %s\n",
				__func__);
		return -EINVAL;

	case AST_OP_AND:
		insn.op = BYTECODE_OP_AND;
		break;
	case AST_OP_OR:
		insn.op = BYTECODE_OP_OR;
		break;
	}
	insn.skip_offset = (uint16_t) -1UL;	/* Temporary */
	ret = bytecode_push_logical(&ctx->bytecode, &insn, 1, sizeof(insn),
			&skip_offset_loc);
	if (ret)
		return ret;
	/* Visit right child */
	ret = recursive_visit_gen_bytecode(ctx, node->u.binary.right);
	if (ret)
		return ret;
	/* Cast to s64 if float or field ref */
	if ((node->u.binary.right->data_type == IR_DATA_FIELD_REF
			|| node->u.binary.right->data_type == IR_DATA_GET_CONTEXT_REF
			|| node->u.binary.right->data_type == IR_DATA_EXPRESSION)
			|| node->u.binary.right->data_type == IR_DATA_FLOAT) {
		struct cast_op cast_insn;

		if (node->u.binary.right->data_type == IR_DATA_FIELD_REF
				|| node->u.binary.right->data_type == IR_DATA_GET_CONTEXT_REF
				|| node->u.binary.right->data_type == IR_DATA_EXPRESSION) {
			cast_insn.op = BYTECODE_OP_CAST_TO_S64;
		} else {
			cast_insn.op = BYTECODE_OP_CAST_DOUBLE_TO_S64;
		}
		ret = bytecode_push(&ctx->bytecode, &cast_insn,
				1, sizeof(cast_insn));
		if (ret)
			return ret;
	}
	/* We now know where the logical op can skip. */
	target_loc = (uint16_t) bytecode_get_len(&ctx->bytecode->b);
	ret = bytecode_patch(&ctx->bytecode,
			&target_loc,		/* Offset to jump to */
			skip_offset_loc,	/* Where to patch */
			sizeof(uint16_t));
	return ret;
}

/*
 * Postorder traversal of the tree. We need the children result before
 * we can evaluate the parent.
 */
static
int recursive_visit_gen_bytecode(struct filter_parser_ctx *ctx,
		struct ir_op *node)
{
	switch (node->op) {
	case IR_OP_UNKNOWN:
	default:
		fprintf(stderr, "[error] Unknown node type in %s\n",
				__func__);
		return -EINVAL;

	case IR_OP_ROOT:
		return visit_node_root(ctx, node);
	case IR_OP_LOAD:
		return visit_node_load(ctx, node);
	case IR_OP_UNARY:
		return visit_node_unary(ctx, node);
	case IR_OP_BINARY:
		return visit_node_binary(ctx, node);
	case IR_OP_LOGICAL:
		return visit_node_logical(ctx, node);
	}
}

LTTNG_HIDDEN
void filter_bytecode_free(struct filter_parser_ctx *ctx)
{
	if (!ctx) {
		return;
	}

	if (ctx->bytecode) {
		free(ctx->bytecode);
		ctx->bytecode = NULL;
	}

	if (ctx->bytecode_reloc) {
		free(ctx->bytecode_reloc);
		ctx->bytecode_reloc = NULL;
	}
}

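/*
 * Entry point of bytecode generation: initialize the bytecode and
 * relocation buffers, visit the IR tree, then append the relocation
 * (symbol) table at the end of the generated bytecode.
 */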
LTTNG_HIDDEN
int filter_visitor_bytecode_generate(struct filter_parser_ctx *ctx)
{
	int ret;

	ret = bytecode_init(&ctx->bytecode);
	if (ret)
		return ret;
	ret = bytecode_init(&ctx->bytecode_reloc);
	if (ret)
		goto error;
	ret = recursive_visit_gen_bytecode(ctx, ctx->ir_root);
	if (ret)
		goto error;

	/* Finally, append symbol table to bytecode */
	ctx->bytecode->b.reloc_table_offset = bytecode_get_len(&ctx->bytecode->b);
	return bytecode_push(&ctx->bytecode, ctx->bytecode_reloc->b.data,
			1, bytecode_get_len(&ctx->bytecode_reloc->b));

error:
	filter_bytecode_free(ctx);
	return ret;
}