/*
 * lttng-filter-interpreter.c
 *
 * LTTng UST filter interpreter.
 *
 * Copyright (C) 2010-2012 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; only
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
23 #include "lttng-filter.h"
 * -2: unknown escape character.
32 int parse_char(const char **p
)
52 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
54 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
61 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
>= estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
62 if (q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
65 if (estack_ax(stack
, top
)->u
.s
.literal
) {
73 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
74 if (estack_bx(stack
, top
)->u
.s
.literal
) {
81 if (estack_bx(stack
, top
)->u
.s
.literal
) {
85 } else if (ret
== -2) {
88 /* else compare both char */
90 if (estack_ax(stack
, top
)->u
.s
.literal
) {
94 } else if (ret
== -2) {
114 uint64_t lttng_filter_false(void *filter_data
,
115 const char *filter_stack_data
)
120 #ifdef INTERPRETER_USE_SWITCH
123 * Fallback for compilers that do not support taking address of labels.
127 start_pc = &bytecode->data[0]; \
128 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
130 dbg_printf("Executing op %s (%u)\n", \
131 print_op((unsigned int) *(filter_opcode_t *) pc), \
132 (unsigned int) *(filter_opcode_t *) pc); \
133 switch (*(filter_opcode_t *) pc) {
135 #define OP(name) case name
145 * Dispatch-table based interpreter.
149 start_pc = &bytecode->data[0]; \
150 pc = next_pc = start_pc; \
151 if (unlikely(pc - start_pc >= bytecode->len)) \
153 goto *dispatch[*(filter_opcode_t *) pc];
160 goto *dispatch[*(filter_opcode_t *) pc];
 * Return 0 (discard), or raise the 0x1 flag (log event).
 * Currently, other flags are kept for future extensions and have no
 * effect.
171 uint64_t lttng_filter_interpret_bytecode(void *filter_data
,
172 const char *filter_stack_data
)
174 struct bytecode_runtime
*bytecode
= filter_data
;
175 void *pc
, *next_pc
, *start_pc
;
178 struct estack _stack
;
179 struct estack
*stack
= &_stack
;
180 register int64_t ax
= 0, bx
= 0;
181 register int top
= FILTER_STACK_EMPTY
;
182 #ifndef INTERPRETER_USE_SWITCH
183 static void *dispatch
[NR_FILTER_OPS
] = {
184 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
186 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
189 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
190 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
191 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
192 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
193 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
194 [ FILTER_OP_RSHIFT
] = &&LABEL_FILTER_OP_RSHIFT
,
195 [ FILTER_OP_LSHIFT
] = &&LABEL_FILTER_OP_LSHIFT
,
196 [ FILTER_OP_BIN_AND
] = &&LABEL_FILTER_OP_BIN_AND
,
197 [ FILTER_OP_BIN_OR
] = &&LABEL_FILTER_OP_BIN_OR
,
198 [ FILTER_OP_BIN_XOR
] = &&LABEL_FILTER_OP_BIN_XOR
,
200 /* binary comparators */
201 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
202 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
203 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
204 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
205 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
206 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
208 /* string binary comparator */
209 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
210 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
211 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
212 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
213 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
214 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
216 /* s64 binary comparator */
217 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
218 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
219 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
220 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
221 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
222 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
224 /* double binary comparator */
225 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
226 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
227 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
228 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
229 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
230 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
232 /* Mixed S64-double binary comparators */
233 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
234 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
235 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
236 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
237 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
238 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
240 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
241 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
242 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
243 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
244 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
245 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
248 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
249 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
250 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
251 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
252 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
253 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
254 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
255 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
256 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
259 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
260 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
263 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
264 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
265 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
266 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
267 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
269 /* load from immediate operand */
270 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
271 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
272 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
275 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
276 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
277 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
279 /* get context ref */
280 [ FILTER_OP_GET_CONTEXT_REF
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF
,
281 [ FILTER_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING
,
282 [ FILTER_OP_GET_CONTEXT_REF_S64
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64
,
283 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE
,
285 #endif /* #ifndef INTERPRETER_USE_SWITCH */
289 OP(FILTER_OP_UNKNOWN
):
290 OP(FILTER_OP_LOAD_FIELD_REF
):
291 OP(FILTER_OP_GET_CONTEXT_REF
):
292 #ifdef INTERPRETER_USE_SWITCH
294 #endif /* INTERPRETER_USE_SWITCH */
295 ERR("unknown bytecode op %u\n",
296 (unsigned int) *(filter_opcode_t
*) pc
);
300 OP(FILTER_OP_RETURN
):
301 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
302 retval
= !!estack_ax_v
;
312 OP(FILTER_OP_RSHIFT
):
313 OP(FILTER_OP_LSHIFT
):
314 OP(FILTER_OP_BIN_AND
):
315 OP(FILTER_OP_BIN_OR
):
316 OP(FILTER_OP_BIN_XOR
):
317 ERR("unsupported bytecode op %u\n",
318 (unsigned int) *(filter_opcode_t
*) pc
);
328 ERR("unsupported non-specialized bytecode op %u\n",
329 (unsigned int) *(filter_opcode_t
*) pc
);
333 OP(FILTER_OP_EQ_STRING
):
337 res
= (stack_strcmp(stack
, top
, "==") == 0);
338 estack_pop(stack
, top
, ax
, bx
);
340 next_pc
+= sizeof(struct binary_op
);
343 OP(FILTER_OP_NE_STRING
):
347 res
= (stack_strcmp(stack
, top
, "!=") != 0);
348 estack_pop(stack
, top
, ax
, bx
);
350 next_pc
+= sizeof(struct binary_op
);
353 OP(FILTER_OP_GT_STRING
):
357 res
= (stack_strcmp(stack
, top
, ">") > 0);
358 estack_pop(stack
, top
, ax
, bx
);
360 next_pc
+= sizeof(struct binary_op
);
363 OP(FILTER_OP_LT_STRING
):
367 res
= (stack_strcmp(stack
, top
, "<") < 0);
368 estack_pop(stack
, top
, ax
, bx
);
370 next_pc
+= sizeof(struct binary_op
);
373 OP(FILTER_OP_GE_STRING
):
377 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
378 estack_pop(stack
, top
, ax
, bx
);
380 next_pc
+= sizeof(struct binary_op
);
383 OP(FILTER_OP_LE_STRING
):
387 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
388 estack_pop(stack
, top
, ax
, bx
);
390 next_pc
+= sizeof(struct binary_op
);
394 OP(FILTER_OP_EQ_S64
):
398 res
= (estack_bx_v
== estack_ax_v
);
399 estack_pop(stack
, top
, ax
, bx
);
401 next_pc
+= sizeof(struct binary_op
);
404 OP(FILTER_OP_NE_S64
):
408 res
= (estack_bx_v
!= estack_ax_v
);
409 estack_pop(stack
, top
, ax
, bx
);
411 next_pc
+= sizeof(struct binary_op
);
414 OP(FILTER_OP_GT_S64
):
418 res
= (estack_bx_v
> estack_ax_v
);
419 estack_pop(stack
, top
, ax
, bx
);
421 next_pc
+= sizeof(struct binary_op
);
424 OP(FILTER_OP_LT_S64
):
428 res
= (estack_bx_v
< estack_ax_v
);
429 estack_pop(stack
, top
, ax
, bx
);
431 next_pc
+= sizeof(struct binary_op
);
434 OP(FILTER_OP_GE_S64
):
438 res
= (estack_bx_v
>= estack_ax_v
);
439 estack_pop(stack
, top
, ax
, bx
);
441 next_pc
+= sizeof(struct binary_op
);
444 OP(FILTER_OP_LE_S64
):
448 res
= (estack_bx_v
<= estack_ax_v
);
449 estack_pop(stack
, top
, ax
, bx
);
451 next_pc
+= sizeof(struct binary_op
);
455 OP(FILTER_OP_EQ_DOUBLE
):
459 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
460 estack_pop(stack
, top
, ax
, bx
);
462 next_pc
+= sizeof(struct binary_op
);
465 OP(FILTER_OP_NE_DOUBLE
):
469 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
470 estack_pop(stack
, top
, ax
, bx
);
472 next_pc
+= sizeof(struct binary_op
);
475 OP(FILTER_OP_GT_DOUBLE
):
479 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
480 estack_pop(stack
, top
, ax
, bx
);
482 next_pc
+= sizeof(struct binary_op
);
485 OP(FILTER_OP_LT_DOUBLE
):
489 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
490 estack_pop(stack
, top
, ax
, bx
);
492 next_pc
+= sizeof(struct binary_op
);
495 OP(FILTER_OP_GE_DOUBLE
):
499 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
500 estack_pop(stack
, top
, ax
, bx
);
502 next_pc
+= sizeof(struct binary_op
);
505 OP(FILTER_OP_LE_DOUBLE
):
509 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
510 estack_pop(stack
, top
, ax
, bx
);
512 next_pc
+= sizeof(struct binary_op
);
516 /* Mixed S64-double binary comparators */
517 OP(FILTER_OP_EQ_DOUBLE_S64
):
521 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
522 estack_pop(stack
, top
, ax
, bx
);
524 next_pc
+= sizeof(struct binary_op
);
527 OP(FILTER_OP_NE_DOUBLE_S64
):
531 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
532 estack_pop(stack
, top
, ax
, bx
);
534 next_pc
+= sizeof(struct binary_op
);
537 OP(FILTER_OP_GT_DOUBLE_S64
):
541 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
542 estack_pop(stack
, top
, ax
, bx
);
544 next_pc
+= sizeof(struct binary_op
);
547 OP(FILTER_OP_LT_DOUBLE_S64
):
551 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
552 estack_pop(stack
, top
, ax
, bx
);
554 next_pc
+= sizeof(struct binary_op
);
557 OP(FILTER_OP_GE_DOUBLE_S64
):
561 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
562 estack_pop(stack
, top
, ax
, bx
);
564 next_pc
+= sizeof(struct binary_op
);
567 OP(FILTER_OP_LE_DOUBLE_S64
):
571 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
572 estack_pop(stack
, top
, ax
, bx
);
574 next_pc
+= sizeof(struct binary_op
);
578 OP(FILTER_OP_EQ_S64_DOUBLE
):
582 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
583 estack_pop(stack
, top
, ax
, bx
);
585 next_pc
+= sizeof(struct binary_op
);
588 OP(FILTER_OP_NE_S64_DOUBLE
):
592 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
593 estack_pop(stack
, top
, ax
, bx
);
595 next_pc
+= sizeof(struct binary_op
);
598 OP(FILTER_OP_GT_S64_DOUBLE
):
602 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
603 estack_pop(stack
, top
, ax
, bx
);
605 next_pc
+= sizeof(struct binary_op
);
608 OP(FILTER_OP_LT_S64_DOUBLE
):
612 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
613 estack_pop(stack
, top
, ax
, bx
);
615 next_pc
+= sizeof(struct binary_op
);
618 OP(FILTER_OP_GE_S64_DOUBLE
):
622 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
623 estack_pop(stack
, top
, ax
, bx
);
625 next_pc
+= sizeof(struct binary_op
);
628 OP(FILTER_OP_LE_S64_DOUBLE
):
632 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
633 estack_pop(stack
, top
, ax
, bx
);
635 next_pc
+= sizeof(struct binary_op
);
640 OP(FILTER_OP_UNARY_PLUS
):
641 OP(FILTER_OP_UNARY_MINUS
):
642 OP(FILTER_OP_UNARY_NOT
):
643 ERR("unsupported non-specialized bytecode op %u\n",
644 (unsigned int) *(filter_opcode_t
*) pc
);
649 OP(FILTER_OP_UNARY_PLUS_S64
):
650 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
652 next_pc
+= sizeof(struct unary_op
);
655 OP(FILTER_OP_UNARY_MINUS_S64
):
657 estack_ax_v
= -estack_ax_v
;
658 next_pc
+= sizeof(struct unary_op
);
661 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
663 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
664 next_pc
+= sizeof(struct unary_op
);
667 OP(FILTER_OP_UNARY_NOT_S64
):
669 estack_ax_v
= !estack_ax_v
;
670 next_pc
+= sizeof(struct unary_op
);
673 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
675 estack_ax(stack
, top
)->u
.d
= !estack_ax(stack
, top
)->u
.d
;
676 next_pc
+= sizeof(struct unary_op
);
683 struct logical_op
*insn
= (struct logical_op
*) pc
;
685 /* If AX is 0, skip and evaluate to 0 */
686 if (unlikely(estack_ax_v
== 0)) {
687 dbg_printf("Jumping to bytecode offset %u\n",
688 (unsigned int) insn
->skip_offset
);
689 next_pc
= start_pc
+ insn
->skip_offset
;
691 /* Pop 1 when jump not taken */
692 estack_pop(stack
, top
, ax
, bx
);
693 next_pc
+= sizeof(struct logical_op
);
699 struct logical_op
*insn
= (struct logical_op
*) pc
;
701 /* If AX is nonzero, skip and evaluate to 1 */
703 if (unlikely(estack_ax_v
!= 0)) {
705 dbg_printf("Jumping to bytecode offset %u\n",
706 (unsigned int) insn
->skip_offset
);
707 next_pc
= start_pc
+ insn
->skip_offset
;
709 /* Pop 1 when jump not taken */
710 estack_pop(stack
, top
, ax
, bx
);
711 next_pc
+= sizeof(struct logical_op
);
718 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
720 struct load_op
*insn
= (struct load_op
*) pc
;
721 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
723 dbg_printf("load field ref offset %u type string\n",
725 estack_push(stack
, top
, ax
, bx
);
726 estack_ax(stack
, top
)->u
.s
.str
=
727 *(const char * const *) &filter_stack_data
[ref
->offset
];
728 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
729 dbg_printf("Filter warning: loading a NULL string.\n");
733 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
734 estack_ax(stack
, top
)->u
.s
.literal
= 0;
735 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
736 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
740 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
742 struct load_op
*insn
= (struct load_op
*) pc
;
743 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
745 dbg_printf("load field ref offset %u type sequence\n",
747 estack_push(stack
, top
, ax
, bx
);
748 estack_ax(stack
, top
)->u
.s
.seq_len
=
749 *(unsigned long *) &filter_stack_data
[ref
->offset
];
750 estack_ax(stack
, top
)->u
.s
.str
=
751 *(const char **) (&filter_stack_data
[ref
->offset
752 + sizeof(unsigned long)]);
753 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
754 dbg_printf("Filter warning: loading a NULL sequence.\n");
758 estack_ax(stack
, top
)->u
.s
.literal
= 0;
759 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
763 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
765 struct load_op
*insn
= (struct load_op
*) pc
;
766 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
768 dbg_printf("load field ref offset %u type s64\n",
770 estack_push(stack
, top
, ax
, bx
);
772 ((struct literal_numeric
*) &filter_stack_data
[ref
->offset
])->v
;
773 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
774 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
778 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
780 struct load_op
*insn
= (struct load_op
*) pc
;
781 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
783 dbg_printf("load field ref offset %u type double\n",
785 estack_push(stack
, top
, ax
, bx
);
786 memcpy(&estack_ax(stack
, top
)->u
.d
, &filter_stack_data
[ref
->offset
],
787 sizeof(struct literal_double
));
788 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
789 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
793 /* load from immediate operand */
794 OP(FILTER_OP_LOAD_STRING
):
796 struct load_op
*insn
= (struct load_op
*) pc
;
798 dbg_printf("load string %s\n", insn
->data
);
799 estack_push(stack
, top
, ax
, bx
);
800 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
801 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
802 estack_ax(stack
, top
)->u
.s
.literal
= 1;
803 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
807 OP(FILTER_OP_LOAD_S64
):
809 struct load_op
*insn
= (struct load_op
*) pc
;
811 estack_push(stack
, top
, ax
, bx
);
812 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
813 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
814 next_pc
+= sizeof(struct load_op
)
815 + sizeof(struct literal_numeric
);
819 OP(FILTER_OP_LOAD_DOUBLE
):
821 struct load_op
*insn
= (struct load_op
*) pc
;
823 estack_push(stack
, top
, ax
, bx
);
824 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
825 sizeof(struct literal_double
));
826 dbg_printf("load s64 %g\n", estack_ax(stack
, top
)->u
.d
);
827 next_pc
+= sizeof(struct load_op
)
828 + sizeof(struct literal_double
);
833 OP(FILTER_OP_CAST_TO_S64
):
834 ERR("unsupported non-specialized bytecode op %u\n",
835 (unsigned int) *(filter_opcode_t
*) pc
);
839 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
841 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
842 next_pc
+= sizeof(struct cast_op
);
846 OP(FILTER_OP_CAST_NOP
):
848 next_pc
+= sizeof(struct cast_op
);
852 /* get context ref */
853 OP(FILTER_OP_GET_CONTEXT_REF_STRING
):
855 struct load_op
*insn
= (struct load_op
*) pc
;
856 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
857 struct lttng_ctx_field
*ctx_field
;
858 union lttng_ctx_value v
;
860 dbg_printf("get context ref offset %u type string\n",
862 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
863 ctx_field
->get_value(ctx_field
, &v
);
864 estack_push(stack
, top
, ax
, bx
);
865 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
866 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
867 dbg_printf("Filter warning: loading a NULL string.\n");
871 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
872 estack_ax(stack
, top
)->u
.s
.literal
= 0;
873 dbg_printf("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
874 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
878 OP(FILTER_OP_GET_CONTEXT_REF_S64
):
880 struct load_op
*insn
= (struct load_op
*) pc
;
881 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
882 struct lttng_ctx_field
*ctx_field
;
883 union lttng_ctx_value v
;
885 dbg_printf("get context ref offset %u type s64\n",
887 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
888 ctx_field
->get_value(ctx_field
, &v
);
889 estack_push(stack
, top
, ax
, bx
);
891 dbg_printf("ref get context s64 %" PRIi64
"\n", estack_ax_v
);
892 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
896 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE
):
898 struct load_op
*insn
= (struct load_op
*) pc
;
899 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
900 struct lttng_ctx_field
*ctx_field
;
901 union lttng_ctx_value v
;
903 dbg_printf("get context ref offset %u type double\n",
905 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
906 ctx_field
->get_value(ctx_field
, &v
);
907 estack_push(stack
, top
, ax
, bx
);
908 memcpy(&estack_ax(stack
, top
)->u
.d
, &v
.d
, sizeof(struct literal_double
));
909 dbg_printf("ref get context double %g\n", estack_ax(stack
, top
)->u
.d
);
910 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
916 /* return 0 (discard) on error */