/*
 * lttng-filter-interpreter.c
 *
 * LTTng UST filter interpreter.
 *
 * Copyright (C) 2010-2012 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; only
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <stdint.h>

#include "lttng-filter.h"
/*
 * parse_char returns -2 on an unknown escape char.
 */
32 int parse_char(const char **p
)
52 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
54 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
61 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
> estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
62 if (q
- estack_ax(stack
, top
)->u
.s
.str
> estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
73 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
> estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
74 if (p
- estack_bx(stack
, top
)->u
.s
.str
> estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0') {
85 if (estack_bx(stack
, top
)->u
.s
.literal
) {
89 } else if (ret
== -2) {
92 /* else compare both char */
94 if (estack_ax(stack
, top
)->u
.s
.literal
) {
98 } else if (ret
== -2) {
118 uint64_t lttng_filter_false(void *filter_data
,
119 const char *filter_stack_data
)
124 #ifdef INTERPRETER_USE_SWITCH
127 * Fallback for compilers that do not support taking address of labels.
131 start_pc = &bytecode->data[0]; \
132 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
134 dbg_printf("Executing op %s (%u)\n", \
135 print_op((unsigned int) *(filter_opcode_t *) pc), \
136 (unsigned int) *(filter_opcode_t *) pc); \
137 switch (*(filter_opcode_t *) pc) {
139 #define OP(name) case name
149 * Dispatch-table based interpreter.
153 start_pc = &bytecode->data[0]; \
154 pc = next_pc = start_pc; \
155 if (unlikely(pc - start_pc >= bytecode->len)) \
157 goto *dispatch[*(filter_opcode_t *) pc];
164 goto *dispatch[*(filter_opcode_t *) pc];
171 * Return 0 (discard), or raise the 0x1 flag (log event).
172 * Currently, other flags are kept for future extensions and have no
175 uint64_t lttng_filter_interpret_bytecode(void *filter_data
,
176 const char *filter_stack_data
)
178 struct bytecode_runtime
*bytecode
= filter_data
;
179 void *pc
, *next_pc
, *start_pc
;
182 struct estack _stack
;
183 struct estack
*stack
= &_stack
;
184 register int64_t ax
= 0, bx
= 0;
185 register int top
= FILTER_STACK_EMPTY
;
186 #ifndef INTERPRETER_USE_SWITCH
187 static void *dispatch
[NR_FILTER_OPS
] = {
188 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
190 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
193 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
194 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
195 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
196 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
197 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
198 [ FILTER_OP_RSHIFT
] = &&LABEL_FILTER_OP_RSHIFT
,
199 [ FILTER_OP_LSHIFT
] = &&LABEL_FILTER_OP_LSHIFT
,
200 [ FILTER_OP_BIN_AND
] = &&LABEL_FILTER_OP_BIN_AND
,
201 [ FILTER_OP_BIN_OR
] = &&LABEL_FILTER_OP_BIN_OR
,
202 [ FILTER_OP_BIN_XOR
] = &&LABEL_FILTER_OP_BIN_XOR
,
204 /* binary comparators */
205 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
206 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
207 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
208 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
209 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
210 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
212 /* string binary comparator */
213 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
214 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
215 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
216 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
217 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
218 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
220 /* s64 binary comparator */
221 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
222 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
223 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
224 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
225 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
226 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
228 /* double binary comparator */
229 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
230 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
231 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
232 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
233 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
234 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
236 /* Mixed S64-double binary comparators */
237 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
238 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
239 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
240 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
241 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
242 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
244 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
245 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
246 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
247 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
248 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
249 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
252 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
253 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
254 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
255 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
256 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
257 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
258 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
259 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
260 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
263 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
264 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
267 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
268 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
269 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
270 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
271 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
273 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
274 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
275 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
278 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
279 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
280 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
282 #endif /* #ifndef INTERPRETER_USE_SWITCH */
286 OP(FILTER_OP_UNKNOWN
):
287 OP(FILTER_OP_LOAD_FIELD_REF
):
288 #ifdef INTERPRETER_USE_SWITCH
290 #endif /* INTERPRETER_USE_SWITCH */
291 ERR("unknown bytecode op %u\n",
292 (unsigned int) *(filter_opcode_t
*) pc
);
296 OP(FILTER_OP_RETURN
):
297 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
298 retval
= !!estack_ax_v
;
308 OP(FILTER_OP_RSHIFT
):
309 OP(FILTER_OP_LSHIFT
):
310 OP(FILTER_OP_BIN_AND
):
311 OP(FILTER_OP_BIN_OR
):
312 OP(FILTER_OP_BIN_XOR
):
313 ERR("unsupported bytecode op %u\n",
314 (unsigned int) *(filter_opcode_t
*) pc
);
324 ERR("unsupported non-specialized bytecode op %u\n",
325 (unsigned int) *(filter_opcode_t
*) pc
);
329 OP(FILTER_OP_EQ_STRING
):
333 res
= (stack_strcmp(stack
, top
, "==") == 0);
334 estack_pop(stack
, top
, ax
, bx
);
336 next_pc
+= sizeof(struct binary_op
);
339 OP(FILTER_OP_NE_STRING
):
343 res
= (stack_strcmp(stack
, top
, "!=") != 0);
344 estack_pop(stack
, top
, ax
, bx
);
346 next_pc
+= sizeof(struct binary_op
);
349 OP(FILTER_OP_GT_STRING
):
353 res
= (stack_strcmp(stack
, top
, ">") > 0);
354 estack_pop(stack
, top
, ax
, bx
);
356 next_pc
+= sizeof(struct binary_op
);
359 OP(FILTER_OP_LT_STRING
):
363 res
= (stack_strcmp(stack
, top
, "<") < 0);
364 estack_pop(stack
, top
, ax
, bx
);
366 next_pc
+= sizeof(struct binary_op
);
369 OP(FILTER_OP_GE_STRING
):
373 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
374 estack_pop(stack
, top
, ax
, bx
);
376 next_pc
+= sizeof(struct binary_op
);
379 OP(FILTER_OP_LE_STRING
):
383 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
384 estack_pop(stack
, top
, ax
, bx
);
386 next_pc
+= sizeof(struct binary_op
);
390 OP(FILTER_OP_EQ_S64
):
394 res
= (estack_bx_v
== estack_ax_v
);
395 estack_pop(stack
, top
, ax
, bx
);
397 next_pc
+= sizeof(struct binary_op
);
400 OP(FILTER_OP_NE_S64
):
404 res
= (estack_bx_v
!= estack_ax_v
);
405 estack_pop(stack
, top
, ax
, bx
);
407 next_pc
+= sizeof(struct binary_op
);
410 OP(FILTER_OP_GT_S64
):
414 res
= (estack_bx_v
> estack_ax_v
);
415 estack_pop(stack
, top
, ax
, bx
);
417 next_pc
+= sizeof(struct binary_op
);
420 OP(FILTER_OP_LT_S64
):
424 res
= (estack_bx_v
< estack_ax_v
);
425 estack_pop(stack
, top
, ax
, bx
);
427 next_pc
+= sizeof(struct binary_op
);
430 OP(FILTER_OP_GE_S64
):
434 res
= (estack_bx_v
>= estack_ax_v
);
435 estack_pop(stack
, top
, ax
, bx
);
437 next_pc
+= sizeof(struct binary_op
);
440 OP(FILTER_OP_LE_S64
):
444 res
= (estack_bx_v
<= estack_ax_v
);
445 estack_pop(stack
, top
, ax
, bx
);
447 next_pc
+= sizeof(struct binary_op
);
451 OP(FILTER_OP_EQ_DOUBLE
):
455 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
456 estack_pop(stack
, top
, ax
, bx
);
458 next_pc
+= sizeof(struct binary_op
);
461 OP(FILTER_OP_NE_DOUBLE
):
465 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
466 estack_pop(stack
, top
, ax
, bx
);
468 next_pc
+= sizeof(struct binary_op
);
471 OP(FILTER_OP_GT_DOUBLE
):
475 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
476 estack_pop(stack
, top
, ax
, bx
);
478 next_pc
+= sizeof(struct binary_op
);
481 OP(FILTER_OP_LT_DOUBLE
):
485 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
486 estack_pop(stack
, top
, ax
, bx
);
488 next_pc
+= sizeof(struct binary_op
);
491 OP(FILTER_OP_GE_DOUBLE
):
495 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
496 estack_pop(stack
, top
, ax
, bx
);
498 next_pc
+= sizeof(struct binary_op
);
501 OP(FILTER_OP_LE_DOUBLE
):
505 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
506 estack_pop(stack
, top
, ax
, bx
);
508 next_pc
+= sizeof(struct binary_op
);
512 /* Mixed S64-double binary comparators */
513 OP(FILTER_OP_EQ_DOUBLE_S64
):
517 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
518 estack_pop(stack
, top
, ax
, bx
);
520 next_pc
+= sizeof(struct binary_op
);
523 OP(FILTER_OP_NE_DOUBLE_S64
):
527 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
528 estack_pop(stack
, top
, ax
, bx
);
530 next_pc
+= sizeof(struct binary_op
);
533 OP(FILTER_OP_GT_DOUBLE_S64
):
537 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
538 estack_pop(stack
, top
, ax
, bx
);
540 next_pc
+= sizeof(struct binary_op
);
543 OP(FILTER_OP_LT_DOUBLE_S64
):
547 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
548 estack_pop(stack
, top
, ax
, bx
);
550 next_pc
+= sizeof(struct binary_op
);
553 OP(FILTER_OP_GE_DOUBLE_S64
):
557 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
558 estack_pop(stack
, top
, ax
, bx
);
560 next_pc
+= sizeof(struct binary_op
);
563 OP(FILTER_OP_LE_DOUBLE_S64
):
567 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
568 estack_pop(stack
, top
, ax
, bx
);
570 next_pc
+= sizeof(struct binary_op
);
574 OP(FILTER_OP_EQ_S64_DOUBLE
):
578 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
579 estack_pop(stack
, top
, ax
, bx
);
581 next_pc
+= sizeof(struct binary_op
);
584 OP(FILTER_OP_NE_S64_DOUBLE
):
588 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
589 estack_pop(stack
, top
, ax
, bx
);
591 next_pc
+= sizeof(struct binary_op
);
594 OP(FILTER_OP_GT_S64_DOUBLE
):
598 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
599 estack_pop(stack
, top
, ax
, bx
);
601 next_pc
+= sizeof(struct binary_op
);
604 OP(FILTER_OP_LT_S64_DOUBLE
):
608 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
609 estack_pop(stack
, top
, ax
, bx
);
611 next_pc
+= sizeof(struct binary_op
);
614 OP(FILTER_OP_GE_S64_DOUBLE
):
618 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
619 estack_pop(stack
, top
, ax
, bx
);
621 next_pc
+= sizeof(struct binary_op
);
624 OP(FILTER_OP_LE_S64_DOUBLE
):
628 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
629 estack_pop(stack
, top
, ax
, bx
);
631 next_pc
+= sizeof(struct binary_op
);
636 OP(FILTER_OP_UNARY_PLUS
):
637 OP(FILTER_OP_UNARY_MINUS
):
638 OP(FILTER_OP_UNARY_NOT
):
639 ERR("unsupported non-specialized bytecode op %u\n",
640 (unsigned int) *(filter_opcode_t
*) pc
);
645 OP(FILTER_OP_UNARY_PLUS_S64
):
646 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
648 next_pc
+= sizeof(struct unary_op
);
651 OP(FILTER_OP_UNARY_MINUS_S64
):
653 estack_ax_v
= -estack_ax_v
;
654 next_pc
+= sizeof(struct unary_op
);
657 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
659 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
660 next_pc
+= sizeof(struct unary_op
);
663 OP(FILTER_OP_UNARY_NOT_S64
):
665 estack_ax_v
= !estack_ax_v
;
666 next_pc
+= sizeof(struct unary_op
);
669 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
671 estack_ax(stack
, top
)->u
.d
= !estack_ax(stack
, top
)->u
.d
;
672 next_pc
+= sizeof(struct unary_op
);
679 struct logical_op
*insn
= (struct logical_op
*) pc
;
681 /* If AX is 0, skip and evaluate to 0 */
682 if (unlikely(estack_ax_v
== 0)) {
683 dbg_printf("Jumping to bytecode offset %u\n",
684 (unsigned int) insn
->skip_offset
);
685 next_pc
= start_pc
+ insn
->skip_offset
;
687 /* Pop 1 when jump not taken */
688 estack_pop(stack
, top
, ax
, bx
);
689 next_pc
+= sizeof(struct logical_op
);
695 struct logical_op
*insn
= (struct logical_op
*) pc
;
697 /* If AX is nonzero, skip and evaluate to 1 */
699 if (unlikely(estack_ax_v
!= 0)) {
701 dbg_printf("Jumping to bytecode offset %u\n",
702 (unsigned int) insn
->skip_offset
);
703 next_pc
= start_pc
+ insn
->skip_offset
;
705 /* Pop 1 when jump not taken */
706 estack_pop(stack
, top
, ax
, bx
);
707 next_pc
+= sizeof(struct logical_op
);
714 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
716 struct load_op
*insn
= (struct load_op
*) pc
;
717 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
719 dbg_printf("load field ref offset %u type string\n",
721 estack_push(stack
, top
, ax
, bx
);
722 estack_ax(stack
, top
)->u
.s
.str
=
723 *(const char * const *) &filter_stack_data
[ref
->offset
];
724 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
725 dbg_printf("Filter warning: loading a NULL string.\n");
729 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
730 estack_ax(stack
, top
)->u
.s
.literal
= 0;
731 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
732 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
736 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
738 struct load_op
*insn
= (struct load_op
*) pc
;
739 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
741 dbg_printf("load field ref offset %u type sequence\n",
743 estack_push(stack
, top
, ax
, bx
);
744 estack_ax(stack
, top
)->u
.s
.seq_len
=
745 *(unsigned long *) &filter_stack_data
[ref
->offset
];
746 estack_ax(stack
, top
)->u
.s
.str
=
747 *(const char **) (&filter_stack_data
[ref
->offset
748 + sizeof(unsigned long)]);
749 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
750 dbg_printf("Filter warning: loading a NULL sequence.\n");
754 estack_ax(stack
, top
)->u
.s
.literal
= 0;
755 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
759 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
761 struct load_op
*insn
= (struct load_op
*) pc
;
762 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
764 dbg_printf("load field ref offset %u type s64\n",
766 estack_push(stack
, top
, ax
, bx
);
768 ((struct literal_numeric
*) &filter_stack_data
[ref
->offset
])->v
;
769 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
770 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
774 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
776 struct load_op
*insn
= (struct load_op
*) pc
;
777 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
779 dbg_printf("load field ref offset %u type double\n",
781 estack_push(stack
, top
, ax
, bx
);
782 memcpy(&estack_ax(stack
, top
)->u
.d
, &filter_stack_data
[ref
->offset
],
783 sizeof(struct literal_double
));
784 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
785 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
789 OP(FILTER_OP_LOAD_STRING
):
791 struct load_op
*insn
= (struct load_op
*) pc
;
793 dbg_printf("load string %s\n", insn
->data
);
794 estack_push(stack
, top
, ax
, bx
);
795 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
796 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
797 estack_ax(stack
, top
)->u
.s
.literal
= 1;
798 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
802 OP(FILTER_OP_LOAD_S64
):
804 struct load_op
*insn
= (struct load_op
*) pc
;
806 estack_push(stack
, top
, ax
, bx
);
807 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
808 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
809 next_pc
+= sizeof(struct load_op
)
810 + sizeof(struct literal_numeric
);
814 OP(FILTER_OP_LOAD_DOUBLE
):
816 struct load_op
*insn
= (struct load_op
*) pc
;
818 estack_push(stack
, top
, ax
, bx
);
819 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
820 sizeof(struct literal_double
));
821 dbg_printf("load s64 %g\n", estack_ax(stack
, top
)->u
.d
);
822 next_pc
+= sizeof(struct load_op
)
823 + sizeof(struct literal_double
);
828 OP(FILTER_OP_CAST_TO_S64
):
829 ERR("unsupported non-specialized bytecode op %u\n",
830 (unsigned int) *(filter_opcode_t
*) pc
);
834 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
836 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
837 next_pc
+= sizeof(struct cast_op
);
841 OP(FILTER_OP_CAST_NOP
):
843 next_pc
+= sizeof(struct cast_op
);
849 /* return 0 (discard) on error */