/*
 * lttng-filter-interpreter.c
 *
 * LTTng UST filter interpreter.
 *
 * Copyright (C) 2010-2012 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; only
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
23 #include "lttng-filter.h"
27 * -2: unknown escape char.
32 int parse_char(const char **p
)
52 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
54 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
61 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
> estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
62 if (q
- estack_ax(stack
, top
)->u
.s
.str
> estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')
68 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
> estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
69 if (p
- estack_bx(stack
, top
)->u
.s
.str
> estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')
75 if (estack_bx(stack
, top
)->u
.s
.literal
) {
79 } else if (ret
== -2) {
82 /* else compare both char */
84 if (estack_ax(stack
, top
)->u
.s
.literal
) {
88 } else if (ret
== -2) {
108 int lttng_filter_false(void *filter_data
,
109 const char *filter_stack_data
)
114 #ifdef INTERPRETER_USE_SWITCH
117 * Fallback for compilers that do not support taking address of labels.
121 start_pc = &bytecode->data[0]; \
122 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
124 dbg_printf("Executing op %s (%u)\n", \
125 print_op((unsigned int) *(filter_opcode_t *) pc), \
126 (unsigned int) *(filter_opcode_t *) pc); \
127 switch (*(filter_opcode_t *) pc) {
129 #define OP(name) case name
139 * Dispatch-table based interpreter.
143 start_pc = &bytecode->data[0]; \
144 pc = next_pc = start_pc; \
145 if (unlikely(pc - start_pc >= bytecode->len)) \
147 goto *dispatch[*(filter_opcode_t *) pc];
154 goto *dispatch[*(filter_opcode_t *) pc];
160 int lttng_filter_interpret_bytecode(void *filter_data
,
161 const char *filter_stack_data
)
163 struct bytecode_runtime
*bytecode
= filter_data
;
164 void *pc
, *next_pc
, *start_pc
;
167 struct estack _stack
;
168 struct estack
*stack
= &_stack
;
169 register int64_t ax
= 0, bx
= 0;
170 register int top
= FILTER_STACK_EMPTY
;
171 #ifndef INTERPRETER_USE_SWITCH
172 static void *dispatch
[NR_FILTER_OPS
] = {
173 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
175 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
178 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
179 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
180 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
181 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
182 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
183 [ FILTER_OP_RSHIFT
] = &&LABEL_FILTER_OP_RSHIFT
,
184 [ FILTER_OP_LSHIFT
] = &&LABEL_FILTER_OP_LSHIFT
,
185 [ FILTER_OP_BIN_AND
] = &&LABEL_FILTER_OP_BIN_AND
,
186 [ FILTER_OP_BIN_OR
] = &&LABEL_FILTER_OP_BIN_OR
,
187 [ FILTER_OP_BIN_XOR
] = &&LABEL_FILTER_OP_BIN_XOR
,
189 /* binary comparators */
190 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
191 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
192 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
193 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
194 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
195 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
197 /* string binary comparator */
198 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
199 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
200 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
201 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
202 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
203 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
205 /* s64 binary comparator */
206 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
207 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
208 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
209 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
210 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
211 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
213 /* double binary comparator */
214 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
215 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
216 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
217 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
218 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
219 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
221 /* Mixed S64-double binary comparators */
222 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
223 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
224 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
225 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
226 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
227 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
229 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
230 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
231 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
232 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
233 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
234 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
237 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
238 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
239 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
240 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
241 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
242 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
243 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
244 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
245 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
248 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
249 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
252 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
253 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
254 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
255 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
256 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
258 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
259 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
260 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
263 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
264 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
265 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
267 #endif /* #ifndef INTERPRETER_USE_SWITCH */
271 OP(FILTER_OP_UNKNOWN
):
272 OP(FILTER_OP_LOAD_FIELD_REF
):
273 #ifdef INTERPRETER_USE_SWITCH
275 #endif /* INTERPRETER_USE_SWITCH */
276 ERR("unknown bytecode op %u\n",
277 (unsigned int) *(filter_opcode_t
*) pc
);
281 OP(FILTER_OP_RETURN
):
282 retval
= !!estack_ax_v
;
292 OP(FILTER_OP_RSHIFT
):
293 OP(FILTER_OP_LSHIFT
):
294 OP(FILTER_OP_BIN_AND
):
295 OP(FILTER_OP_BIN_OR
):
296 OP(FILTER_OP_BIN_XOR
):
297 ERR("unsupported bytecode op %u\n",
298 (unsigned int) *(filter_opcode_t
*) pc
);
308 ERR("unsupported non-specialized bytecode op %u\n",
309 (unsigned int) *(filter_opcode_t
*) pc
);
313 OP(FILTER_OP_EQ_STRING
):
317 res
= (stack_strcmp(stack
, top
, "==") == 0);
318 estack_pop(stack
, top
, ax
, bx
);
320 next_pc
+= sizeof(struct binary_op
);
323 OP(FILTER_OP_NE_STRING
):
327 res
= (stack_strcmp(stack
, top
, "!=") != 0);
328 estack_pop(stack
, top
, ax
, bx
);
330 next_pc
+= sizeof(struct binary_op
);
333 OP(FILTER_OP_GT_STRING
):
337 res
= (stack_strcmp(stack
, top
, ">") > 0);
338 estack_pop(stack
, top
, ax
, bx
);
340 next_pc
+= sizeof(struct binary_op
);
343 OP(FILTER_OP_LT_STRING
):
347 res
= (stack_strcmp(stack
, top
, "<") < 0);
348 estack_pop(stack
, top
, ax
, bx
);
350 next_pc
+= sizeof(struct binary_op
);
353 OP(FILTER_OP_GE_STRING
):
357 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
358 estack_pop(stack
, top
, ax
, bx
);
360 next_pc
+= sizeof(struct binary_op
);
363 OP(FILTER_OP_LE_STRING
):
367 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
368 estack_pop(stack
, top
, ax
, bx
);
370 next_pc
+= sizeof(struct binary_op
);
374 OP(FILTER_OP_EQ_S64
):
378 res
= (estack_bx_v
== estack_ax_v
);
379 estack_pop(stack
, top
, ax
, bx
);
381 next_pc
+= sizeof(struct binary_op
);
384 OP(FILTER_OP_NE_S64
):
388 res
= (estack_bx_v
!= estack_ax_v
);
389 estack_pop(stack
, top
, ax
, bx
);
391 next_pc
+= sizeof(struct binary_op
);
394 OP(FILTER_OP_GT_S64
):
398 res
= (estack_bx_v
> estack_ax_v
);
399 estack_pop(stack
, top
, ax
, bx
);
401 next_pc
+= sizeof(struct binary_op
);
404 OP(FILTER_OP_LT_S64
):
408 res
= (estack_bx_v
< estack_ax_v
);
409 estack_pop(stack
, top
, ax
, bx
);
411 next_pc
+= sizeof(struct binary_op
);
414 OP(FILTER_OP_GE_S64
):
418 res
= (estack_bx_v
>= estack_ax_v
);
419 estack_pop(stack
, top
, ax
, bx
);
421 next_pc
+= sizeof(struct binary_op
);
424 OP(FILTER_OP_LE_S64
):
428 res
= (estack_bx_v
<= estack_ax_v
);
429 estack_pop(stack
, top
, ax
, bx
);
431 next_pc
+= sizeof(struct binary_op
);
435 OP(FILTER_OP_EQ_DOUBLE
):
439 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
440 estack_pop(stack
, top
, ax
, bx
);
442 next_pc
+= sizeof(struct binary_op
);
445 OP(FILTER_OP_NE_DOUBLE
):
449 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
450 estack_pop(stack
, top
, ax
, bx
);
452 next_pc
+= sizeof(struct binary_op
);
455 OP(FILTER_OP_GT_DOUBLE
):
459 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
460 estack_pop(stack
, top
, ax
, bx
);
462 next_pc
+= sizeof(struct binary_op
);
465 OP(FILTER_OP_LT_DOUBLE
):
469 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
470 estack_pop(stack
, top
, ax
, bx
);
472 next_pc
+= sizeof(struct binary_op
);
475 OP(FILTER_OP_GE_DOUBLE
):
479 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
480 estack_pop(stack
, top
, ax
, bx
);
482 next_pc
+= sizeof(struct binary_op
);
485 OP(FILTER_OP_LE_DOUBLE
):
489 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
490 estack_pop(stack
, top
, ax
, bx
);
492 next_pc
+= sizeof(struct binary_op
);
496 /* Mixed S64-double binary comparators */
497 OP(FILTER_OP_EQ_DOUBLE_S64
):
501 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
502 estack_pop(stack
, top
, ax
, bx
);
504 next_pc
+= sizeof(struct binary_op
);
507 OP(FILTER_OP_NE_DOUBLE_S64
):
511 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
512 estack_pop(stack
, top
, ax
, bx
);
514 next_pc
+= sizeof(struct binary_op
);
517 OP(FILTER_OP_GT_DOUBLE_S64
):
521 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
522 estack_pop(stack
, top
, ax
, bx
);
524 next_pc
+= sizeof(struct binary_op
);
527 OP(FILTER_OP_LT_DOUBLE_S64
):
531 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
532 estack_pop(stack
, top
, ax
, bx
);
534 next_pc
+= sizeof(struct binary_op
);
537 OP(FILTER_OP_GE_DOUBLE_S64
):
541 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
542 estack_pop(stack
, top
, ax
, bx
);
544 next_pc
+= sizeof(struct binary_op
);
547 OP(FILTER_OP_LE_DOUBLE_S64
):
551 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
552 estack_pop(stack
, top
, ax
, bx
);
554 next_pc
+= sizeof(struct binary_op
);
558 OP(FILTER_OP_EQ_S64_DOUBLE
):
562 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
563 estack_pop(stack
, top
, ax
, bx
);
565 next_pc
+= sizeof(struct binary_op
);
568 OP(FILTER_OP_NE_S64_DOUBLE
):
572 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
573 estack_pop(stack
, top
, ax
, bx
);
575 next_pc
+= sizeof(struct binary_op
);
578 OP(FILTER_OP_GT_S64_DOUBLE
):
582 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
583 estack_pop(stack
, top
, ax
, bx
);
585 next_pc
+= sizeof(struct binary_op
);
588 OP(FILTER_OP_LT_S64_DOUBLE
):
592 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
593 estack_pop(stack
, top
, ax
, bx
);
595 next_pc
+= sizeof(struct binary_op
);
598 OP(FILTER_OP_GE_S64_DOUBLE
):
602 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
603 estack_pop(stack
, top
, ax
, bx
);
605 next_pc
+= sizeof(struct binary_op
);
608 OP(FILTER_OP_LE_S64_DOUBLE
):
612 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
613 estack_pop(stack
, top
, ax
, bx
);
615 next_pc
+= sizeof(struct binary_op
);
620 OP(FILTER_OP_UNARY_PLUS
):
621 OP(FILTER_OP_UNARY_MINUS
):
622 OP(FILTER_OP_UNARY_NOT
):
623 ERR("unsupported non-specialized bytecode op %u\n",
624 (unsigned int) *(filter_opcode_t
*) pc
);
629 OP(FILTER_OP_UNARY_PLUS_S64
):
630 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
632 next_pc
+= sizeof(struct unary_op
);
635 OP(FILTER_OP_UNARY_MINUS_S64
):
637 estack_ax_v
= -estack_ax_v
;
638 next_pc
+= sizeof(struct unary_op
);
641 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
643 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
644 next_pc
+= sizeof(struct unary_op
);
647 OP(FILTER_OP_UNARY_NOT_S64
):
649 estack_ax_v
= !estack_ax_v
;
650 next_pc
+= sizeof(struct unary_op
);
653 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
655 estack_ax(stack
, top
)->u
.d
= !estack_ax(stack
, top
)->u
.d
;
656 next_pc
+= sizeof(struct unary_op
);
663 struct logical_op
*insn
= (struct logical_op
*) pc
;
665 /* If AX is 0, skip and evaluate to 0 */
666 if (unlikely(estack_ax_v
== 0)) {
667 dbg_printf("Jumping to bytecode offset %u\n",
668 (unsigned int) insn
->skip_offset
);
669 next_pc
= start_pc
+ insn
->skip_offset
;
671 /* Pop 1 when jump not taken */
672 estack_pop(stack
, top
, ax
, bx
);
673 next_pc
+= sizeof(struct logical_op
);
679 struct logical_op
*insn
= (struct logical_op
*) pc
;
681 /* If AX is nonzero, skip and evaluate to 1 */
683 if (unlikely(estack_ax_v
!= 0)) {
685 dbg_printf("Jumping to bytecode offset %u\n",
686 (unsigned int) insn
->skip_offset
);
687 next_pc
= start_pc
+ insn
->skip_offset
;
689 /* Pop 1 when jump not taken */
690 estack_pop(stack
, top
, ax
, bx
);
691 next_pc
+= sizeof(struct logical_op
);
698 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
700 struct load_op
*insn
= (struct load_op
*) pc
;
701 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
703 dbg_printf("load field ref offset %u type string\n",
705 estack_push(stack
, top
, ax
, bx
);
706 estack_ax(stack
, top
)->u
.s
.str
=
707 *(const char * const *) &filter_stack_data
[ref
->offset
];
708 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
709 dbg_printf("Filter warning: loading a NULL string.\n");
713 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
714 estack_ax(stack
, top
)->u
.s
.literal
= 0;
715 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
716 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
720 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
722 struct load_op
*insn
= (struct load_op
*) pc
;
723 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
725 dbg_printf("load field ref offset %u type sequence\n",
727 estack_push(stack
, top
, ax
, bx
);
728 estack_ax(stack
, top
)->u
.s
.seq_len
=
729 *(unsigned long *) &filter_stack_data
[ref
->offset
];
730 estack_ax(stack
, top
)->u
.s
.str
=
731 *(const char **) (&filter_stack_data
[ref
->offset
732 + sizeof(unsigned long)]);
733 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
734 dbg_printf("Filter warning: loading a NULL sequence.\n");
738 estack_ax(stack
, top
)->u
.s
.literal
= 0;
739 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
743 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
745 struct load_op
*insn
= (struct load_op
*) pc
;
746 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
748 dbg_printf("load field ref offset %u type s64\n",
750 estack_push(stack
, top
, ax
, bx
);
752 ((struct literal_numeric
*) &filter_stack_data
[ref
->offset
])->v
;
753 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
754 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
758 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
760 struct load_op
*insn
= (struct load_op
*) pc
;
761 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
763 dbg_printf("load field ref offset %u type double\n",
765 estack_push(stack
, top
, ax
, bx
);
766 memcpy(&estack_ax(stack
, top
)->u
.d
, &filter_stack_data
[ref
->offset
],
767 sizeof(struct literal_double
));
768 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
769 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
773 OP(FILTER_OP_LOAD_STRING
):
775 struct load_op
*insn
= (struct load_op
*) pc
;
777 dbg_printf("load string %s\n", insn
->data
);
778 estack_push(stack
, top
, ax
, bx
);
779 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
780 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
781 estack_ax(stack
, top
)->u
.s
.literal
= 1;
782 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
786 OP(FILTER_OP_LOAD_S64
):
788 struct load_op
*insn
= (struct load_op
*) pc
;
790 estack_push(stack
, top
, ax
, bx
);
791 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
792 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
793 next_pc
+= sizeof(struct load_op
)
794 + sizeof(struct literal_numeric
);
798 OP(FILTER_OP_LOAD_DOUBLE
):
800 struct load_op
*insn
= (struct load_op
*) pc
;
802 estack_push(stack
, top
, ax
, bx
);
803 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
804 sizeof(struct literal_double
));
805 dbg_printf("load s64 %g\n", estack_ax(stack
, top
)->u
.d
);
806 next_pc
+= sizeof(struct load_op
)
807 + sizeof(struct literal_double
);
812 OP(FILTER_OP_CAST_TO_S64
):
813 ERR("unsupported non-specialized bytecode op %u\n",
814 (unsigned int) *(filter_opcode_t
*) pc
);
818 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
820 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
821 next_pc
+= sizeof(struct cast_op
);
825 OP(FILTER_OP_CAST_NOP
):
827 next_pc
+= sizeof(struct cast_op
);
833 /* return 0 (discard) on error */