/*
 * lttng-filter-interpreter.c
 *
 * LTTng modules filter interpreter.
 *
 * Copyright (C) 2010-2014 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; only
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "lttng-filter.h"
27 * -2: unknown escape char.
32 int parse_char(const char **p
)
52 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
54 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
61 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
> estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
62 if (q
- estack_ax(stack
, top
)->u
.s
.str
> estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
65 if (estack_ax(stack
, top
)->u
.s
.literal
) {
73 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
> estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
74 if (p
- estack_bx(stack
, top
)->u
.s
.str
> estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0') {
77 if (estack_bx(stack
, top
)->u
.s
.literal
) {
85 if (estack_bx(stack
, top
)->u
.s
.literal
) {
89 } else if (ret
== -2) {
92 /* else compare both char */
94 if (estack_ax(stack
, top
)->u
.s
.literal
) {
98 } else if (ret
== -2) {
118 uint64_t lttng_filter_false(void *filter_data
,
119 const char *filter_stack_data
)
124 #ifdef INTERPRETER_USE_SWITCH
127 * Fallback for compilers that do not support taking address of labels.
131 start_pc = &bytecode->data[0]; \
132 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
134 dbg_printk("Executing op %s (%u)\n", \
135 lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
136 (unsigned int) *(filter_opcode_t *) pc); \
137 switch (*(filter_opcode_t *) pc) {
139 #define OP(name) case name
149 * Dispatch-table based interpreter.
153 start_pc = &bytecode->data[0]; \
154 pc = next_pc = start_pc; \
155 if (unlikely(pc - start_pc >= bytecode->len)) \
157 goto *dispatch[*(filter_opcode_t *) pc];
164 goto *dispatch[*(filter_opcode_t *) pc];
/*
 * Return 0 (discard), or raise the 0x1 flag (log event).
 * Currently, other flags are kept for future extensions and have no
 * effect.
 */
175 uint64_t lttng_filter_interpret_bytecode(void *filter_data
,
176 const char *filter_stack_data
)
178 struct bytecode_runtime
*bytecode
= filter_data
;
179 void *pc
, *next_pc
, *start_pc
;
182 struct estack _stack
;
183 struct estack
*stack
= &_stack
;
184 register int64_t ax
= 0, bx
= 0;
185 register int top
= FILTER_STACK_EMPTY
;
186 #ifndef INTERPRETER_USE_SWITCH
187 static void *dispatch
[NR_FILTER_OPS
] = {
188 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
190 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
193 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
194 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
195 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
196 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
197 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
198 [ FILTER_OP_RSHIFT
] = &&LABEL_FILTER_OP_RSHIFT
,
199 [ FILTER_OP_LSHIFT
] = &&LABEL_FILTER_OP_LSHIFT
,
200 [ FILTER_OP_BIN_AND
] = &&LABEL_FILTER_OP_BIN_AND
,
201 [ FILTER_OP_BIN_OR
] = &&LABEL_FILTER_OP_BIN_OR
,
202 [ FILTER_OP_BIN_XOR
] = &&LABEL_FILTER_OP_BIN_XOR
,
204 /* binary comparators */
205 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
206 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
207 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
208 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
209 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
210 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
212 /* string binary comparator */
213 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
214 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
215 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
216 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
217 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
218 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
220 /* s64 binary comparator */
221 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
222 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
223 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
224 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
225 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
226 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
228 /* double binary comparator */
229 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
230 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
231 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
232 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
233 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
234 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
236 /* Mixed S64-double binary comparators */
237 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
238 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
239 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
240 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
241 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
242 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
244 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
245 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
246 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
247 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
248 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
249 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
252 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
253 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
254 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
255 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
256 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
257 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
258 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
259 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
260 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
263 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
264 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
267 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
268 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
269 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
270 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
271 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
273 /* load from immediate operand */
274 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
275 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
276 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
279 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
280 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
281 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
283 /* get context ref */
284 [ FILTER_OP_GET_CONTEXT_REF
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF
,
285 [ FILTER_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING
,
286 [ FILTER_OP_GET_CONTEXT_REF_S64
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64
,
287 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE
,
289 #endif /* #ifndef INTERPRETER_USE_SWITCH */
293 OP(FILTER_OP_UNKNOWN
):
294 OP(FILTER_OP_LOAD_FIELD_REF
):
295 OP(FILTER_OP_GET_CONTEXT_REF
):
296 #ifdef INTERPRETER_USE_SWITCH
298 #endif /* INTERPRETER_USE_SWITCH */
299 printk(KERN_WARNING
"unknown bytecode op %u\n",
300 (unsigned int) *(filter_opcode_t
*) pc
);
304 OP(FILTER_OP_RETURN
):
305 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
306 retval
= !!estack_ax_v
;
316 OP(FILTER_OP_RSHIFT
):
317 OP(FILTER_OP_LSHIFT
):
318 OP(FILTER_OP_BIN_AND
):
319 OP(FILTER_OP_BIN_OR
):
320 OP(FILTER_OP_BIN_XOR
):
321 printk(KERN_WARNING
"unsupported bytecode op %u\n",
322 (unsigned int) *(filter_opcode_t
*) pc
);
332 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
333 (unsigned int) *(filter_opcode_t
*) pc
);
337 OP(FILTER_OP_EQ_STRING
):
341 res
= (stack_strcmp(stack
, top
, "==") == 0);
342 estack_pop(stack
, top
, ax
, bx
);
344 next_pc
+= sizeof(struct binary_op
);
347 OP(FILTER_OP_NE_STRING
):
351 res
= (stack_strcmp(stack
, top
, "!=") != 0);
352 estack_pop(stack
, top
, ax
, bx
);
354 next_pc
+= sizeof(struct binary_op
);
357 OP(FILTER_OP_GT_STRING
):
361 res
= (stack_strcmp(stack
, top
, ">") > 0);
362 estack_pop(stack
, top
, ax
, bx
);
364 next_pc
+= sizeof(struct binary_op
);
367 OP(FILTER_OP_LT_STRING
):
371 res
= (stack_strcmp(stack
, top
, "<") < 0);
372 estack_pop(stack
, top
, ax
, bx
);
374 next_pc
+= sizeof(struct binary_op
);
377 OP(FILTER_OP_GE_STRING
):
381 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
382 estack_pop(stack
, top
, ax
, bx
);
384 next_pc
+= sizeof(struct binary_op
);
387 OP(FILTER_OP_LE_STRING
):
391 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
392 estack_pop(stack
, top
, ax
, bx
);
394 next_pc
+= sizeof(struct binary_op
);
398 OP(FILTER_OP_EQ_S64
):
402 res
= (estack_bx_v
== estack_ax_v
);
403 estack_pop(stack
, top
, ax
, bx
);
405 next_pc
+= sizeof(struct binary_op
);
408 OP(FILTER_OP_NE_S64
):
412 res
= (estack_bx_v
!= estack_ax_v
);
413 estack_pop(stack
, top
, ax
, bx
);
415 next_pc
+= sizeof(struct binary_op
);
418 OP(FILTER_OP_GT_S64
):
422 res
= (estack_bx_v
> estack_ax_v
);
423 estack_pop(stack
, top
, ax
, bx
);
425 next_pc
+= sizeof(struct binary_op
);
428 OP(FILTER_OP_LT_S64
):
432 res
= (estack_bx_v
< estack_ax_v
);
433 estack_pop(stack
, top
, ax
, bx
);
435 next_pc
+= sizeof(struct binary_op
);
438 OP(FILTER_OP_GE_S64
):
442 res
= (estack_bx_v
>= estack_ax_v
);
443 estack_pop(stack
, top
, ax
, bx
);
445 next_pc
+= sizeof(struct binary_op
);
448 OP(FILTER_OP_LE_S64
):
452 res
= (estack_bx_v
<= estack_ax_v
);
453 estack_pop(stack
, top
, ax
, bx
);
455 next_pc
+= sizeof(struct binary_op
);
459 OP(FILTER_OP_EQ_DOUBLE
):
460 OP(FILTER_OP_NE_DOUBLE
):
461 OP(FILTER_OP_GT_DOUBLE
):
462 OP(FILTER_OP_LT_DOUBLE
):
463 OP(FILTER_OP_GE_DOUBLE
):
464 OP(FILTER_OP_LE_DOUBLE
):
470 /* Mixed S64-double binary comparators */
471 OP(FILTER_OP_EQ_DOUBLE_S64
):
472 OP(FILTER_OP_NE_DOUBLE_S64
):
473 OP(FILTER_OP_GT_DOUBLE_S64
):
474 OP(FILTER_OP_LT_DOUBLE_S64
):
475 OP(FILTER_OP_GE_DOUBLE_S64
):
476 OP(FILTER_OP_LE_DOUBLE_S64
):
477 OP(FILTER_OP_EQ_S64_DOUBLE
):
478 OP(FILTER_OP_NE_S64_DOUBLE
):
479 OP(FILTER_OP_GT_S64_DOUBLE
):
480 OP(FILTER_OP_LT_S64_DOUBLE
):
481 OP(FILTER_OP_GE_S64_DOUBLE
):
482 OP(FILTER_OP_LE_S64_DOUBLE
):
489 OP(FILTER_OP_UNARY_PLUS
):
490 OP(FILTER_OP_UNARY_MINUS
):
491 OP(FILTER_OP_UNARY_NOT
):
492 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
493 (unsigned int) *(filter_opcode_t
*) pc
);
498 OP(FILTER_OP_UNARY_PLUS_S64
):
500 next_pc
+= sizeof(struct unary_op
);
503 OP(FILTER_OP_UNARY_MINUS_S64
):
505 estack_ax_v
= -estack_ax_v
;
506 next_pc
+= sizeof(struct unary_op
);
509 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
510 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
515 OP(FILTER_OP_UNARY_NOT_S64
):
517 estack_ax_v
= !estack_ax_v
;
518 next_pc
+= sizeof(struct unary_op
);
521 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
530 struct logical_op
*insn
= (struct logical_op
*) pc
;
532 /* If AX is 0, skip and evaluate to 0 */
533 if (unlikely(estack_ax_v
== 0)) {
534 dbg_printk("Jumping to bytecode offset %u\n",
535 (unsigned int) insn
->skip_offset
);
536 next_pc
= start_pc
+ insn
->skip_offset
;
538 /* Pop 1 when jump not taken */
539 estack_pop(stack
, top
, ax
, bx
);
540 next_pc
+= sizeof(struct logical_op
);
546 struct logical_op
*insn
= (struct logical_op
*) pc
;
548 /* If AX is nonzero, skip and evaluate to 1 */
550 if (unlikely(estack_ax_v
!= 0)) {
552 dbg_printk("Jumping to bytecode offset %u\n",
553 (unsigned int) insn
->skip_offset
);
554 next_pc
= start_pc
+ insn
->skip_offset
;
556 /* Pop 1 when jump not taken */
557 estack_pop(stack
, top
, ax
, bx
);
558 next_pc
+= sizeof(struct logical_op
);
565 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
567 struct load_op
*insn
= (struct load_op
*) pc
;
568 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
570 dbg_printk("load field ref offset %u type string\n",
572 estack_push(stack
, top
, ax
, bx
);
573 estack_ax(stack
, top
)->u
.s
.str
=
574 *(const char * const *) &filter_stack_data
[ref
->offset
];
575 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
576 dbg_printk("Filter warning: loading a NULL string.\n");
580 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
581 estack_ax(stack
, top
)->u
.s
.literal
= 0;
582 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
583 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
587 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
589 struct load_op
*insn
= (struct load_op
*) pc
;
590 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
592 dbg_printk("load field ref offset %u type sequence\n",
594 estack_push(stack
, top
, ax
, bx
);
595 estack_ax(stack
, top
)->u
.s
.seq_len
=
596 *(unsigned long *) &filter_stack_data
[ref
->offset
];
597 estack_ax(stack
, top
)->u
.s
.str
=
598 *(const char **) (&filter_stack_data
[ref
->offset
599 + sizeof(unsigned long)]);
600 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
601 dbg_printk("Filter warning: loading a NULL sequence.\n");
605 estack_ax(stack
, top
)->u
.s
.literal
= 0;
606 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
610 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
612 struct load_op
*insn
= (struct load_op
*) pc
;
613 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
615 dbg_printk("load field ref offset %u type s64\n",
617 estack_push(stack
, top
, ax
, bx
);
619 ((struct literal_numeric
*) &filter_stack_data
[ref
->offset
])->v
;
620 dbg_printk("ref load s64 %lld\n",
621 (long long) estack_ax_v
);
622 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
626 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
632 /* load from immediate operand */
633 OP(FILTER_OP_LOAD_STRING
):
635 struct load_op
*insn
= (struct load_op
*) pc
;
637 dbg_printk("load string %s\n", insn
->data
);
638 estack_push(stack
, top
, ax
, bx
);
639 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
640 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
641 estack_ax(stack
, top
)->u
.s
.literal
= 1;
642 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
646 OP(FILTER_OP_LOAD_S64
):
648 struct load_op
*insn
= (struct load_op
*) pc
;
650 estack_push(stack
, top
, ax
, bx
);
651 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
652 dbg_printk("load s64 %lld\n",
653 (long long) estack_ax_v
);
654 next_pc
+= sizeof(struct load_op
)
655 + sizeof(struct literal_numeric
);
659 OP(FILTER_OP_LOAD_DOUBLE
):
666 OP(FILTER_OP_CAST_TO_S64
):
667 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
668 (unsigned int) *(filter_opcode_t
*) pc
);
672 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
678 OP(FILTER_OP_CAST_NOP
):
680 next_pc
+= sizeof(struct cast_op
);
684 /* get context ref */
685 OP(FILTER_OP_GET_CONTEXT_REF_STRING
):
687 struct load_op
*insn
= (struct load_op
*) pc
;
688 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
689 struct lttng_ctx_field
*ctx_field
;
690 union lttng_ctx_value v
;
692 dbg_printk("get context ref offset %u type string\n",
694 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
695 ctx_field
->get_value(ctx_field
, &v
);
696 estack_push(stack
, top
, ax
, bx
);
697 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
698 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
699 dbg_printk("Filter warning: loading a NULL string.\n");
703 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
704 estack_ax(stack
, top
)->u
.s
.literal
= 0;
705 dbg_printk("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
706 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
710 OP(FILTER_OP_GET_CONTEXT_REF_S64
):
712 struct load_op
*insn
= (struct load_op
*) pc
;
713 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
714 struct lttng_ctx_field
*ctx_field
;
715 union lttng_ctx_value v
;
717 dbg_printk("get context ref offset %u type s64\n",
719 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
720 ctx_field
->get_value(ctx_field
, &v
);
721 estack_push(stack
, top
, ax
, bx
);
723 dbg_printk("ref get context s64 %lld\n",
724 (long long) estack_ax_v
);
725 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
729 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE
):
737 /* return 0 (discard) on error */