From 04aa13f8c2944839f6514e3841b93057b443a783 Mon Sep 17 00:00:00 2001
From: Francis Deslauriers
Date: Wed, 15 Apr 2020 11:03:53 -0400
Subject: [PATCH] Rename filter bytecode types and files

File renames:
- filter-bytecode.h -> bytecode.h
- lttng-filter-interpreter.c -> lttng-bytecode-interpreter.c
- lttng-filter-specialize.c -> lttng-bytecode-specialize.c
- lttng-filter-validator.c -> lttng-bytecode-validator.c
- lttng-filter.c -> lttng-bytecode.c
- lttng-filter.h -> lttng-bytecode.h

Function renames:
- `lttng_filter_interpret_bytecode_false()` -> `lttng_bytecode_filter_interpret_false()`
- `lttng_filter_interpret_bytecode()` -> `lttng_bytecode_filter_interpret()`
- `lttng_filter_specialize_bytecode()` -> `lttng_bytecode_specialize()`
- `lttng_filter_validate_bytecode()` -> `lttng_bytecode_validate()`

Type renames:
- `filter_opcode_t` -> `bytecode_opcode_t`

Enum renames:
- `enum filter_op` -> `enum bytecode_op`
- `FILTER_OP_*` -> `BYTECODE_OP_*`
- `enum lttng_filter_ret` -> `enum lttng_bytecode_interpreter_ret`
- `LTTNG_FILTER_DISCARD` -> `LTTNG_INTERPRETER_DISCARD`
- `LTTNG_FILTER_RECORD_FLAG` -> `LTTNG_INTERPRETER_RECORD_FLAG`

Define renames:
- `FILTER_STACK_EMPTY` -> `INTERPRETER_STACK_EMPTY`
- `FILTER_STACK_LEN` -> `INTERPRETER_STACK_LEN`
- `FILTER_MAX_DATA_LEN` -> `BYTECODE_MAX_DATA_LEN`

Signed-off-by: Francis Deslauriers
Signed-off-by: Mathieu Desnoyers
Change-Id: Iaab55116da5a3a8562b0f9e5b6033b556292f55b
---
 include/lttng/ust-events.h                    |   8 +-
 include/lttng/ust-tracepoint-event.h          |   4 +-
 liblttng-ust/Makefile.am                      |  12 +-
 liblttng-ust/bytecode.h                       | 251 +++++++
 liblttng-ust/filter-bytecode.h                | 249 -------
 ...rpreter.c => lttng-bytecode-interpreter.c} | 610 ++++++++---------
 ...ecialize.c => lttng-bytecode-specialize.c} | 336 ++++-----
 ...validator.c => lttng-bytecode-validator.c} | 636 +++++++++---------
 .../{lttng-filter.c => lttng-bytecode.c}      | 268 ++++----
 .../{lttng-filter.h => lttng-bytecode.h}      |  46 +-
 10 files changed, 1211 insertions(+), 1209 deletions(-)
 create mode 100644 liblttng-ust/bytecode.h
 delete mode 100644 liblttng-ust/filter-bytecode.h
 rename liblttng-ust/{lttng-filter-interpreter.c => lttng-bytecode-interpreter.c} (78%)
 rename liblttng-ust/{lttng-filter-specialize.c => lttng-bytecode-specialize.c} (83%)
 rename liblttng-ust/{lttng-filter-validator.c => lttng-bytecode-validator.c} (76%)
 rename liblttng-ust/{lttng-filter.c => lttng-bytecode.c} (64%)
 rename liblttng-ust/{lttng-filter.h => lttng-bytecode.h} (87%)

diff --git a/include/lttng/ust-events.h b/include/lttng/ust-events.h
index 4c02ffa3..356fa3b0 100644
--- a/include/lttng/ust-events.h
+++ b/include/lttng/ust-events.h
@@ -426,11 +426,11 @@ struct ust_pending_probe;
 struct lttng_event;
 
 /*
- * Filter return value masks.
+ * Bytecode interpreter return value masks.
  */
-enum lttng_filter_ret {
-	LTTNG_FILTER_DISCARD = 0,
-	LTTNG_FILTER_RECORD_FLAG = (1ULL << 0),
+enum lttng_bytecode_interpreter_ret {
+	LTTNG_INTERPRETER_DISCARD = 0,
+	LTTNG_INTERPRETER_RECORD_FLAG = (1ULL << 0),
 	/* Other bits are kept for future use.
*/ }; diff --git a/include/lttng/ust-tracepoint-event.h b/include/lttng/ust-tracepoint-event.h index 5e9245f1..00e931a8 100644 --- a/include/lttng/ust-tracepoint-event.h +++ b/include/lttng/ust-tracepoint-event.h @@ -879,7 +879,7 @@ void __event_probe__##_provider##___##_name(_TP_ARGS_DATA_PROTO(_args)) \ _TP_ARGS_DATA_VAR(_args)); \ tp_list_for_each_entry_rcu(__filter_bc_runtime, &__event->filter_bytecode_runtime_head, node) { \ if (caa_unlikely(__filter_bc_runtime->filter(__filter_bc_runtime, \ - __stackvar.__filter_stack_data) & LTTNG_FILTER_RECORD_FLAG)) { \ + __stackvar.__filter_stack_data) & LTTNG_INTERPRETER_RECORD_FLAG)) { \ __filter_record = 1; \ break; \ } \ @@ -961,7 +961,7 @@ void __event_notifier_probe__##_provider##___##_name(_TP_ARGS_DATA_PROTO(_args)) _TP_ARGS_DATA_VAR(_args)); \ tp_list_for_each_entry_rcu(__filter_bc_runtime, &__event_notifier->filter_bytecode_runtime_head, node) { \ if (caa_unlikely(__filter_bc_runtime->filter(__filter_bc_runtime, \ - __stackvar.__filter_stack_data) & LTTNG_FILTER_RECORD_FLAG)) \ + __stackvar.__filter_stack_data) & LTTNG_INTERPRETER_RECORD_FLAG)) \ __filter_record = 1; \ } \ if (caa_likely(!__filter_record)) \ diff --git a/liblttng-ust/Makefile.am b/liblttng-ust/Makefile.am index f2cc835e..1f35535e 100644 --- a/liblttng-ust/Makefile.am +++ b/liblttng-ust/Makefile.am @@ -21,9 +21,15 @@ liblttng_ust_tracepoint_la_LDFLAGS = -no-undefined -version-info $(LTTNG_UST_LIB liblttng_ust_tracepoint_la_CFLAGS = -DUST_COMPONENT="liblttng_ust_tracepoint" $(AM_CFLAGS) liblttng_ust_runtime_la_SOURCES = \ + bytecode.h \ lttng-ust-comm.c \ lttng-ust-abi.c \ lttng-probes.c \ + lttng-bytecode.c \ + lttng-bytecode.h \ + lttng-bytecode-validator.c \ + lttng-bytecode-specialize.c \ + lttng-bytecode-interpreter.c \ lttng-context-provider.c \ lttng-context-vtid.c \ lttng-context-vpid.c \ @@ -47,12 +53,6 @@ liblttng_ust_runtime_la_SOURCES = \ lttng-context-vsgid.c \ lttng-context.c \ lttng-events.c \ - lttng-filter.c \ - lttng-filter.h \ - lttng-filter-validator.c \ - lttng-filter-specialize.c \ - lttng-filter-interpreter.c \ - filter-bytecode.h \ lttng-hash-helper.h \ lttng-ust-elf.c \ lttng-ust-statedump.c \ diff --git a/liblttng-ust/bytecode.h b/liblttng-ust/bytecode.h new file mode 100644 index 00000000..90ea9ad4 --- /dev/null +++ b/liblttng-ust/bytecode.h @@ -0,0 +1,251 @@ +#ifndef _BYTECODE_H +#define _BYTECODE_H + +/* + * bytecode.h + * + * LTTng bytecode + * + * Copyright 2012-2016 - Mathieu Desnoyers + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#include +#include + +#ifndef LTTNG_PACKED +#error "LTTNG_PACKED should be defined" +#endif + +/* + * offsets are absolute from start of bytecode. + */ + +struct field_ref { + /* Initially, symbol offset. After link, field offset. */ + uint16_t offset; +} __attribute__((packed)); + +struct get_symbol { + /* Symbol offset. */ + uint16_t offset; +} LTTNG_PACKED; + +struct get_index_u16 { + uint16_t index; +} LTTNG_PACKED; + +struct get_index_u64 { + uint64_t index; +} LTTNG_PACKED; + +struct literal_numeric { + int64_t v; +} __attribute__((packed)); + +struct literal_double { + double v; +} __attribute__((packed)); + +struct literal_string { + char string[0]; +} __attribute__((packed)); + +enum bytecode_op { + BYTECODE_OP_UNKNOWN = 0, + + BYTECODE_OP_RETURN = 1, + + /* binary */ + BYTECODE_OP_MUL = 2, + BYTECODE_OP_DIV = 3, + BYTECODE_OP_MOD = 4, + BYTECODE_OP_PLUS = 5, + BYTECODE_OP_MINUS = 6, + BYTECODE_OP_BIT_RSHIFT = 7, + BYTECODE_OP_BIT_LSHIFT = 8, + BYTECODE_OP_BIT_AND = 9, + BYTECODE_OP_BIT_OR = 10, + BYTECODE_OP_BIT_XOR = 11, + + /* binary comparators */ + BYTECODE_OP_EQ = 12, + BYTECODE_OP_NE = 13, + BYTECODE_OP_GT = 14, + BYTECODE_OP_LT = 15, + BYTECODE_OP_GE = 16, + BYTECODE_OP_LE = 17, + + /* string binary comparator: apply to */ + BYTECODE_OP_EQ_STRING = 18, + BYTECODE_OP_NE_STRING = 19, + BYTECODE_OP_GT_STRING = 20, + BYTECODE_OP_LT_STRING = 21, + BYTECODE_OP_GE_STRING = 22, + BYTECODE_OP_LE_STRING = 23, + + /* s64 binary comparator */ + BYTECODE_OP_EQ_S64 = 24, + BYTECODE_OP_NE_S64 = 25, + BYTECODE_OP_GT_S64 = 26, + BYTECODE_OP_LT_S64 = 27, + BYTECODE_OP_GE_S64 = 28, + BYTECODE_OP_LE_S64 = 29, + + /* double binary comparator */ + BYTECODE_OP_EQ_DOUBLE = 30, + BYTECODE_OP_NE_DOUBLE = 31, + BYTECODE_OP_GT_DOUBLE = 32, + BYTECODE_OP_LT_DOUBLE = 33, + BYTECODE_OP_GE_DOUBLE = 34, + BYTECODE_OP_LE_DOUBLE = 35, + + /* Mixed S64-double binary comparators */ + BYTECODE_OP_EQ_DOUBLE_S64 = 36, + BYTECODE_OP_NE_DOUBLE_S64 = 37, + BYTECODE_OP_GT_DOUBLE_S64 = 38, + BYTECODE_OP_LT_DOUBLE_S64 = 39, + BYTECODE_OP_GE_DOUBLE_S64 = 40, + BYTECODE_OP_LE_DOUBLE_S64 = 41, + + BYTECODE_OP_EQ_S64_DOUBLE = 42, + BYTECODE_OP_NE_S64_DOUBLE = 43, + BYTECODE_OP_GT_S64_DOUBLE = 44, + BYTECODE_OP_LT_S64_DOUBLE = 45, + BYTECODE_OP_GE_S64_DOUBLE = 46, + BYTECODE_OP_LE_S64_DOUBLE = 47, + + /* unary */ + BYTECODE_OP_UNARY_PLUS = 48, + BYTECODE_OP_UNARY_MINUS = 49, + BYTECODE_OP_UNARY_NOT = 50, + BYTECODE_OP_UNARY_PLUS_S64 = 51, + BYTECODE_OP_UNARY_MINUS_S64 = 52, + BYTECODE_OP_UNARY_NOT_S64 = 53, + BYTECODE_OP_UNARY_PLUS_DOUBLE = 54, + BYTECODE_OP_UNARY_MINUS_DOUBLE = 55, + BYTECODE_OP_UNARY_NOT_DOUBLE = 56, + + /* logical */ + BYTECODE_OP_AND = 57, + BYTECODE_OP_OR = 58, + + /* load field ref */ + BYTECODE_OP_LOAD_FIELD_REF = 59, + BYTECODE_OP_LOAD_FIELD_REF_STRING = 60, + BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE = 61, + BYTECODE_OP_LOAD_FIELD_REF_S64 = 62, + BYTECODE_OP_LOAD_FIELD_REF_DOUBLE = 63, + + /* load immediate from operand */ + BYTECODE_OP_LOAD_STRING = 64, + BYTECODE_OP_LOAD_S64 = 65, + BYTECODE_OP_LOAD_DOUBLE = 66, + + /* cast */ + BYTECODE_OP_CAST_TO_S64 = 67, + BYTECODE_OP_CAST_DOUBLE_TO_S64 = 68, + BYTECODE_OP_CAST_NOP = 69, + + /* get context ref */ + BYTECODE_OP_GET_CONTEXT_REF = 70, + BYTECODE_OP_GET_CONTEXT_REF_STRING = 
71, + BYTECODE_OP_GET_CONTEXT_REF_S64 = 72, + BYTECODE_OP_GET_CONTEXT_REF_DOUBLE = 73, + + /* load userspace field ref */ + BYTECODE_OP_LOAD_FIELD_REF_USER_STRING = 74, + BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE = 75, + + /* + * load immediate star globbing pattern (literal string) + * from immediate + */ + BYTECODE_OP_LOAD_STAR_GLOB_STRING = 76, + + /* globbing pattern binary operator: apply to */ + BYTECODE_OP_EQ_STAR_GLOB_STRING = 77, + BYTECODE_OP_NE_STAR_GLOB_STRING = 78, + + /* + * Instructions for recursive traversal through composed types. + */ + BYTECODE_OP_GET_CONTEXT_ROOT = 79, + BYTECODE_OP_GET_APP_CONTEXT_ROOT = 80, + BYTECODE_OP_GET_PAYLOAD_ROOT = 81, + + BYTECODE_OP_GET_SYMBOL = 82, + BYTECODE_OP_GET_SYMBOL_FIELD = 83, + BYTECODE_OP_GET_INDEX_U16 = 84, + BYTECODE_OP_GET_INDEX_U64 = 85, + + BYTECODE_OP_LOAD_FIELD = 86, + BYTECODE_OP_LOAD_FIELD_S8 = 87, + BYTECODE_OP_LOAD_FIELD_S16 = 88, + BYTECODE_OP_LOAD_FIELD_S32 = 89, + BYTECODE_OP_LOAD_FIELD_S64 = 90, + BYTECODE_OP_LOAD_FIELD_U8 = 91, + BYTECODE_OP_LOAD_FIELD_U16 = 92, + BYTECODE_OP_LOAD_FIELD_U32 = 93, + BYTECODE_OP_LOAD_FIELD_U64 = 94, + BYTECODE_OP_LOAD_FIELD_STRING = 95, + BYTECODE_OP_LOAD_FIELD_SEQUENCE = 96, + BYTECODE_OP_LOAD_FIELD_DOUBLE = 97, + + BYTECODE_OP_UNARY_BIT_NOT = 98, + + BYTECODE_OP_RETURN_S64 = 99, + + NR_BYTECODE_OPS, +}; + +typedef uint8_t bytecode_opcode_t; + +struct load_op { + bytecode_opcode_t op; + /* + * data to load. Size known by enum bytecode_opcode and null-term char. + */ + char data[0]; +} __attribute__((packed)); + +struct binary_op { + bytecode_opcode_t op; +} __attribute__((packed)); + +struct unary_op { + bytecode_opcode_t op; +} __attribute__((packed)); + +/* skip_offset is absolute from start of bytecode */ +struct logical_op { + bytecode_opcode_t op; + uint16_t skip_offset; /* bytecode insn, if skip second test */ +} __attribute__((packed)); + +struct cast_op { + bytecode_opcode_t op; +} __attribute__((packed)); + +struct return_op { + bytecode_opcode_t op; +} __attribute__((packed)); + +#endif /* _BYTECODE_H */ diff --git a/liblttng-ust/filter-bytecode.h b/liblttng-ust/filter-bytecode.h deleted file mode 100644 index 59e84555..00000000 --- a/liblttng-ust/filter-bytecode.h +++ /dev/null @@ -1,249 +0,0 @@ -#ifndef _FILTER_BYTECODE_H -#define _FILTER_BYTECODE_H - -/* - * filter-bytecode.h - * - * LTTng filter bytecode - * - * Copyright 2012-2016 - Mathieu Desnoyers - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -#include -#include - -#ifndef LTTNG_PACKED -#error "LTTNG_PACKED should be defined" -#endif - -/* - * offsets are absolute from start of bytecode. - */ - -struct field_ref { - /* Initially, symbol offset. After link, field offset. */ - uint16_t offset; -} __attribute__((packed)); - -struct get_symbol { - /* Symbol offset. */ - uint16_t offset; -} LTTNG_PACKED; - -struct get_index_u16 { - uint16_t index; -} LTTNG_PACKED; - -struct get_index_u64 { - uint64_t index; -} LTTNG_PACKED; - -struct literal_numeric { - int64_t v; -} __attribute__((packed)); - -struct literal_double { - double v; -} __attribute__((packed)); - -struct literal_string { - char string[0]; -} __attribute__((packed)); - -enum filter_op { - FILTER_OP_UNKNOWN = 0, - - FILTER_OP_RETURN = 1, - - /* binary */ - FILTER_OP_MUL = 2, - FILTER_OP_DIV = 3, - FILTER_OP_MOD = 4, - FILTER_OP_PLUS = 5, - FILTER_OP_MINUS = 6, - FILTER_OP_BIT_RSHIFT = 7, - FILTER_OP_BIT_LSHIFT = 8, - FILTER_OP_BIT_AND = 9, - FILTER_OP_BIT_OR = 10, - FILTER_OP_BIT_XOR = 11, - - /* binary comparators */ - FILTER_OP_EQ = 12, - FILTER_OP_NE = 13, - FILTER_OP_GT = 14, - FILTER_OP_LT = 15, - FILTER_OP_GE = 16, - FILTER_OP_LE = 17, - - /* string binary comparator: apply to */ - FILTER_OP_EQ_STRING = 18, - FILTER_OP_NE_STRING = 19, - FILTER_OP_GT_STRING = 20, - FILTER_OP_LT_STRING = 21, - FILTER_OP_GE_STRING = 22, - FILTER_OP_LE_STRING = 23, - - /* s64 binary comparator */ - FILTER_OP_EQ_S64 = 24, - FILTER_OP_NE_S64 = 25, - FILTER_OP_GT_S64 = 26, - FILTER_OP_LT_S64 = 27, - FILTER_OP_GE_S64 = 28, - FILTER_OP_LE_S64 = 29, - - /* double binary comparator */ - FILTER_OP_EQ_DOUBLE = 30, - FILTER_OP_NE_DOUBLE = 31, - FILTER_OP_GT_DOUBLE = 32, - FILTER_OP_LT_DOUBLE = 33, - FILTER_OP_GE_DOUBLE = 34, - FILTER_OP_LE_DOUBLE = 35, - - /* Mixed S64-double binary comparators */ - FILTER_OP_EQ_DOUBLE_S64 = 36, - FILTER_OP_NE_DOUBLE_S64 = 37, - FILTER_OP_GT_DOUBLE_S64 = 38, - FILTER_OP_LT_DOUBLE_S64 = 39, - FILTER_OP_GE_DOUBLE_S64 = 40, - FILTER_OP_LE_DOUBLE_S64 = 41, - - FILTER_OP_EQ_S64_DOUBLE = 42, - FILTER_OP_NE_S64_DOUBLE = 43, - FILTER_OP_GT_S64_DOUBLE = 44, - FILTER_OP_LT_S64_DOUBLE = 45, - FILTER_OP_GE_S64_DOUBLE = 46, - FILTER_OP_LE_S64_DOUBLE = 47, - - /* unary */ - FILTER_OP_UNARY_PLUS = 48, - FILTER_OP_UNARY_MINUS = 49, - FILTER_OP_UNARY_NOT = 50, - FILTER_OP_UNARY_PLUS_S64 = 51, - FILTER_OP_UNARY_MINUS_S64 = 52, - FILTER_OP_UNARY_NOT_S64 = 53, - FILTER_OP_UNARY_PLUS_DOUBLE = 54, - FILTER_OP_UNARY_MINUS_DOUBLE = 55, - FILTER_OP_UNARY_NOT_DOUBLE = 56, - - /* logical */ - FILTER_OP_AND = 57, - FILTER_OP_OR = 58, - - /* load field ref */ - FILTER_OP_LOAD_FIELD_REF = 59, - FILTER_OP_LOAD_FIELD_REF_STRING = 60, - FILTER_OP_LOAD_FIELD_REF_SEQUENCE = 61, - FILTER_OP_LOAD_FIELD_REF_S64 = 62, - FILTER_OP_LOAD_FIELD_REF_DOUBLE = 63, - - /* load immediate from operand */ - FILTER_OP_LOAD_STRING = 64, - FILTER_OP_LOAD_S64 = 65, - FILTER_OP_LOAD_DOUBLE = 66, - - /* cast */ - FILTER_OP_CAST_TO_S64 = 67, - FILTER_OP_CAST_DOUBLE_TO_S64 = 68, - FILTER_OP_CAST_NOP = 69, - - /* get context ref */ - FILTER_OP_GET_CONTEXT_REF = 70, - FILTER_OP_GET_CONTEXT_REF_STRING = 71, - FILTER_OP_GET_CONTEXT_REF_S64 = 72, - FILTER_OP_GET_CONTEXT_REF_DOUBLE = 73, - - /* load userspace field ref */ - FILTER_OP_LOAD_FIELD_REF_USER_STRING = 74, - FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE = 75, - - /* - * load immediate star globbing pattern (literal string) - * from immediate - */ - FILTER_OP_LOAD_STAR_GLOB_STRING = 76, - - /* globbing pattern binary operator: apply to */ - 
FILTER_OP_EQ_STAR_GLOB_STRING = 77, - FILTER_OP_NE_STAR_GLOB_STRING = 78, - - /* - * Instructions for recursive traversal through composed types. - */ - FILTER_OP_GET_CONTEXT_ROOT = 79, - FILTER_OP_GET_APP_CONTEXT_ROOT = 80, - FILTER_OP_GET_PAYLOAD_ROOT = 81, - - FILTER_OP_GET_SYMBOL = 82, - FILTER_OP_GET_SYMBOL_FIELD = 83, - FILTER_OP_GET_INDEX_U16 = 84, - FILTER_OP_GET_INDEX_U64 = 85, - - FILTER_OP_LOAD_FIELD = 86, - FILTER_OP_LOAD_FIELD_S8 = 87, - FILTER_OP_LOAD_FIELD_S16 = 88, - FILTER_OP_LOAD_FIELD_S32 = 89, - FILTER_OP_LOAD_FIELD_S64 = 90, - FILTER_OP_LOAD_FIELD_U8 = 91, - FILTER_OP_LOAD_FIELD_U16 = 92, - FILTER_OP_LOAD_FIELD_U32 = 93, - FILTER_OP_LOAD_FIELD_U64 = 94, - FILTER_OP_LOAD_FIELD_STRING = 95, - FILTER_OP_LOAD_FIELD_SEQUENCE = 96, - FILTER_OP_LOAD_FIELD_DOUBLE = 97, - - FILTER_OP_UNARY_BIT_NOT = 98, - - FILTER_OP_RETURN_S64 = 99, - - NR_FILTER_OPS, -}; - -typedef uint8_t filter_opcode_t; - -struct load_op { - filter_opcode_t op; - char data[0]; - /* data to load. Size known by enum filter_opcode and null-term char. */ -} __attribute__((packed)); - -struct binary_op { - filter_opcode_t op; -} __attribute__((packed)); - -struct unary_op { - filter_opcode_t op; -} __attribute__((packed)); - -/* skip_offset is absolute from start of bytecode */ -struct logical_op { - filter_opcode_t op; - uint16_t skip_offset; /* bytecode insn, if skip second test */ -} __attribute__((packed)); - -struct cast_op { - filter_opcode_t op; -} __attribute__((packed)); - -struct return_op { - filter_opcode_t op; -} __attribute__((packed)); - -#endif /* _FILTER_BYTECODE_H */ diff --git a/liblttng-ust/lttng-filter-interpreter.c b/liblttng-ust/lttng-bytecode-interpreter.c similarity index 78% rename from liblttng-ust/lttng-filter-interpreter.c rename to liblttng-ust/lttng-bytecode-interpreter.c index d00179cd..9d7258f7 100644 --- a/liblttng-ust/lttng-filter-interpreter.c +++ b/liblttng-ust/lttng-bytecode-interpreter.c @@ -1,7 +1,7 @@ /* - * lttng-filter-interpreter.c + * lttng-bytecode-interpreter.c * - * LTTng UST filter interpreter. + * LTTng UST bytecode interpreter. 
* * Copyright (C) 2010-2016 Mathieu Desnoyers * @@ -33,7 +33,7 @@ #include #include -#include "lttng-filter.h" +#include "lttng-bytecode.h" #include "string-utils.h" @@ -166,10 +166,10 @@ int stack_strcmp(struct estack *stack, int top, const char *cmp_type) return diff; } -uint64_t lttng_filter_interpret_bytecode_false(void *filter_data, +uint64_t lttng_bytecode_filter_interpret_false(void *filter_data, const char *filter_stack_data) { - return LTTNG_FILTER_DISCARD; + return LTTNG_INTERPRETER_DISCARD; } #ifdef INTERPRETER_USE_SWITCH @@ -183,9 +183,9 @@ uint64_t lttng_filter_interpret_bytecode_false(void *filter_data, for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \ pc = next_pc) { \ dbg_printf("Executing op %s (%u)\n", \ - print_op((unsigned int) *(filter_opcode_t *) pc), \ - (unsigned int) *(filter_opcode_t *) pc); \ - switch (*(filter_opcode_t *) pc) { + print_op((unsigned int) *(bytecode_opcode_t *) pc), \ + (unsigned int) *(bytecode_opcode_t *) pc); \ + switch (*(bytecode_opcode_t *) pc) { #define OP(name) jump_target_##name: __attribute__((unused)); \ case name @@ -209,14 +209,14 @@ uint64_t lttng_filter_interpret_bytecode_false(void *filter_data, pc = next_pc = start_pc; \ if (unlikely(pc - start_pc >= bytecode->len)) \ goto end; \ - goto *dispatch[*(filter_opcode_t *) pc]; + goto *dispatch[*(bytecode_opcode_t *) pc]; #define OP(name) \ LABEL_##name #define PO \ pc = next_pc; \ - goto *dispatch[*(filter_opcode_t *) pc]; + goto *dispatch[*(bytecode_opcode_t *) pc]; #define END_OP @@ -363,7 +363,7 @@ static int context_get_index(struct lttng_ctx *ctx, dbg_printf("context get index dynamic string %s\n", (const char *) ptr->ptr); break; default: - dbg_printf("Filter warning: unknown dynamic type (%d).\n", (int) v.sel); + dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v.sel); return -EINVAL; } break; @@ -382,9 +382,9 @@ static int dynamic_get_index(struct lttng_ctx *ctx, uint64_t index, struct estack_entry *stack_top) { int ret; - const struct filter_get_index_data *gid; + const struct bytecode_get_index_data *gid; - gid = (const struct filter_get_index_data *) &runtime->data[index]; + gid = (const struct bytecode_get_index_data *) &runtime->data[index]; switch (stack_top->u.ptr.type) { case LOAD_OBJECT: switch (stack_top->u.ptr.object_type) { @@ -477,7 +477,7 @@ static int dynamic_load_field(struct estack_entry *stack_top) case LOAD_ROOT_APP_CONTEXT: case LOAD_ROOT_PAYLOAD: default: - dbg_printf("Filter warning: cannot load root, missing field name.\n"); + dbg_printf("Interpreter warning: cannot load root, missing field name.\n"); ret = -EINVAL; goto end; } @@ -578,7 +578,7 @@ static int dynamic_load_field(struct estack_entry *stack_top) str = (const char *) stack_top->u.ptr.ptr; stack_top->u.s.str = str; if (unlikely(!stack_top->u.s.str)) { - dbg_printf("Filter warning: loading a NULL string.\n"); + dbg_printf("Interpreter warning: loading a NULL string.\n"); ret = -EINVAL; goto end; } @@ -598,7 +598,7 @@ static int dynamic_load_field(struct estack_entry *stack_top) stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long)); stack_top->type = REG_STRING; if (unlikely(!stack_top->u.s.str)) { - dbg_printf("Filter warning: loading a NULL sequence.\n"); + dbg_printf("Interpreter warning: loading a NULL sequence.\n"); ret = -EINVAL; goto end; } @@ -696,7 +696,7 @@ again: return -EINVAL; } - return LTTNG_FILTER_RECORD_FLAG; + return LTTNG_INTERPRETER_RECORD_FLAG; } /* @@ -718,158 +718,158 @@ uint64_t bytecode_interpret(void *interpreter_data, 
struct estack *stack = &_stack; register int64_t ax = 0, bx = 0; register enum entry_type ax_t = REG_UNKNOWN, bx_t = REG_UNKNOWN; - register int top = FILTER_STACK_EMPTY; + register int top = INTERPRETER_STACK_EMPTY; #ifndef INTERPRETER_USE_SWITCH - static void *dispatch[NR_FILTER_OPS] = { - [ FILTER_OP_UNKNOWN ] = &&LABEL_FILTER_OP_UNKNOWN, + static void *dispatch[NR_BYTECODE_OPS] = { + [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN, - [ FILTER_OP_RETURN ] = &&LABEL_FILTER_OP_RETURN, + [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN, /* binary */ - [ FILTER_OP_MUL ] = &&LABEL_FILTER_OP_MUL, - [ FILTER_OP_DIV ] = &&LABEL_FILTER_OP_DIV, - [ FILTER_OP_MOD ] = &&LABEL_FILTER_OP_MOD, - [ FILTER_OP_PLUS ] = &&LABEL_FILTER_OP_PLUS, - [ FILTER_OP_MINUS ] = &&LABEL_FILTER_OP_MINUS, - [ FILTER_OP_BIT_RSHIFT ] = &&LABEL_FILTER_OP_BIT_RSHIFT, - [ FILTER_OP_BIT_LSHIFT ] = &&LABEL_FILTER_OP_BIT_LSHIFT, - [ FILTER_OP_BIT_AND ] = &&LABEL_FILTER_OP_BIT_AND, - [ FILTER_OP_BIT_OR ] = &&LABEL_FILTER_OP_BIT_OR, - [ FILTER_OP_BIT_XOR ] = &&LABEL_FILTER_OP_BIT_XOR, + [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL, + [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV, + [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD, + [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS, + [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS, + [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT, + [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT, + [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND, + [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR, + [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR, /* binary comparators */ - [ FILTER_OP_EQ ] = &&LABEL_FILTER_OP_EQ, - [ FILTER_OP_NE ] = &&LABEL_FILTER_OP_NE, - [ FILTER_OP_GT ] = &&LABEL_FILTER_OP_GT, - [ FILTER_OP_LT ] = &&LABEL_FILTER_OP_LT, - [ FILTER_OP_GE ] = &&LABEL_FILTER_OP_GE, - [ FILTER_OP_LE ] = &&LABEL_FILTER_OP_LE, + [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ, + [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE, + [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT, + [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT, + [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE, + [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE, /* string binary comparator */ - [ FILTER_OP_EQ_STRING ] = &&LABEL_FILTER_OP_EQ_STRING, - [ FILTER_OP_NE_STRING ] = &&LABEL_FILTER_OP_NE_STRING, - [ FILTER_OP_GT_STRING ] = &&LABEL_FILTER_OP_GT_STRING, - [ FILTER_OP_LT_STRING ] = &&LABEL_FILTER_OP_LT_STRING, - [ FILTER_OP_GE_STRING ] = &&LABEL_FILTER_OP_GE_STRING, - [ FILTER_OP_LE_STRING ] = &&LABEL_FILTER_OP_LE_STRING, + [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING, + [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING, + [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING, + [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING, + [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING, + [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING, /* globbing pattern binary comparator */ - [ FILTER_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING, - [ FILTER_OP_NE_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING, + [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING, + [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING, /* s64 binary comparator */ - [ FILTER_OP_EQ_S64 ] = &&LABEL_FILTER_OP_EQ_S64, - [ FILTER_OP_NE_S64 ] = &&LABEL_FILTER_OP_NE_S64, - [ FILTER_OP_GT_S64 ] = &&LABEL_FILTER_OP_GT_S64, - [ FILTER_OP_LT_S64 ] = &&LABEL_FILTER_OP_LT_S64, - [ FILTER_OP_GE_S64 ] = 
&&LABEL_FILTER_OP_GE_S64, - [ FILTER_OP_LE_S64 ] = &&LABEL_FILTER_OP_LE_S64, + [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64, + [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64, + [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64, + [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64, + [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64, + [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64, /* double binary comparator */ - [ FILTER_OP_EQ_DOUBLE ] = &&LABEL_FILTER_OP_EQ_DOUBLE, - [ FILTER_OP_NE_DOUBLE ] = &&LABEL_FILTER_OP_NE_DOUBLE, - [ FILTER_OP_GT_DOUBLE ] = &&LABEL_FILTER_OP_GT_DOUBLE, - [ FILTER_OP_LT_DOUBLE ] = &&LABEL_FILTER_OP_LT_DOUBLE, - [ FILTER_OP_GE_DOUBLE ] = &&LABEL_FILTER_OP_GE_DOUBLE, - [ FILTER_OP_LE_DOUBLE ] = &&LABEL_FILTER_OP_LE_DOUBLE, + [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE, + [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE, + [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE, + [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE, + [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE, + [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE, /* Mixed S64-double binary comparators */ - [ FILTER_OP_EQ_DOUBLE_S64 ] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64, - [ FILTER_OP_NE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_NE_DOUBLE_S64, - [ FILTER_OP_GT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GT_DOUBLE_S64, - [ FILTER_OP_LT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LT_DOUBLE_S64, - [ FILTER_OP_GE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GE_DOUBLE_S64, - [ FILTER_OP_LE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LE_DOUBLE_S64, - - [ FILTER_OP_EQ_S64_DOUBLE ] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE, - [ FILTER_OP_NE_S64_DOUBLE ] = &&LABEL_FILTER_OP_NE_S64_DOUBLE, - [ FILTER_OP_GT_S64_DOUBLE ] = &&LABEL_FILTER_OP_GT_S64_DOUBLE, - [ FILTER_OP_LT_S64_DOUBLE ] = &&LABEL_FILTER_OP_LT_S64_DOUBLE, - [ FILTER_OP_GE_S64_DOUBLE ] = &&LABEL_FILTER_OP_GE_S64_DOUBLE, - [ FILTER_OP_LE_S64_DOUBLE ] = &&LABEL_FILTER_OP_LE_S64_DOUBLE, + [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64, + [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64, + [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64, + [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64, + [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64, + [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64, + + [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE, + [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE, + [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE, + [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE, + [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE, + [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE, /* unary */ - [ FILTER_OP_UNARY_PLUS ] = &&LABEL_FILTER_OP_UNARY_PLUS, - [ FILTER_OP_UNARY_MINUS ] = &&LABEL_FILTER_OP_UNARY_MINUS, - [ FILTER_OP_UNARY_NOT ] = &&LABEL_FILTER_OP_UNARY_NOT, - [ FILTER_OP_UNARY_PLUS_S64 ] = &&LABEL_FILTER_OP_UNARY_PLUS_S64, - [ FILTER_OP_UNARY_MINUS_S64 ] = &&LABEL_FILTER_OP_UNARY_MINUS_S64, - [ FILTER_OP_UNARY_NOT_S64 ] = &&LABEL_FILTER_OP_UNARY_NOT_S64, - [ FILTER_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE, - [ FILTER_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE, - [ FILTER_OP_UNARY_NOT_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE, + [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS, + [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS, + [ 
BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT, + [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64, + [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64, + [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64, + [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE, + [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE, + [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE, /* logical */ - [ FILTER_OP_AND ] = &&LABEL_FILTER_OP_AND, - [ FILTER_OP_OR ] = &&LABEL_FILTER_OP_OR, + [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND, + [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR, /* load field ref */ - [ FILTER_OP_LOAD_FIELD_REF ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF, - [ FILTER_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING, - [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE, - [ FILTER_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64, - [ FILTER_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE, + [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF, + [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING, + [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE, + [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64, + [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE, /* load from immediate operand */ - [ FILTER_OP_LOAD_STRING ] = &&LABEL_FILTER_OP_LOAD_STRING, - [ FILTER_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING, - [ FILTER_OP_LOAD_S64 ] = &&LABEL_FILTER_OP_LOAD_S64, - [ FILTER_OP_LOAD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_DOUBLE, + [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING, + [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING, + [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64, + [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE, /* cast */ - [ FILTER_OP_CAST_TO_S64 ] = &&LABEL_FILTER_OP_CAST_TO_S64, - [ FILTER_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64, - [ FILTER_OP_CAST_NOP ] = &&LABEL_FILTER_OP_CAST_NOP, + [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64, + [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64, + [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP, /* get context ref */ - [ FILTER_OP_GET_CONTEXT_REF ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF, - [ FILTER_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING, - [ FILTER_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64, - [ FILTER_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE, + [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF, + [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING, + [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64, + [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE, /* Instructions for recursive traversal through composed types. 
*/ - [ FILTER_OP_GET_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_CONTEXT_ROOT, - [ FILTER_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_APP_CONTEXT_ROOT, - [ FILTER_OP_GET_PAYLOAD_ROOT ] = &&LABEL_FILTER_OP_GET_PAYLOAD_ROOT, - - [ FILTER_OP_GET_SYMBOL ] = &&LABEL_FILTER_OP_GET_SYMBOL, - [ FILTER_OP_GET_SYMBOL_FIELD ] = &&LABEL_FILTER_OP_GET_SYMBOL_FIELD, - [ FILTER_OP_GET_INDEX_U16 ] = &&LABEL_FILTER_OP_GET_INDEX_U16, - [ FILTER_OP_GET_INDEX_U64 ] = &&LABEL_FILTER_OP_GET_INDEX_U64, - - [ FILTER_OP_LOAD_FIELD ] = &&LABEL_FILTER_OP_LOAD_FIELD, - [ FILTER_OP_LOAD_FIELD_S8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S8, - [ FILTER_OP_LOAD_FIELD_S16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S16, - [ FILTER_OP_LOAD_FIELD_S32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S32, - [ FILTER_OP_LOAD_FIELD_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S64, - [ FILTER_OP_LOAD_FIELD_U8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U8, - [ FILTER_OP_LOAD_FIELD_U16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U16, - [ FILTER_OP_LOAD_FIELD_U32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U32, - [ FILTER_OP_LOAD_FIELD_U64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U64, - [ FILTER_OP_LOAD_FIELD_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_STRING, - [ FILTER_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_SEQUENCE, - [ FILTER_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_DOUBLE, - - [ FILTER_OP_UNARY_BIT_NOT ] = &&LABEL_FILTER_OP_UNARY_BIT_NOT, - - [ FILTER_OP_RETURN_S64 ] = &&LABEL_FILTER_OP_RETURN_S64, + [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT, + [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT, + [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT, + + [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL, + [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD, + [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16, + [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64, + + [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD, + [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8, + [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16, + [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32, + [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64, + [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8, + [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16, + [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32, + [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64, + [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING, + [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE, + [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE, + + [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT, + + [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64, }; #endif /* #ifndef INTERPRETER_USE_SWITCH */ START_OP - OP(FILTER_OP_UNKNOWN): - OP(FILTER_OP_LOAD_FIELD_REF): + OP(BYTECODE_OP_UNKNOWN): + OP(BYTECODE_OP_LOAD_FIELD_REF): #ifdef INTERPRETER_USE_SWITCH default: #endif /* INTERPRETER_USE_SWITCH */ ERR("unknown bytecode op %u", - (unsigned int) *(filter_opcode_t *) pc); + (unsigned int) *(bytecode_opcode_t *) pc); ret = -EINVAL; goto end; - OP(FILTER_OP_RETURN): - /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */ + OP(BYTECODE_OP_RETURN): + /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */ /* Handle dynamic typing. 
*/ switch (estack_ax_t) { case REG_S64: @@ -894,24 +894,24 @@ uint64_t bytecode_interpret(void *interpreter_data, ret = 0; goto end; - OP(FILTER_OP_RETURN_S64): - /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */ + OP(BYTECODE_OP_RETURN_S64): + /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */ retval = !!estack_ax_v; ret = 0; goto end; /* binary */ - OP(FILTER_OP_MUL): - OP(FILTER_OP_DIV): - OP(FILTER_OP_MOD): - OP(FILTER_OP_PLUS): - OP(FILTER_OP_MINUS): + OP(BYTECODE_OP_MUL): + OP(BYTECODE_OP_DIV): + OP(BYTECODE_OP_MOD): + OP(BYTECODE_OP_PLUS): + OP(BYTECODE_OP_MINUS): ERR("unsupported bytecode op %u", - (unsigned int) *(filter_opcode_t *) pc); + (unsigned int) *(bytecode_opcode_t *) pc); ret = -EINVAL; goto end; - OP(FILTER_OP_EQ): + OP(BYTECODE_OP_EQ): { /* Dynamic typing. */ switch (estack_ax_t) { @@ -920,15 +920,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_EQ_S64); + JUMP_TO(BYTECODE_OP_EQ_S64); case REG_DOUBLE: - JUMP_TO(FILTER_OP_EQ_DOUBLE_S64); + JUMP_TO(BYTECODE_OP_EQ_DOUBLE_S64); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -938,15 +938,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_EQ_S64_DOUBLE); + JUMP_TO(BYTECODE_OP_EQ_S64_DOUBLE); case REG_DOUBLE: - JUMP_TO(FILTER_OP_EQ_DOUBLE); + JUMP_TO(BYTECODE_OP_EQ_DOUBLE); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -960,11 +960,11 @@ uint64_t bytecode_interpret(void *interpreter_data, ret = -EINVAL; goto end; case REG_STRING: - JUMP_TO(FILTER_OP_EQ_STRING); + JUMP_TO(BYTECODE_OP_EQ_STRING); case REG_STAR_GLOB_STRING: - JUMP_TO(FILTER_OP_EQ_STAR_GLOB_STRING); + JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING); default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -978,25 +978,25 @@ uint64_t bytecode_interpret(void *interpreter_data, ret = -EINVAL; goto end; case REG_STRING: - JUMP_TO(FILTER_OP_EQ_STAR_GLOB_STRING); + JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING); case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; } break; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_ax_t); ret = -EINVAL; goto end; } } - OP(FILTER_OP_NE): + OP(BYTECODE_OP_NE): { /* Dynamic typing. 
*/ switch (estack_ax_t) { @@ -1005,15 +1005,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_NE_S64); + JUMP_TO(BYTECODE_OP_NE_S64); case REG_DOUBLE: - JUMP_TO(FILTER_OP_NE_DOUBLE_S64); + JUMP_TO(BYTECODE_OP_NE_DOUBLE_S64); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -1023,15 +1023,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_NE_S64_DOUBLE); + JUMP_TO(BYTECODE_OP_NE_S64_DOUBLE); case REG_DOUBLE: - JUMP_TO(FILTER_OP_NE_DOUBLE); + JUMP_TO(BYTECODE_OP_NE_DOUBLE); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -1045,11 +1045,11 @@ uint64_t bytecode_interpret(void *interpreter_data, ret = -EINVAL; goto end; case REG_STRING: - JUMP_TO(FILTER_OP_NE_STRING); + JUMP_TO(BYTECODE_OP_NE_STRING); case REG_STAR_GLOB_STRING: - JUMP_TO(FILTER_OP_NE_STAR_GLOB_STRING); + JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING); default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -1063,25 +1063,25 @@ uint64_t bytecode_interpret(void *interpreter_data, ret = -EINVAL; goto end; case REG_STRING: - JUMP_TO(FILTER_OP_NE_STAR_GLOB_STRING); + JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING); case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; } break; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_ax_t); ret = -EINVAL; goto end; } } - OP(FILTER_OP_GT): + OP(BYTECODE_OP_GT): { /* Dynamic typing. 
*/ switch (estack_ax_t) { @@ -1090,15 +1090,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_GT_S64); + JUMP_TO(BYTECODE_OP_GT_S64); case REG_DOUBLE: - JUMP_TO(FILTER_OP_GT_DOUBLE_S64); + JUMP_TO(BYTECODE_OP_GT_DOUBLE_S64); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -1108,15 +1108,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_GT_S64_DOUBLE); + JUMP_TO(BYTECODE_OP_GT_S64_DOUBLE); case REG_DOUBLE: - JUMP_TO(FILTER_OP_GT_DOUBLE); + JUMP_TO(BYTECODE_OP_GT_DOUBLE); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -1131,22 +1131,22 @@ uint64_t bytecode_interpret(void *interpreter_data, ret = -EINVAL; goto end; case REG_STRING: - JUMP_TO(FILTER_OP_GT_STRING); + JUMP_TO(BYTECODE_OP_GT_STRING); default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; } break; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_ax_t); ret = -EINVAL; goto end; } } - OP(FILTER_OP_LT): + OP(BYTECODE_OP_LT): { /* Dynamic typing. */ switch (estack_ax_t) { @@ -1155,15 +1155,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_LT_S64); + JUMP_TO(BYTECODE_OP_LT_S64); case REG_DOUBLE: - JUMP_TO(FILTER_OP_LT_DOUBLE_S64); + JUMP_TO(BYTECODE_OP_LT_DOUBLE_S64); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -1173,15 +1173,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_LT_S64_DOUBLE); + JUMP_TO(BYTECODE_OP_LT_S64_DOUBLE); case REG_DOUBLE: - JUMP_TO(FILTER_OP_LT_DOUBLE); + JUMP_TO(BYTECODE_OP_LT_DOUBLE); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -1196,22 +1196,22 @@ uint64_t bytecode_interpret(void *interpreter_data, ret = -EINVAL; goto end; case REG_STRING: - JUMP_TO(FILTER_OP_LT_STRING); + JUMP_TO(BYTECODE_OP_LT_STRING); default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; } break; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_ax_t); ret = -EINVAL; goto end; } } - OP(FILTER_OP_GE): + OP(BYTECODE_OP_GE): { /* Dynamic typing. 
*/ switch (estack_ax_t) { @@ -1220,15 +1220,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_GE_S64); + JUMP_TO(BYTECODE_OP_GE_S64); case REG_DOUBLE: - JUMP_TO(FILTER_OP_GE_DOUBLE_S64); + JUMP_TO(BYTECODE_OP_GE_DOUBLE_S64); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -1238,15 +1238,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_GE_S64_DOUBLE); + JUMP_TO(BYTECODE_OP_GE_S64_DOUBLE); case REG_DOUBLE: - JUMP_TO(FILTER_OP_GE_DOUBLE); + JUMP_TO(BYTECODE_OP_GE_DOUBLE); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -1261,22 +1261,22 @@ uint64_t bytecode_interpret(void *interpreter_data, ret = -EINVAL; goto end; case REG_STRING: - JUMP_TO(FILTER_OP_GE_STRING); + JUMP_TO(BYTECODE_OP_GE_STRING); default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; } break; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_ax_t); ret = -EINVAL; goto end; } } - OP(FILTER_OP_LE): + OP(BYTECODE_OP_LE): { /* Dynamic typing. */ switch (estack_ax_t) { @@ -1285,15 +1285,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_LE_S64); + JUMP_TO(BYTECODE_OP_LE_S64); case REG_DOUBLE: - JUMP_TO(FILTER_OP_LE_DOUBLE_S64); + JUMP_TO(BYTECODE_OP_LE_DOUBLE_S64); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -1303,15 +1303,15 @@ uint64_t bytecode_interpret(void *interpreter_data, switch (estack_bx_t) { case REG_S64: /* Fall-through */ case REG_U64: - JUMP_TO(FILTER_OP_LE_S64_DOUBLE); + JUMP_TO(BYTECODE_OP_LE_S64_DOUBLE); case REG_DOUBLE: - JUMP_TO(FILTER_OP_LE_DOUBLE); + JUMP_TO(BYTECODE_OP_LE_DOUBLE); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; @@ -1326,23 +1326,23 @@ uint64_t bytecode_interpret(void *interpreter_data, ret = -EINVAL; goto end; case REG_STRING: - JUMP_TO(FILTER_OP_LE_STRING); + JUMP_TO(BYTECODE_OP_LE_STRING); default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_bx_t); ret = -EINVAL; goto end; } break; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_ax_t); ret = -EINVAL; goto end; } } - OP(FILTER_OP_EQ_STRING): + OP(BYTECODE_OP_EQ_STRING): { int res; @@ -1353,7 +1353,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_NE_STRING): + OP(BYTECODE_OP_NE_STRING): { int res; @@ -1364,7 +1364,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc 
+= sizeof(struct binary_op); PO; } - OP(FILTER_OP_GT_STRING): + OP(BYTECODE_OP_GT_STRING): { int res; @@ -1375,7 +1375,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_LT_STRING): + OP(BYTECODE_OP_LT_STRING): { int res; @@ -1386,7 +1386,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_GE_STRING): + OP(BYTECODE_OP_GE_STRING): { int res; @@ -1397,7 +1397,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_LE_STRING): + OP(BYTECODE_OP_LE_STRING): { int res; @@ -1409,7 +1409,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_EQ_STAR_GLOB_STRING): + OP(BYTECODE_OP_EQ_STAR_GLOB_STRING): { int res; @@ -1420,7 +1420,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_NE_STAR_GLOB_STRING): + OP(BYTECODE_OP_NE_STAR_GLOB_STRING): { int res; @@ -1432,7 +1432,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_EQ_S64): + OP(BYTECODE_OP_EQ_S64): { int res; @@ -1443,7 +1443,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_NE_S64): + OP(BYTECODE_OP_NE_S64): { int res; @@ -1454,7 +1454,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_GT_S64): + OP(BYTECODE_OP_GT_S64): { int res; @@ -1465,7 +1465,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_LT_S64): + OP(BYTECODE_OP_LT_S64): { int res; @@ -1476,7 +1476,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_GE_S64): + OP(BYTECODE_OP_GE_S64): { int res; @@ -1487,7 +1487,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_LE_S64): + OP(BYTECODE_OP_LE_S64): { int res; @@ -1499,7 +1499,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_EQ_DOUBLE): + OP(BYTECODE_OP_EQ_DOUBLE): { int res; @@ -1510,7 +1510,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_NE_DOUBLE): + OP(BYTECODE_OP_NE_DOUBLE): { int res; @@ -1521,7 +1521,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_GT_DOUBLE): + OP(BYTECODE_OP_GT_DOUBLE): { int res; @@ -1532,7 +1532,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_LT_DOUBLE): + OP(BYTECODE_OP_LT_DOUBLE): { int res; @@ -1543,7 +1543,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_GE_DOUBLE): + OP(BYTECODE_OP_GE_DOUBLE): { int res; @@ -1554,7 +1554,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_LE_DOUBLE): + OP(BYTECODE_OP_LE_DOUBLE): { int res; @@ -1567,7 +1567,7 @@ uint64_t bytecode_interpret(void *interpreter_data, } /* Mixed S64-double binary comparators */ - OP(FILTER_OP_EQ_DOUBLE_S64): + OP(BYTECODE_OP_EQ_DOUBLE_S64): { int res; @@ -1578,7 +1578,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_NE_DOUBLE_S64): + OP(BYTECODE_OP_NE_DOUBLE_S64): { int res; @@ -1589,7 +1589,7 @@ uint64_t 
bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_GT_DOUBLE_S64): + OP(BYTECODE_OP_GT_DOUBLE_S64): { int res; @@ -1600,7 +1600,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_LT_DOUBLE_S64): + OP(BYTECODE_OP_LT_DOUBLE_S64): { int res; @@ -1611,7 +1611,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_GE_DOUBLE_S64): + OP(BYTECODE_OP_GE_DOUBLE_S64): { int res; @@ -1622,7 +1622,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_LE_DOUBLE_S64): + OP(BYTECODE_OP_LE_DOUBLE_S64): { int res; @@ -1634,7 +1634,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_EQ_S64_DOUBLE): + OP(BYTECODE_OP_EQ_S64_DOUBLE): { int res; @@ -1645,7 +1645,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_NE_S64_DOUBLE): + OP(BYTECODE_OP_NE_S64_DOUBLE): { int res; @@ -1656,7 +1656,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_GT_S64_DOUBLE): + OP(BYTECODE_OP_GT_S64_DOUBLE): { int res; @@ -1667,7 +1667,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_LT_S64_DOUBLE): + OP(BYTECODE_OP_LT_S64_DOUBLE): { int res; @@ -1678,7 +1678,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_GE_S64_DOUBLE): + OP(BYTECODE_OP_GE_S64_DOUBLE): { int res; @@ -1689,7 +1689,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_LE_S64_DOUBLE): + OP(BYTECODE_OP_LE_S64_DOUBLE): { int res; @@ -1700,7 +1700,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_BIT_RSHIFT): + OP(BYTECODE_OP_BIT_RSHIFT): { int64_t res; @@ -1721,7 +1721,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_BIT_LSHIFT): + OP(BYTECODE_OP_BIT_LSHIFT): { int64_t res; @@ -1742,7 +1742,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_BIT_AND): + OP(BYTECODE_OP_BIT_AND): { int64_t res; @@ -1758,7 +1758,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_BIT_OR): + OP(BYTECODE_OP_BIT_OR): { int64_t res; @@ -1774,7 +1774,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct binary_op); PO; } - OP(FILTER_OP_BIT_XOR): + OP(BYTECODE_OP_BIT_XOR): { int64_t res; @@ -1792,61 +1792,61 @@ uint64_t bytecode_interpret(void *interpreter_data, } /* unary */ - OP(FILTER_OP_UNARY_PLUS): + OP(BYTECODE_OP_UNARY_PLUS): { /* Dynamic typing. */ switch (estack_ax_t) { case REG_S64: /* Fall-through. */ case REG_U64: - JUMP_TO(FILTER_OP_UNARY_PLUS_S64); + JUMP_TO(BYTECODE_OP_UNARY_PLUS_S64); case REG_DOUBLE: - JUMP_TO(FILTER_OP_UNARY_PLUS_DOUBLE); + JUMP_TO(BYTECODE_OP_UNARY_PLUS_DOUBLE); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_ax_t); ret = -EINVAL; goto end; } } - OP(FILTER_OP_UNARY_MINUS): + OP(BYTECODE_OP_UNARY_MINUS): { /* Dynamic typing. 
*/ switch (estack_ax_t) { case REG_S64: /* Fall-through. */ case REG_U64: - JUMP_TO(FILTER_OP_UNARY_MINUS_S64); + JUMP_TO(BYTECODE_OP_UNARY_MINUS_S64); case REG_DOUBLE: - JUMP_TO(FILTER_OP_UNARY_MINUS_DOUBLE); + JUMP_TO(BYTECODE_OP_UNARY_MINUS_DOUBLE); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_ax_t); ret = -EINVAL; goto end; } } - OP(FILTER_OP_UNARY_NOT): + OP(BYTECODE_OP_UNARY_NOT): { /* Dynamic typing. */ switch (estack_ax_t) { case REG_S64: /* Fall-through. */ case REG_U64: - JUMP_TO(FILTER_OP_UNARY_NOT_S64); + JUMP_TO(BYTECODE_OP_UNARY_NOT_S64); case REG_DOUBLE: - JUMP_TO(FILTER_OP_UNARY_NOT_DOUBLE); + JUMP_TO(BYTECODE_OP_UNARY_NOT_DOUBLE); case REG_STRING: /* Fall-through */ case REG_STAR_GLOB_STRING: ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_ax_t); ret = -EINVAL; goto end; @@ -1855,7 +1855,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_UNARY_BIT_NOT): + OP(BYTECODE_OP_UNARY_BIT_NOT): { /* Dynamic typing. */ if (!IS_INTEGER_REGISTER(estack_ax_t)) { @@ -1869,32 +1869,32 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_UNARY_PLUS_S64): - OP(FILTER_OP_UNARY_PLUS_DOUBLE): + OP(BYTECODE_OP_UNARY_PLUS_S64): + OP(BYTECODE_OP_UNARY_PLUS_DOUBLE): { next_pc += sizeof(struct unary_op); PO; } - OP(FILTER_OP_UNARY_MINUS_S64): + OP(BYTECODE_OP_UNARY_MINUS_S64): { estack_ax_v = -estack_ax_v; next_pc += sizeof(struct unary_op); PO; } - OP(FILTER_OP_UNARY_MINUS_DOUBLE): + OP(BYTECODE_OP_UNARY_MINUS_DOUBLE): { estack_ax(stack, top)->u.d = -estack_ax(stack, top)->u.d; next_pc += sizeof(struct unary_op); PO; } - OP(FILTER_OP_UNARY_NOT_S64): + OP(BYTECODE_OP_UNARY_NOT_S64): { estack_ax_v = !estack_ax_v; estack_ax_t = REG_S64; next_pc += sizeof(struct unary_op); PO; } - OP(FILTER_OP_UNARY_NOT_DOUBLE): + OP(BYTECODE_OP_UNARY_NOT_DOUBLE): { estack_ax_v = !estack_ax(stack, top)->u.d; estack_ax_t = REG_S64; @@ -1903,7 +1903,7 @@ uint64_t bytecode_interpret(void *interpreter_data, } /* logical */ - OP(FILTER_OP_AND): + OP(BYTECODE_OP_AND): { struct logical_op *insn = (struct logical_op *) pc; @@ -1923,7 +1923,7 @@ uint64_t bytecode_interpret(void *interpreter_data, } PO; } - OP(FILTER_OP_OR): + OP(BYTECODE_OP_OR): { struct logical_op *insn = (struct logical_op *) pc; @@ -1947,7 +1947,7 @@ uint64_t bytecode_interpret(void *interpreter_data, /* load field ref */ - OP(FILTER_OP_LOAD_FIELD_REF_STRING): + OP(BYTECODE_OP_LOAD_FIELD_REF_STRING): { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -1958,7 +1958,7 @@ uint64_t bytecode_interpret(void *interpreter_data, estack_ax(stack, top)->u.s.str = *(const char * const *) &interpreter_stack_data[ref->offset]; if (unlikely(!estack_ax(stack, top)->u.s.str)) { - dbg_printf("Filter warning: loading a NULL string.\n"); + dbg_printf("Interpreter warning: loading a NULL string.\n"); ret = -EINVAL; goto end; } @@ -1971,7 +1971,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE): + OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE): { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -1986,7 +1986,7 @@ uint64_t bytecode_interpret(void *interpreter_data, + sizeof(unsigned long)]); estack_ax_t = REG_STRING; if 
(unlikely(!estack_ax(stack, top)->u.s.str)) { - dbg_printf("Filter warning: loading a NULL sequence.\n"); + dbg_printf("Interpreter warning: loading a NULL sequence.\n"); ret = -EINVAL; goto end; } @@ -1996,7 +1996,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_LOAD_FIELD_REF_S64): + OP(BYTECODE_OP_LOAD_FIELD_REF_S64): { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -2012,7 +2012,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE): + OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE): { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -2029,7 +2029,7 @@ uint64_t bytecode_interpret(void *interpreter_data, } /* load from immediate operand */ - OP(FILTER_OP_LOAD_STRING): + OP(BYTECODE_OP_LOAD_STRING): { struct load_op *insn = (struct load_op *) pc; @@ -2044,7 +2044,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_LOAD_STAR_GLOB_STRING): + OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING): { struct load_op *insn = (struct load_op *) pc; @@ -2059,7 +2059,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_LOAD_S64): + OP(BYTECODE_OP_LOAD_S64): { struct load_op *insn = (struct load_op *) pc; @@ -2072,7 +2072,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_LOAD_DOUBLE): + OP(BYTECODE_OP_LOAD_DOUBLE): { struct load_op *insn = (struct load_op *) pc; @@ -2087,14 +2087,14 @@ uint64_t bytecode_interpret(void *interpreter_data, } /* cast */ - OP(FILTER_OP_CAST_TO_S64): + OP(BYTECODE_OP_CAST_TO_S64): { /* Dynamic typing. */ switch (estack_ax_t) { case REG_S64: - JUMP_TO(FILTER_OP_CAST_NOP); + JUMP_TO(BYTECODE_OP_CAST_NOP); case REG_DOUBLE: - JUMP_TO(FILTER_OP_CAST_DOUBLE_TO_S64); + JUMP_TO(BYTECODE_OP_CAST_DOUBLE_TO_S64); case REG_U64: estack_ax_t = REG_S64; next_pc += sizeof(struct cast_op); @@ -2103,14 +2103,14 @@ uint64_t bytecode_interpret(void *interpreter_data, ret = -EINVAL; goto end; default: - ERR("Unknown filter register type (%d)", + ERR("Unknown interpreter register type (%d)", (int) estack_ax_t); ret = -EINVAL; goto end; } } - OP(FILTER_OP_CAST_DOUBLE_TO_S64): + OP(BYTECODE_OP_CAST_DOUBLE_TO_S64): { estack_ax_v = (int64_t) estack_ax(stack, top)->u.d; estack_ax_t = REG_S64; @@ -2118,14 +2118,14 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_CAST_NOP): + OP(BYTECODE_OP_CAST_NOP): { next_pc += sizeof(struct cast_op); PO; } /* get context ref */ - OP(FILTER_OP_GET_CONTEXT_REF): + OP(BYTECODE_OP_GET_CONTEXT_REF): { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -2154,7 +2154,7 @@ uint64_t bytecode_interpret(void *interpreter_data, case LTTNG_UST_DYNAMIC_TYPE_STRING: estack_ax(stack, top)->u.s.str = v.u.str; if (unlikely(!estack_ax(stack, top)->u.s.str)) { - dbg_printf("Filter warning: loading a NULL string.\n"); + dbg_printf("Interpreter warning: loading a NULL string.\n"); ret = -EINVAL; goto end; } @@ -2165,7 +2165,7 @@ uint64_t bytecode_interpret(void *interpreter_data, estack_ax_t = REG_STRING; break; default: - dbg_printf("Filter warning: unknown dynamic type (%d).\n", (int) v.sel); + dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v.sel); ret = -EINVAL; goto end; } @@ -2173,7 +2173,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_GET_CONTEXT_REF_STRING): + OP(BYTECODE_OP_GET_CONTEXT_REF_STRING): { 
struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -2187,7 +2187,7 @@ uint64_t bytecode_interpret(void *interpreter_data, estack_push(stack, top, ax, bx, ax_t, bx_t); estack_ax(stack, top)->u.s.str = v.u.str; if (unlikely(!estack_ax(stack, top)->u.s.str)) { - dbg_printf("Filter warning: loading a NULL string.\n"); + dbg_printf("Interpreter warning: loading a NULL string.\n"); ret = -EINVAL; goto end; } @@ -2200,7 +2200,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_GET_CONTEXT_REF_S64): + OP(BYTECODE_OP_GET_CONTEXT_REF_S64): { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -2219,7 +2219,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE): + OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE): { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -2238,7 +2238,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_GET_CONTEXT_ROOT): + OP(BYTECODE_OP_GET_CONTEXT_ROOT): { dbg_printf("op get context root\n"); estack_push(stack, top, ax, bx, ax_t, bx_t); @@ -2250,7 +2250,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_GET_APP_CONTEXT_ROOT): + OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT): { dbg_printf("op get app context root\n"); estack_push(stack, top, ax, bx, ax_t, bx_t); @@ -2262,7 +2262,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_GET_PAYLOAD_ROOT): + OP(BYTECODE_OP_GET_PAYLOAD_ROOT): { dbg_printf("op get app payload root\n"); estack_push(stack, top, ax, bx, ax_t, bx_t); @@ -2275,7 +2275,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_GET_SYMBOL): + OP(BYTECODE_OP_GET_SYMBOL): { dbg_printf("op get symbol\n"); switch (estack_ax(stack, top)->u.ptr.type) { @@ -2297,7 +2297,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_GET_SYMBOL_FIELD): + OP(BYTECODE_OP_GET_SYMBOL_FIELD): { /* * Used for first variant encountered in a @@ -2307,7 +2307,7 @@ uint64_t bytecode_interpret(void *interpreter_data, goto end; } - OP(FILTER_OP_GET_INDEX_U16): + OP(BYTECODE_OP_GET_INDEX_U16): { struct load_op *insn = (struct load_op *) pc; struct get_index_u16 *index = (struct get_index_u16 *) insn->data; @@ -2322,7 +2322,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_GET_INDEX_U64): + OP(BYTECODE_OP_GET_INDEX_U64): { struct load_op *insn = (struct load_op *) pc; struct get_index_u64 *index = (struct get_index_u64 *) insn->data; @@ -2337,7 +2337,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_LOAD_FIELD): + OP(BYTECODE_OP_LOAD_FIELD): { dbg_printf("op load field\n"); ret = dynamic_load_field(estack_ax(stack, top)); @@ -2349,7 +2349,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_LOAD_FIELD_S8): + OP(BYTECODE_OP_LOAD_FIELD_S8): { dbg_printf("op load field s8\n"); @@ -2358,7 +2358,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct load_op); PO; } - OP(FILTER_OP_LOAD_FIELD_S16): + OP(BYTECODE_OP_LOAD_FIELD_S16): { dbg_printf("op load field s16\n"); @@ -2367,7 +2367,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct load_op); PO; } - OP(FILTER_OP_LOAD_FIELD_S32): + OP(BYTECODE_OP_LOAD_FIELD_S32): { dbg_printf("op load field s32\n"); @@ -2376,7 +2376,7 @@ uint64_t bytecode_interpret(void *interpreter_data, 
next_pc += sizeof(struct load_op); PO; } - OP(FILTER_OP_LOAD_FIELD_S64): + OP(BYTECODE_OP_LOAD_FIELD_S64): { dbg_printf("op load field s64\n"); @@ -2385,7 +2385,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct load_op); PO; } - OP(FILTER_OP_LOAD_FIELD_U8): + OP(BYTECODE_OP_LOAD_FIELD_U8): { dbg_printf("op load field u8\n"); @@ -2394,7 +2394,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct load_op); PO; } - OP(FILTER_OP_LOAD_FIELD_U16): + OP(BYTECODE_OP_LOAD_FIELD_U16): { dbg_printf("op load field u16\n"); @@ -2403,7 +2403,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct load_op); PO; } - OP(FILTER_OP_LOAD_FIELD_U32): + OP(BYTECODE_OP_LOAD_FIELD_U32): { dbg_printf("op load field u32\n"); @@ -2412,7 +2412,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct load_op); PO; } - OP(FILTER_OP_LOAD_FIELD_U64): + OP(BYTECODE_OP_LOAD_FIELD_U64): { dbg_printf("op load field u64\n"); @@ -2421,7 +2421,7 @@ uint64_t bytecode_interpret(void *interpreter_data, next_pc += sizeof(struct load_op); PO; } - OP(FILTER_OP_LOAD_FIELD_DOUBLE): + OP(BYTECODE_OP_LOAD_FIELD_DOUBLE): { dbg_printf("op load field double\n"); @@ -2433,7 +2433,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_LOAD_FIELD_STRING): + OP(BYTECODE_OP_LOAD_FIELD_STRING): { const char *str; @@ -2441,7 +2441,7 @@ uint64_t bytecode_interpret(void *interpreter_data, str = (const char *) estack_ax(stack, top)->u.ptr.ptr; estack_ax(stack, top)->u.s.str = str; if (unlikely(!estack_ax(stack, top)->u.s.str)) { - dbg_printf("Filter warning: loading a NULL string.\n"); + dbg_printf("Interpreter warning: loading a NULL string.\n"); ret = -EINVAL; goto end; } @@ -2453,7 +2453,7 @@ uint64_t bytecode_interpret(void *interpreter_data, PO; } - OP(FILTER_OP_LOAD_FIELD_SEQUENCE): + OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE): { const char *ptr; @@ -2463,7 +2463,7 @@ uint64_t bytecode_interpret(void *interpreter_data, estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long)); estack_ax(stack, top)->type = REG_STRING; if (unlikely(!estack_ax(stack, top)->u.s.str)) { - dbg_printf("Filter warning: loading a NULL sequence.\n"); + dbg_printf("Interpreter warning: loading a NULL sequence.\n"); ret = -EINVAL; goto end; } @@ -2477,7 +2477,7 @@ uint64_t bytecode_interpret(void *interpreter_data, end: /* Return _DISCARD on error. */ if (ret) - return LTTNG_FILTER_DISCARD; + return LTTNG_INTERPRETER_DISCARD; if (output) { return lttng_bytecode_interpret_format_output(estack_ax(stack, top), @@ -2487,7 +2487,7 @@ end: return retval; } -uint64_t lttng_filter_interpret_bytecode(void *filter_data, +uint64_t lttng_bytecode_filter_interpret(void *filter_data, const char *filter_stack_data) { return bytecode_interpret(filter_data, filter_stack_data, NULL); diff --git a/liblttng-ust/lttng-filter-specialize.c b/liblttng-ust/lttng-bytecode-specialize.c similarity index 83% rename from liblttng-ust/lttng-filter-specialize.c rename to liblttng-ust/lttng-bytecode-specialize.c index 947fde29..55b2ebf2 100644 --- a/liblttng-ust/lttng-filter-specialize.c +++ b/liblttng-ust/lttng-bytecode-specialize.c @@ -1,7 +1,7 @@ /* - * lttng-filter-specialize.c + * lttng-bytecode-specialize.c * - * LTTng UST filter code specializer. + * LTTng UST bytecode specializer. 
* * Copyright (C) 2010-2016 Mathieu Desnoyers * @@ -28,7 +28,7 @@ #include #include -#include "lttng-filter.h" +#include "lttng-bytecode.h" #include #include "ust-events-internal.h" @@ -80,7 +80,7 @@ static ssize_t bytecode_reserve_data(struct bytecode_runtime *runtime, size_t new_alloc_len = new_len; size_t old_alloc_len = runtime->data_alloc_len; - if (new_len > FILTER_MAX_DATA_LEN) + if (new_len > BYTECODE_MAX_DATA_LEN) return -EINVAL; if (new_alloc_len > old_alloc_len) { @@ -126,7 +126,7 @@ static int specialize_load_field(struct vstack_entry *stack_top, case LOAD_ROOT_APP_CONTEXT: case LOAD_ROOT_PAYLOAD: default: - dbg_printf("Filter warning: cannot load root, missing field name.\n"); + dbg_printf("Bytecode warning: cannot load root, missing field name.\n"); ret = -EINVAL; goto end; } @@ -135,62 +135,62 @@ static int specialize_load_field(struct vstack_entry *stack_top, dbg_printf("op load field s8\n"); stack_top->type = REG_S64; if (!stack_top->load.rev_bo) - insn->op = FILTER_OP_LOAD_FIELD_S8; + insn->op = BYTECODE_OP_LOAD_FIELD_S8; break; case OBJECT_TYPE_S16: dbg_printf("op load field s16\n"); stack_top->type = REG_S64; if (!stack_top->load.rev_bo) - insn->op = FILTER_OP_LOAD_FIELD_S16; + insn->op = BYTECODE_OP_LOAD_FIELD_S16; break; case OBJECT_TYPE_S32: dbg_printf("op load field s32\n"); stack_top->type = REG_S64; if (!stack_top->load.rev_bo) - insn->op = FILTER_OP_LOAD_FIELD_S32; + insn->op = BYTECODE_OP_LOAD_FIELD_S32; break; case OBJECT_TYPE_S64: dbg_printf("op load field s64\n"); stack_top->type = REG_S64; if (!stack_top->load.rev_bo) - insn->op = FILTER_OP_LOAD_FIELD_S64; + insn->op = BYTECODE_OP_LOAD_FIELD_S64; break; case OBJECT_TYPE_U8: dbg_printf("op load field u8\n"); stack_top->type = REG_U64; - insn->op = FILTER_OP_LOAD_FIELD_U8; + insn->op = BYTECODE_OP_LOAD_FIELD_U8; break; case OBJECT_TYPE_U16: dbg_printf("op load field u16\n"); stack_top->type = REG_U64; if (!stack_top->load.rev_bo) - insn->op = FILTER_OP_LOAD_FIELD_U16; + insn->op = BYTECODE_OP_LOAD_FIELD_U16; break; case OBJECT_TYPE_U32: dbg_printf("op load field u32\n"); stack_top->type = REG_U64; if (!stack_top->load.rev_bo) - insn->op = FILTER_OP_LOAD_FIELD_U32; + insn->op = BYTECODE_OP_LOAD_FIELD_U32; break; case OBJECT_TYPE_U64: dbg_printf("op load field u64\n"); stack_top->type = REG_U64; if (!stack_top->load.rev_bo) - insn->op = FILTER_OP_LOAD_FIELD_U64; + insn->op = BYTECODE_OP_LOAD_FIELD_U64; break; case OBJECT_TYPE_DOUBLE: stack_top->type = REG_DOUBLE; - insn->op = FILTER_OP_LOAD_FIELD_DOUBLE; + insn->op = BYTECODE_OP_LOAD_FIELD_DOUBLE; break; case OBJECT_TYPE_STRING: dbg_printf("op load field string\n"); stack_top->type = REG_STRING; - insn->op = FILTER_OP_LOAD_FIELD_STRING; + insn->op = BYTECODE_OP_LOAD_FIELD_STRING; break; case OBJECT_TYPE_STRING_SEQUENCE: dbg_printf("op load field string sequence\n"); stack_top->type = REG_STRING; - insn->op = FILTER_OP_LOAD_FIELD_SEQUENCE; + insn->op = BYTECODE_OP_LOAD_FIELD_SEQUENCE; break; case OBJECT_TYPE_DYNAMIC: dbg_printf("op load field dynamic\n"); @@ -251,7 +251,7 @@ static int specialize_get_index(struct bytecode_runtime *runtime, int idx_len) { int ret; - struct filter_get_index_data gid; + struct bytecode_get_index_data gid; ssize_t data_offset; memset(&gid, 0, sizeof(gid)); @@ -514,7 +514,7 @@ static int specialize_context_lookup(struct lttng_ctx *ctx, int idx, ret; struct lttng_ctx_field *ctx_field; struct lttng_event_field *field; - struct filter_get_index_data gid; + struct bytecode_get_index_data gid; ssize_t data_offset; idx = 
specialize_context_lookup_name(ctx, runtime, insn); @@ -527,7 +527,7 @@ static int specialize_context_lookup(struct lttng_ctx *ctx, if (ret) return ret; /* Specialize each get_symbol into a get_index. */ - insn->op = FILTER_OP_GET_INDEX_U16; + insn->op = BYTECODE_OP_GET_INDEX_U16; memset(&gid, 0, sizeof(gid)); gid.ctx_index = idx; gid.elem.type = load->object_type; @@ -553,7 +553,7 @@ static int specialize_app_context_lookup(struct lttng_ctx **pctx, int idx, ret; struct lttng_ctx_field *ctx_field; struct lttng_event_field *field; - struct filter_get_index_data gid; + struct bytecode_get_index_data gid; ssize_t data_offset; offset = ((struct get_symbol *) insn->data)->offset; @@ -582,7 +582,7 @@ static int specialize_app_context_lookup(struct lttng_ctx **pctx, if (ret) goto end; /* Specialize each get_symbol into a get_index. */ - insn->op = FILTER_OP_GET_INDEX_U16; + insn->op = BYTECODE_OP_GET_INDEX_U16; memset(&gid, 0, sizeof(gid)); gid.ctx_index = idx; gid.elem.type = load->object_type; @@ -613,7 +613,7 @@ static int specialize_payload_lookup(const struct lttng_event_desc *event_desc, uint32_t field_offset = 0; const struct lttng_event_field *field; int ret; - struct filter_get_index_data gid; + struct bytecode_get_index_data gid; ssize_t data_offset; nr_fields = event_desc->nr_fields; @@ -663,7 +663,7 @@ static int specialize_payload_lookup(const struct lttng_event_desc *event_desc, goto end; /* Specialize each get_symbol into a get_index. */ - insn->op = FILTER_OP_GET_INDEX_U16; + insn->op = BYTECODE_OP_GET_INDEX_U16; memset(&gid, 0, sizeof(gid)); gid.offset = field_offset; gid.elem.type = load->object_type; @@ -681,7 +681,7 @@ end: return ret; } -int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, +int lttng_bytecode_specialize(const struct lttng_event_desc *event_desc, struct bytecode_runtime *bytecode) { void *pc, *next_pc, *start_pc; @@ -695,22 +695,22 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, start_pc = &bytecode->code[0]; for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; pc = next_pc) { - switch (*(filter_opcode_t *) pc) { - case FILTER_OP_UNKNOWN: + switch (*(bytecode_opcode_t *) pc) { + case BYTECODE_OP_UNKNOWN: default: ERR("unknown bytecode op %u\n", - (unsigned int) *(filter_opcode_t *) pc); + (unsigned int) *(bytecode_opcode_t *) pc); ret = -EINVAL; goto end; - case FILTER_OP_RETURN: + case BYTECODE_OP_RETURN: if (vstack_ax(stack)->type == REG_S64 || vstack_ax(stack)->type == REG_U64) - *(filter_opcode_t *) pc = FILTER_OP_RETURN_S64; + *(bytecode_opcode_t *) pc = BYTECODE_OP_RETURN_S64; ret = 0; goto end; - case FILTER_OP_RETURN_S64: + case BYTECODE_OP_RETURN_S64: if (vstack_ax(stack)->type != REG_S64 && vstack_ax(stack)->type != REG_U64) { ERR("Unexpected register type\n"); @@ -721,17 +721,17 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, goto end; /* binary */ - case FILTER_OP_MUL: - case FILTER_OP_DIV: - case FILTER_OP_MOD: - case FILTER_OP_PLUS: - case FILTER_OP_MINUS: + case BYTECODE_OP_MUL: + case BYTECODE_OP_DIV: + case BYTECODE_OP_MOD: + case BYTECODE_OP_PLUS: + case BYTECODE_OP_MINUS: ERR("unsupported bytecode op %u\n", - (unsigned int) *(filter_opcode_t *) pc); + (unsigned int) *(bytecode_opcode_t *) pc); ret = -EINVAL; goto end; - case FILTER_OP_EQ: + case BYTECODE_OP_EQ: { struct binary_op *insn = (struct binary_op *) pc; @@ -745,14 +745,14 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, if (vstack_bx(stack)->type 
== REG_UNKNOWN) break; if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING) - insn->op = FILTER_OP_EQ_STAR_GLOB_STRING; + insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING; else - insn->op = FILTER_OP_EQ_STRING; + insn->op = BYTECODE_OP_EQ_STRING; break; case REG_STAR_GLOB_STRING: if (vstack_bx(stack)->type == REG_UNKNOWN) break; - insn->op = FILTER_OP_EQ_STAR_GLOB_STRING; + insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING; break; case REG_S64: case REG_U64: @@ -760,18 +760,18 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_EQ_S64; + insn->op = BYTECODE_OP_EQ_S64; else - insn->op = FILTER_OP_EQ_DOUBLE_S64; + insn->op = BYTECODE_OP_EQ_DOUBLE_S64; break; case REG_DOUBLE: if (vstack_bx(stack)->type == REG_UNKNOWN) break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_EQ_S64_DOUBLE; + insn->op = BYTECODE_OP_EQ_S64_DOUBLE; else - insn->op = FILTER_OP_EQ_DOUBLE; + insn->op = BYTECODE_OP_EQ_DOUBLE; break; case REG_UNKNOWN: break; /* Dynamic typing. */ @@ -786,7 +786,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_NE: + case BYTECODE_OP_NE: { struct binary_op *insn = (struct binary_op *) pc; @@ -800,14 +800,14 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, if (vstack_bx(stack)->type == REG_UNKNOWN) break; if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING) - insn->op = FILTER_OP_NE_STAR_GLOB_STRING; + insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING; else - insn->op = FILTER_OP_NE_STRING; + insn->op = BYTECODE_OP_NE_STRING; break; case REG_STAR_GLOB_STRING: if (vstack_bx(stack)->type == REG_UNKNOWN) break; - insn->op = FILTER_OP_NE_STAR_GLOB_STRING; + insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING; break; case REG_S64: case REG_U64: @@ -815,18 +815,18 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_NE_S64; + insn->op = BYTECODE_OP_NE_S64; else - insn->op = FILTER_OP_NE_DOUBLE_S64; + insn->op = BYTECODE_OP_NE_DOUBLE_S64; break; case REG_DOUBLE: if (vstack_bx(stack)->type == REG_UNKNOWN) break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_NE_S64_DOUBLE; + insn->op = BYTECODE_OP_NE_S64_DOUBLE; else - insn->op = FILTER_OP_NE_DOUBLE; + insn->op = BYTECODE_OP_NE_DOUBLE; break; case REG_UNKNOWN: break; /* Dynamic typing. 
*/ @@ -841,7 +841,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_GT: + case BYTECODE_OP_GT: { struct binary_op *insn = (struct binary_op *) pc; @@ -858,7 +858,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, case REG_STRING: if (vstack_bx(stack)->type == REG_UNKNOWN) break; - insn->op = FILTER_OP_GT_STRING; + insn->op = BYTECODE_OP_GT_STRING; break; case REG_S64: case REG_U64: @@ -866,18 +866,18 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_GT_S64; + insn->op = BYTECODE_OP_GT_S64; else - insn->op = FILTER_OP_GT_DOUBLE_S64; + insn->op = BYTECODE_OP_GT_DOUBLE_S64; break; case REG_DOUBLE: if (vstack_bx(stack)->type == REG_UNKNOWN) break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_GT_S64_DOUBLE; + insn->op = BYTECODE_OP_GT_S64_DOUBLE; else - insn->op = FILTER_OP_GT_DOUBLE; + insn->op = BYTECODE_OP_GT_DOUBLE; break; case REG_UNKNOWN: break; /* Dynamic typing. */ @@ -892,7 +892,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_LT: + case BYTECODE_OP_LT: { struct binary_op *insn = (struct binary_op *) pc; @@ -909,7 +909,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, case REG_STRING: if (vstack_bx(stack)->type == REG_UNKNOWN) break; - insn->op = FILTER_OP_LT_STRING; + insn->op = BYTECODE_OP_LT_STRING; break; case REG_S64: case REG_U64: @@ -917,18 +917,18 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_LT_S64; + insn->op = BYTECODE_OP_LT_S64; else - insn->op = FILTER_OP_LT_DOUBLE_S64; + insn->op = BYTECODE_OP_LT_DOUBLE_S64; break; case REG_DOUBLE: if (vstack_bx(stack)->type == REG_UNKNOWN) break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_LT_S64_DOUBLE; + insn->op = BYTECODE_OP_LT_S64_DOUBLE; else - insn->op = FILTER_OP_LT_DOUBLE; + insn->op = BYTECODE_OP_LT_DOUBLE; break; case REG_UNKNOWN: break; /* Dynamic typing. */ @@ -943,7 +943,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_GE: + case BYTECODE_OP_GE: { struct binary_op *insn = (struct binary_op *) pc; @@ -960,7 +960,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, case REG_STRING: if (vstack_bx(stack)->type == REG_UNKNOWN) break; - insn->op = FILTER_OP_GE_STRING; + insn->op = BYTECODE_OP_GE_STRING; break; case REG_S64: case REG_U64: @@ -968,18 +968,18 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_GE_S64; + insn->op = BYTECODE_OP_GE_S64; else - insn->op = FILTER_OP_GE_DOUBLE_S64; + insn->op = BYTECODE_OP_GE_DOUBLE_S64; break; case REG_DOUBLE: if (vstack_bx(stack)->type == REG_UNKNOWN) break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_GE_S64_DOUBLE; + insn->op = BYTECODE_OP_GE_S64_DOUBLE; else - insn->op = FILTER_OP_GE_DOUBLE; + insn->op = BYTECODE_OP_GE_DOUBLE; break; case REG_UNKNOWN: break; /* Dynamic typing. 
*/ @@ -993,7 +993,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, next_pc += sizeof(struct binary_op); break; } - case FILTER_OP_LE: + case BYTECODE_OP_LE: { struct binary_op *insn = (struct binary_op *) pc; @@ -1010,7 +1010,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, case REG_STRING: if (vstack_bx(stack)->type == REG_UNKNOWN) break; - insn->op = FILTER_OP_LE_STRING; + insn->op = BYTECODE_OP_LE_STRING; break; case REG_S64: case REG_U64: @@ -1018,18 +1018,18 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_LE_S64; + insn->op = BYTECODE_OP_LE_S64; else - insn->op = FILTER_OP_LE_DOUBLE_S64; + insn->op = BYTECODE_OP_LE_DOUBLE_S64; break; case REG_DOUBLE: if (vstack_bx(stack)->type == REG_UNKNOWN) break; if (vstack_bx(stack)->type == REG_S64 || vstack_bx(stack)->type == REG_U64) - insn->op = FILTER_OP_LE_S64_DOUBLE; + insn->op = BYTECODE_OP_LE_S64_DOUBLE; else - insn->op = FILTER_OP_LE_DOUBLE; + insn->op = BYTECODE_OP_LE_DOUBLE; break; case REG_UNKNOWN: break; /* Dynamic typing. */ @@ -1039,38 +1039,38 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_EQ_STRING: - case FILTER_OP_NE_STRING: - case FILTER_OP_GT_STRING: - case FILTER_OP_LT_STRING: - case FILTER_OP_GE_STRING: - case FILTER_OP_LE_STRING: - case FILTER_OP_EQ_STAR_GLOB_STRING: - case FILTER_OP_NE_STAR_GLOB_STRING: - case FILTER_OP_EQ_S64: - case FILTER_OP_NE_S64: - case FILTER_OP_GT_S64: - case FILTER_OP_LT_S64: - case FILTER_OP_GE_S64: - case FILTER_OP_LE_S64: - case FILTER_OP_EQ_DOUBLE: - case FILTER_OP_NE_DOUBLE: - case FILTER_OP_GT_DOUBLE: - case FILTER_OP_LT_DOUBLE: - case FILTER_OP_GE_DOUBLE: - case FILTER_OP_LE_DOUBLE: - case FILTER_OP_EQ_DOUBLE_S64: - case FILTER_OP_NE_DOUBLE_S64: - case FILTER_OP_GT_DOUBLE_S64: - case FILTER_OP_LT_DOUBLE_S64: - case FILTER_OP_GE_DOUBLE_S64: - case FILTER_OP_LE_DOUBLE_S64: - case FILTER_OP_EQ_S64_DOUBLE: - case FILTER_OP_NE_S64_DOUBLE: - case FILTER_OP_GT_S64_DOUBLE: - case FILTER_OP_LT_S64_DOUBLE: - case FILTER_OP_GE_S64_DOUBLE: - case FILTER_OP_LE_S64_DOUBLE: + case BYTECODE_OP_EQ_STRING: + case BYTECODE_OP_NE_STRING: + case BYTECODE_OP_GT_STRING: + case BYTECODE_OP_LT_STRING: + case BYTECODE_OP_GE_STRING: + case BYTECODE_OP_LE_STRING: + case BYTECODE_OP_EQ_STAR_GLOB_STRING: + case BYTECODE_OP_NE_STAR_GLOB_STRING: + case BYTECODE_OP_EQ_S64: + case BYTECODE_OP_NE_S64: + case BYTECODE_OP_GT_S64: + case BYTECODE_OP_LT_S64: + case BYTECODE_OP_GE_S64: + case BYTECODE_OP_LE_S64: + case BYTECODE_OP_EQ_DOUBLE: + case BYTECODE_OP_NE_DOUBLE: + case BYTECODE_OP_GT_DOUBLE: + case BYTECODE_OP_LT_DOUBLE: + case BYTECODE_OP_GE_DOUBLE: + case BYTECODE_OP_LE_DOUBLE: + case BYTECODE_OP_EQ_DOUBLE_S64: + case BYTECODE_OP_NE_DOUBLE_S64: + case BYTECODE_OP_GT_DOUBLE_S64: + case BYTECODE_OP_LT_DOUBLE_S64: + case BYTECODE_OP_GE_DOUBLE_S64: + case BYTECODE_OP_LE_DOUBLE_S64: + case BYTECODE_OP_EQ_S64_DOUBLE: + case BYTECODE_OP_NE_S64_DOUBLE: + case BYTECODE_OP_GT_S64_DOUBLE: + case BYTECODE_OP_LT_S64_DOUBLE: + case BYTECODE_OP_GE_S64_DOUBLE: + case BYTECODE_OP_LE_S64_DOUBLE: { /* Pop 2, push 1 */ if (vstack_pop(stack)) { @@ -1082,11 +1082,11 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_BIT_RSHIFT: - case FILTER_OP_BIT_LSHIFT: - case FILTER_OP_BIT_AND: - case FILTER_OP_BIT_OR: - case 
FILTER_OP_BIT_XOR: + case BYTECODE_OP_BIT_RSHIFT: + case BYTECODE_OP_BIT_LSHIFT: + case BYTECODE_OP_BIT_AND: + case BYTECODE_OP_BIT_OR: + case BYTECODE_OP_BIT_XOR: { /* Pop 2, push 1 */ if (vstack_pop(stack)) { @@ -1099,7 +1099,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, } /* unary */ - case FILTER_OP_UNARY_PLUS: + case BYTECODE_OP_UNARY_PLUS: { struct unary_op *insn = (struct unary_op *) pc; @@ -1111,10 +1111,10 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, case REG_S64: case REG_U64: - insn->op = FILTER_OP_UNARY_PLUS_S64; + insn->op = BYTECODE_OP_UNARY_PLUS_S64; break; case REG_DOUBLE: - insn->op = FILTER_OP_UNARY_PLUS_DOUBLE; + insn->op = BYTECODE_OP_UNARY_PLUS_DOUBLE; break; case REG_UNKNOWN: /* Dynamic typing. */ break; @@ -1124,7 +1124,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_UNARY_MINUS: + case BYTECODE_OP_UNARY_MINUS: { struct unary_op *insn = (struct unary_op *) pc; @@ -1136,10 +1136,10 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, case REG_S64: case REG_U64: - insn->op = FILTER_OP_UNARY_MINUS_S64; + insn->op = BYTECODE_OP_UNARY_MINUS_S64; break; case REG_DOUBLE: - insn->op = FILTER_OP_UNARY_MINUS_DOUBLE; + insn->op = BYTECODE_OP_UNARY_MINUS_DOUBLE; break; case REG_UNKNOWN: /* Dynamic typing. */ break; @@ -1149,7 +1149,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_UNARY_NOT: + case BYTECODE_OP_UNARY_NOT: { struct unary_op *insn = (struct unary_op *) pc; @@ -1161,10 +1161,10 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, case REG_S64: case REG_U64: - insn->op = FILTER_OP_UNARY_NOT_S64; + insn->op = BYTECODE_OP_UNARY_NOT_S64; break; case REG_DOUBLE: - insn->op = FILTER_OP_UNARY_NOT_DOUBLE; + insn->op = BYTECODE_OP_UNARY_NOT_DOUBLE; break; case REG_UNKNOWN: /* Dynamic typing. 
*/ break; @@ -1174,19 +1174,19 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_UNARY_BIT_NOT: + case BYTECODE_OP_UNARY_BIT_NOT: { /* Pop 1, push 1 */ next_pc += sizeof(struct unary_op); break; } - case FILTER_OP_UNARY_PLUS_S64: - case FILTER_OP_UNARY_MINUS_S64: - case FILTER_OP_UNARY_NOT_S64: - case FILTER_OP_UNARY_PLUS_DOUBLE: - case FILTER_OP_UNARY_MINUS_DOUBLE: - case FILTER_OP_UNARY_NOT_DOUBLE: + case BYTECODE_OP_UNARY_PLUS_S64: + case BYTECODE_OP_UNARY_MINUS_S64: + case BYTECODE_OP_UNARY_NOT_S64: + case BYTECODE_OP_UNARY_PLUS_DOUBLE: + case BYTECODE_OP_UNARY_MINUS_DOUBLE: + case BYTECODE_OP_UNARY_NOT_DOUBLE: { /* Pop 1, push 1 */ next_pc += sizeof(struct unary_op); @@ -1194,8 +1194,8 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, } /* logical */ - case FILTER_OP_AND: - case FILTER_OP_OR: + case BYTECODE_OP_AND: + case BYTECODE_OP_OR: { /* Continue to next instruction */ /* Pop 1 when jump not taken */ @@ -1208,14 +1208,14 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, } /* load field ref */ - case FILTER_OP_LOAD_FIELD_REF: + case BYTECODE_OP_LOAD_FIELD_REF: { ERR("Unknown field ref type\n"); ret = -EINVAL; goto end; } /* get context ref */ - case FILTER_OP_GET_CONTEXT_REF: + case BYTECODE_OP_GET_CONTEXT_REF: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1225,9 +1225,9 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, next_pc += sizeof(struct load_op) + sizeof(struct field_ref); break; } - case FILTER_OP_LOAD_FIELD_REF_STRING: - case FILTER_OP_LOAD_FIELD_REF_SEQUENCE: - case FILTER_OP_GET_CONTEXT_REF_STRING: + case BYTECODE_OP_LOAD_FIELD_REF_STRING: + case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE: + case BYTECODE_OP_GET_CONTEXT_REF_STRING: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1237,8 +1237,8 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, next_pc += sizeof(struct load_op) + sizeof(struct field_ref); break; } - case FILTER_OP_LOAD_FIELD_REF_S64: - case FILTER_OP_GET_CONTEXT_REF_S64: + case BYTECODE_OP_LOAD_FIELD_REF_S64: + case BYTECODE_OP_GET_CONTEXT_REF_S64: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1248,8 +1248,8 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, next_pc += sizeof(struct load_op) + sizeof(struct field_ref); break; } - case FILTER_OP_LOAD_FIELD_REF_DOUBLE: - case FILTER_OP_GET_CONTEXT_REF_DOUBLE: + case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE: + case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1261,7 +1261,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, } /* load from immediate operand */ - case FILTER_OP_LOAD_STRING: + case BYTECODE_OP_LOAD_STRING: { struct load_op *insn = (struct load_op *) pc; @@ -1274,7 +1274,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_LOAD_STAR_GLOB_STRING: + case BYTECODE_OP_LOAD_STAR_GLOB_STRING: { struct load_op *insn = (struct load_op *) pc; @@ -1287,7 +1287,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_LOAD_S64: + case BYTECODE_OP_LOAD_S64: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1299,7 +1299,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_LOAD_DOUBLE: + case BYTECODE_OP_LOAD_DOUBLE: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1312,7 
+1312,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, } /* cast */ - case FILTER_OP_CAST_TO_S64: + case BYTECODE_OP_CAST_TO_S64: { struct cast_op *insn = (struct cast_op *) pc; @@ -1328,10 +1328,10 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, ret = -EINVAL; goto end; case REG_S64: - insn->op = FILTER_OP_CAST_NOP; + insn->op = BYTECODE_OP_CAST_NOP; break; case REG_DOUBLE: - insn->op = FILTER_OP_CAST_DOUBLE_TO_S64; + insn->op = BYTECODE_OP_CAST_DOUBLE_TO_S64; break; case REG_UNKNOWN: case REG_U64: @@ -1342,14 +1342,14 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, next_pc += sizeof(struct cast_op); break; } - case FILTER_OP_CAST_DOUBLE_TO_S64: + case BYTECODE_OP_CAST_DOUBLE_TO_S64: { /* Pop 1, push 1 */ vstack_ax(stack)->type = REG_S64; next_pc += sizeof(struct cast_op); break; } - case FILTER_OP_CAST_NOP: + case BYTECODE_OP_CAST_NOP: { next_pc += sizeof(struct cast_op); break; @@ -1358,7 +1358,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, /* * Instructions for recursive traversal through composed types. */ - case FILTER_OP_GET_CONTEXT_ROOT: + case BYTECODE_OP_GET_CONTEXT_ROOT: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1369,7 +1369,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, next_pc += sizeof(struct load_op); break; } - case FILTER_OP_GET_APP_CONTEXT_ROOT: + case BYTECODE_OP_GET_APP_CONTEXT_ROOT: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1380,7 +1380,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, next_pc += sizeof(struct load_op); break; } - case FILTER_OP_GET_PAYLOAD_ROOT: + case BYTECODE_OP_GET_PAYLOAD_ROOT: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1392,7 +1392,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_LOAD_FIELD: + case BYTECODE_OP_LOAD_FIELD: { struct load_op *insn = (struct load_op *) pc; @@ -1406,10 +1406,10 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_LOAD_FIELD_S8: - case FILTER_OP_LOAD_FIELD_S16: - case FILTER_OP_LOAD_FIELD_S32: - case FILTER_OP_LOAD_FIELD_S64: + case BYTECODE_OP_LOAD_FIELD_S8: + case BYTECODE_OP_LOAD_FIELD_S16: + case BYTECODE_OP_LOAD_FIELD_S32: + case BYTECODE_OP_LOAD_FIELD_S64: { /* Pop 1, push 1 */ vstack_ax(stack)->type = REG_S64; @@ -1417,10 +1417,10 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_LOAD_FIELD_U8: - case FILTER_OP_LOAD_FIELD_U16: - case FILTER_OP_LOAD_FIELD_U32: - case FILTER_OP_LOAD_FIELD_U64: + case BYTECODE_OP_LOAD_FIELD_U8: + case BYTECODE_OP_LOAD_FIELD_U16: + case BYTECODE_OP_LOAD_FIELD_U32: + case BYTECODE_OP_LOAD_FIELD_U64: { /* Pop 1, push 1 */ vstack_ax(stack)->type = REG_U64; @@ -1428,8 +1428,8 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_LOAD_FIELD_STRING: - case FILTER_OP_LOAD_FIELD_SEQUENCE: + case BYTECODE_OP_LOAD_FIELD_STRING: + case BYTECODE_OP_LOAD_FIELD_SEQUENCE: { /* Pop 1, push 1 */ vstack_ax(stack)->type = REG_STRING; @@ -1437,7 +1437,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_LOAD_FIELD_DOUBLE: + case BYTECODE_OP_LOAD_FIELD_DOUBLE: { /* Pop 1, push 1 */ vstack_ax(stack)->type = REG_DOUBLE; @@ -1445,7 +1445,7 @@ int lttng_filter_specialize_bytecode(const struct 
lttng_event_desc *event_desc, break; } - case FILTER_OP_GET_SYMBOL: + case BYTECODE_OP_GET_SYMBOL: { struct load_op *insn = (struct load_op *) pc; @@ -1484,14 +1484,14 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_GET_SYMBOL_FIELD: + case BYTECODE_OP_GET_SYMBOL_FIELD: { /* Always generated by specialize phase. */ ret = -EINVAL; goto end; } - case FILTER_OP_GET_INDEX_U16: + case BYTECODE_OP_GET_INDEX_U16: { struct load_op *insn = (struct load_op *) pc; struct get_index_u16 *index = (struct get_index_u16 *) insn->data; @@ -1506,7 +1506,7 @@ int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, break; } - case FILTER_OP_GET_INDEX_U64: + case BYTECODE_OP_GET_INDEX_U64: { struct load_op *insn = (struct load_op *) pc; struct get_index_u64 *index = (struct get_index_u64 *) insn->data; diff --git a/liblttng-ust/lttng-filter-validator.c b/liblttng-ust/lttng-bytecode-validator.c similarity index 76% rename from liblttng-ust/lttng-filter-validator.c rename to liblttng-ust/lttng-bytecode-validator.c index e6982369..f60c9367 100644 --- a/liblttng-ust/lttng-filter-validator.c +++ b/liblttng-ust/lttng-bytecode-validator.c @@ -1,7 +1,7 @@ /* - * lttng-filter-validator.c + * lttng-bytecode-validator.c * - * LTTng UST filter bytecode validator. + * LTTng UST bytecode validator. * * Copyright (C) 2010-2016 Mathieu Desnoyers * @@ -32,7 +32,7 @@ #include #include -#include "lttng-filter.h" +#include "lttng-bytecode.h" #include "lttng-hash-helper.h" #include "string-utils.h" #include "ust-events-internal.h" @@ -101,7 +101,7 @@ int merge_point_add_check(struct cds_lfht *ht, unsigned long target_pc, lttng_hash_seed); struct cds_lfht_node *ret; - dbg_printf("Filter: adding merge point at offset %lu, hash %lu\n", + dbg_printf("Bytecode: adding merge point at offset %lu, hash %lu\n", target_pc, hash); node = zmalloc(sizeof(struct lfht_mp_node)); if (!node) @@ -115,7 +115,7 @@ int merge_point_add_check(struct cds_lfht *ht, unsigned long target_pc, caa_container_of(ret, struct lfht_mp_node, node); /* Key already present */ - dbg_printf("Filter: compare merge points for offset %lu, hash %lu\n", + dbg_printf("Bytecode: compare merge points for offset %lu, hash %lu\n", target_pc, hash); free(node); if (merge_points_compare(stack, &ret_mp->stack)) { @@ -133,7 +133,7 @@ int merge_point_add_check(struct cds_lfht *ht, unsigned long target_pc, * (unknown), negative error value on error. */ static -int bin_op_compare_check(struct vstack *stack, filter_opcode_t opcode, +int bin_op_compare_check(struct vstack *stack, bytecode_opcode_t opcode, const char *str) { if (unlikely(!vstack_ax(stack) || !vstack_bx(stack))) @@ -155,7 +155,7 @@ int bin_op_compare_check(struct vstack *stack, filter_opcode_t opcode, case REG_STRING: break; case REG_STAR_GLOB_STRING: - if (opcode != FILTER_OP_EQ && opcode != FILTER_OP_NE) { + if (opcode != BYTECODE_OP_EQ && opcode != BYTECODE_OP_NE) { goto error_mismatch; } break; @@ -173,7 +173,7 @@ int bin_op_compare_check(struct vstack *stack, filter_opcode_t opcode, case REG_UNKNOWN: goto unknown; case REG_STRING: - if (opcode != FILTER_OP_EQ && opcode != FILTER_OP_NE) { + if (opcode != BYTECODE_OP_EQ && opcode != BYTECODE_OP_NE) { goto error_mismatch; } break; @@ -227,7 +227,7 @@ error_type: * (unknown), negative error value on error. 
*/ static -int bin_op_bitwise_check(struct vstack *stack, filter_opcode_t opcode, +int bin_op_bitwise_check(struct vstack *stack, bytecode_opcode_t opcode, const char *str) { if (unlikely(!vstack_ax(stack) || !vstack_bx(stack))) @@ -295,18 +295,18 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, { int ret = 0; - switch (*(filter_opcode_t *) pc) { - case FILTER_OP_UNKNOWN: + switch (*(bytecode_opcode_t *) pc) { + case BYTECODE_OP_UNKNOWN: default: { ERR("unknown bytecode op %u\n", - (unsigned int) *(filter_opcode_t *) pc); + (unsigned int) *(bytecode_opcode_t *) pc); ret = -EINVAL; break; } - case FILTER_OP_RETURN: - case FILTER_OP_RETURN_S64: + case BYTECODE_OP_RETURN: + case BYTECODE_OP_RETURN_S64: { if (unlikely(pc + sizeof(struct return_op) > start_pc + bytecode->len)) { @@ -316,61 +316,61 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, } /* binary */ - case FILTER_OP_MUL: - case FILTER_OP_DIV: - case FILTER_OP_MOD: - case FILTER_OP_PLUS: - case FILTER_OP_MINUS: + case BYTECODE_OP_MUL: + case BYTECODE_OP_DIV: + case BYTECODE_OP_MOD: + case BYTECODE_OP_PLUS: + case BYTECODE_OP_MINUS: { ERR("unsupported bytecode op %u\n", - (unsigned int) *(filter_opcode_t *) pc); + (unsigned int) *(bytecode_opcode_t *) pc); ret = -EINVAL; break; } - case FILTER_OP_EQ: - case FILTER_OP_NE: - case FILTER_OP_GT: - case FILTER_OP_LT: - case FILTER_OP_GE: - case FILTER_OP_LE: - case FILTER_OP_EQ_STRING: - case FILTER_OP_NE_STRING: - case FILTER_OP_GT_STRING: - case FILTER_OP_LT_STRING: - case FILTER_OP_GE_STRING: - case FILTER_OP_LE_STRING: - case FILTER_OP_EQ_STAR_GLOB_STRING: - case FILTER_OP_NE_STAR_GLOB_STRING: - case FILTER_OP_EQ_S64: - case FILTER_OP_NE_S64: - case FILTER_OP_GT_S64: - case FILTER_OP_LT_S64: - case FILTER_OP_GE_S64: - case FILTER_OP_LE_S64: - case FILTER_OP_EQ_DOUBLE: - case FILTER_OP_NE_DOUBLE: - case FILTER_OP_GT_DOUBLE: - case FILTER_OP_LT_DOUBLE: - case FILTER_OP_GE_DOUBLE: - case FILTER_OP_LE_DOUBLE: - case FILTER_OP_EQ_DOUBLE_S64: - case FILTER_OP_NE_DOUBLE_S64: - case FILTER_OP_GT_DOUBLE_S64: - case FILTER_OP_LT_DOUBLE_S64: - case FILTER_OP_GE_DOUBLE_S64: - case FILTER_OP_LE_DOUBLE_S64: - case FILTER_OP_EQ_S64_DOUBLE: - case FILTER_OP_NE_S64_DOUBLE: - case FILTER_OP_GT_S64_DOUBLE: - case FILTER_OP_LT_S64_DOUBLE: - case FILTER_OP_GE_S64_DOUBLE: - case FILTER_OP_LE_S64_DOUBLE: - case FILTER_OP_BIT_RSHIFT: - case FILTER_OP_BIT_LSHIFT: - case FILTER_OP_BIT_AND: - case FILTER_OP_BIT_OR: - case FILTER_OP_BIT_XOR: + case BYTECODE_OP_EQ: + case BYTECODE_OP_NE: + case BYTECODE_OP_GT: + case BYTECODE_OP_LT: + case BYTECODE_OP_GE: + case BYTECODE_OP_LE: + case BYTECODE_OP_EQ_STRING: + case BYTECODE_OP_NE_STRING: + case BYTECODE_OP_GT_STRING: + case BYTECODE_OP_LT_STRING: + case BYTECODE_OP_GE_STRING: + case BYTECODE_OP_LE_STRING: + case BYTECODE_OP_EQ_STAR_GLOB_STRING: + case BYTECODE_OP_NE_STAR_GLOB_STRING: + case BYTECODE_OP_EQ_S64: + case BYTECODE_OP_NE_S64: + case BYTECODE_OP_GT_S64: + case BYTECODE_OP_LT_S64: + case BYTECODE_OP_GE_S64: + case BYTECODE_OP_LE_S64: + case BYTECODE_OP_EQ_DOUBLE: + case BYTECODE_OP_NE_DOUBLE: + case BYTECODE_OP_GT_DOUBLE: + case BYTECODE_OP_LT_DOUBLE: + case BYTECODE_OP_GE_DOUBLE: + case BYTECODE_OP_LE_DOUBLE: + case BYTECODE_OP_EQ_DOUBLE_S64: + case BYTECODE_OP_NE_DOUBLE_S64: + case BYTECODE_OP_GT_DOUBLE_S64: + case BYTECODE_OP_LT_DOUBLE_S64: + case BYTECODE_OP_GE_DOUBLE_S64: + case BYTECODE_OP_LE_DOUBLE_S64: + case BYTECODE_OP_EQ_S64_DOUBLE: + case BYTECODE_OP_NE_S64_DOUBLE: + case BYTECODE_OP_GT_S64_DOUBLE: + case 
BYTECODE_OP_LT_S64_DOUBLE: + case BYTECODE_OP_GE_S64_DOUBLE: + case BYTECODE_OP_LE_S64_DOUBLE: + case BYTECODE_OP_BIT_RSHIFT: + case BYTECODE_OP_BIT_LSHIFT: + case BYTECODE_OP_BIT_AND: + case BYTECODE_OP_BIT_OR: + case BYTECODE_OP_BIT_XOR: { if (unlikely(pc + sizeof(struct binary_op) > start_pc + bytecode->len)) { @@ -380,16 +380,16 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, } /* unary */ - case FILTER_OP_UNARY_PLUS: - case FILTER_OP_UNARY_MINUS: - case FILTER_OP_UNARY_NOT: - case FILTER_OP_UNARY_PLUS_S64: - case FILTER_OP_UNARY_MINUS_S64: - case FILTER_OP_UNARY_NOT_S64: - case FILTER_OP_UNARY_PLUS_DOUBLE: - case FILTER_OP_UNARY_MINUS_DOUBLE: - case FILTER_OP_UNARY_NOT_DOUBLE: - case FILTER_OP_UNARY_BIT_NOT: + case BYTECODE_OP_UNARY_PLUS: + case BYTECODE_OP_UNARY_MINUS: + case BYTECODE_OP_UNARY_NOT: + case BYTECODE_OP_UNARY_PLUS_S64: + case BYTECODE_OP_UNARY_MINUS_S64: + case BYTECODE_OP_UNARY_NOT_S64: + case BYTECODE_OP_UNARY_PLUS_DOUBLE: + case BYTECODE_OP_UNARY_MINUS_DOUBLE: + case BYTECODE_OP_UNARY_NOT_DOUBLE: + case BYTECODE_OP_UNARY_BIT_NOT: { if (unlikely(pc + sizeof(struct unary_op) > start_pc + bytecode->len)) { @@ -399,8 +399,8 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, } /* logical */ - case FILTER_OP_AND: - case FILTER_OP_OR: + case BYTECODE_OP_AND: + case BYTECODE_OP_OR: { if (unlikely(pc + sizeof(struct logical_op) > start_pc + bytecode->len)) { @@ -410,7 +410,7 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, } /* load field ref */ - case FILTER_OP_LOAD_FIELD_REF: + case BYTECODE_OP_LOAD_FIELD_REF: { ERR("Unknown field ref type\n"); ret = -EINVAL; @@ -418,14 +418,14 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, } /* get context ref */ - case FILTER_OP_GET_CONTEXT_REF: - case FILTER_OP_LOAD_FIELD_REF_STRING: - case FILTER_OP_LOAD_FIELD_REF_SEQUENCE: - case FILTER_OP_LOAD_FIELD_REF_S64: - case FILTER_OP_LOAD_FIELD_REF_DOUBLE: - case FILTER_OP_GET_CONTEXT_REF_STRING: - case FILTER_OP_GET_CONTEXT_REF_S64: - case FILTER_OP_GET_CONTEXT_REF_DOUBLE: + case BYTECODE_OP_GET_CONTEXT_REF: + case BYTECODE_OP_LOAD_FIELD_REF_STRING: + case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE: + case BYTECODE_OP_LOAD_FIELD_REF_S64: + case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE: + case BYTECODE_OP_GET_CONTEXT_REF_STRING: + case BYTECODE_OP_GET_CONTEXT_REF_S64: + case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE: { if (unlikely(pc + sizeof(struct load_op) + sizeof(struct field_ref) > start_pc + bytecode->len)) { @@ -435,8 +435,8 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, } /* load from immediate operand */ - case FILTER_OP_LOAD_STRING: - case FILTER_OP_LOAD_STAR_GLOB_STRING: + case BYTECODE_OP_LOAD_STRING: + case BYTECODE_OP_LOAD_STAR_GLOB_STRING: { struct load_op *insn = (struct load_op *) pc; uint32_t str_len, maxlen; @@ -456,7 +456,7 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_LOAD_S64: + case BYTECODE_OP_LOAD_S64: { if (unlikely(pc + sizeof(struct load_op) + sizeof(struct literal_numeric) > start_pc + bytecode->len)) { @@ -465,7 +465,7 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_LOAD_DOUBLE: + case BYTECODE_OP_LOAD_DOUBLE: { if (unlikely(pc + sizeof(struct load_op) + sizeof(struct literal_double) > start_pc + bytecode->len)) { @@ -474,9 +474,9 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_CAST_TO_S64: - case FILTER_OP_CAST_DOUBLE_TO_S64: - case 
FILTER_OP_CAST_NOP: + case BYTECODE_OP_CAST_TO_S64: + case BYTECODE_OP_CAST_DOUBLE_TO_S64: + case BYTECODE_OP_CAST_NOP: { if (unlikely(pc + sizeof(struct cast_op) > start_pc + bytecode->len)) { @@ -488,28 +488,28 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, /* * Instructions for recursive traversal through composed types. */ - case FILTER_OP_GET_CONTEXT_ROOT: - case FILTER_OP_GET_APP_CONTEXT_ROOT: - case FILTER_OP_GET_PAYLOAD_ROOT: - case FILTER_OP_LOAD_FIELD: - case FILTER_OP_LOAD_FIELD_S8: - case FILTER_OP_LOAD_FIELD_S16: - case FILTER_OP_LOAD_FIELD_S32: - case FILTER_OP_LOAD_FIELD_S64: - case FILTER_OP_LOAD_FIELD_U8: - case FILTER_OP_LOAD_FIELD_U16: - case FILTER_OP_LOAD_FIELD_U32: - case FILTER_OP_LOAD_FIELD_U64: - case FILTER_OP_LOAD_FIELD_STRING: - case FILTER_OP_LOAD_FIELD_SEQUENCE: - case FILTER_OP_LOAD_FIELD_DOUBLE: + case BYTECODE_OP_GET_CONTEXT_ROOT: + case BYTECODE_OP_GET_APP_CONTEXT_ROOT: + case BYTECODE_OP_GET_PAYLOAD_ROOT: + case BYTECODE_OP_LOAD_FIELD: + case BYTECODE_OP_LOAD_FIELD_S8: + case BYTECODE_OP_LOAD_FIELD_S16: + case BYTECODE_OP_LOAD_FIELD_S32: + case BYTECODE_OP_LOAD_FIELD_S64: + case BYTECODE_OP_LOAD_FIELD_U8: + case BYTECODE_OP_LOAD_FIELD_U16: + case BYTECODE_OP_LOAD_FIELD_U32: + case BYTECODE_OP_LOAD_FIELD_U64: + case BYTECODE_OP_LOAD_FIELD_STRING: + case BYTECODE_OP_LOAD_FIELD_SEQUENCE: + case BYTECODE_OP_LOAD_FIELD_DOUBLE: if (unlikely(pc + sizeof(struct load_op) > start_pc + bytecode->len)) { ret = -ERANGE; } break; - case FILTER_OP_GET_SYMBOL: + case BYTECODE_OP_GET_SYMBOL: { struct load_op *insn = (struct load_op *) pc; struct get_symbol *sym = (struct get_symbol *) insn->data; @@ -523,19 +523,19 @@ int bytecode_validate_overflow(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_GET_SYMBOL_FIELD: + case BYTECODE_OP_GET_SYMBOL_FIELD: ERR("Unexpected get symbol field"); ret = -EINVAL; break; - case FILTER_OP_GET_INDEX_U16: + case BYTECODE_OP_GET_INDEX_U16: if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_index_u16) > start_pc + bytecode->len)) { ret = -ERANGE; } break; - case FILTER_OP_GET_INDEX_U64: + case BYTECODE_OP_GET_INDEX_U64: if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_index_u64) > start_pc + bytecode->len)) { ret = -ERANGE; @@ -577,30 +577,30 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, char *pc) { int ret = 0; - const filter_opcode_t opcode = *(filter_opcode_t *) pc; + const bytecode_opcode_t opcode = *(bytecode_opcode_t *) pc; switch (opcode) { - case FILTER_OP_UNKNOWN: + case BYTECODE_OP_UNKNOWN: default: { ERR("unknown bytecode op %u\n", - (unsigned int) *(filter_opcode_t *) pc); + (unsigned int) *(bytecode_opcode_t *) pc); ret = -EINVAL; goto end; } - case FILTER_OP_RETURN: - case FILTER_OP_RETURN_S64: + case BYTECODE_OP_RETURN: + case BYTECODE_OP_RETURN_S64: { goto end; } /* binary */ - case FILTER_OP_MUL: - case FILTER_OP_DIV: - case FILTER_OP_MOD: - case FILTER_OP_PLUS: - case FILTER_OP_MINUS: + case BYTECODE_OP_MUL: + case BYTECODE_OP_DIV: + case BYTECODE_OP_MOD: + case BYTECODE_OP_PLUS: + case BYTECODE_OP_MINUS: { ERR("unsupported bytecode op %u\n", (unsigned int) opcode); @@ -608,42 +608,42 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, goto end; } - case FILTER_OP_EQ: + case BYTECODE_OP_EQ: { ret = bin_op_compare_check(stack, opcode, "=="); if (ret < 0) goto end; break; } - case FILTER_OP_NE: + case BYTECODE_OP_NE: { ret = bin_op_compare_check(stack, opcode, "!="); if (ret < 0) goto end; break; } - case FILTER_OP_GT: + case 
BYTECODE_OP_GT: { ret = bin_op_compare_check(stack, opcode, ">"); if (ret < 0) goto end; break; } - case FILTER_OP_LT: + case BYTECODE_OP_LT: { ret = bin_op_compare_check(stack, opcode, "<"); if (ret < 0) goto end; break; } - case FILTER_OP_GE: + case BYTECODE_OP_GE: { ret = bin_op_compare_check(stack, opcode, ">="); if (ret < 0) goto end; break; } - case FILTER_OP_LE: + case BYTECODE_OP_LE: { ret = bin_op_compare_check(stack, opcode, "<="); if (ret < 0) @@ -651,12 +651,12 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_EQ_STRING: - case FILTER_OP_NE_STRING: - case FILTER_OP_GT_STRING: - case FILTER_OP_LT_STRING: - case FILTER_OP_GE_STRING: - case FILTER_OP_LE_STRING: + case BYTECODE_OP_EQ_STRING: + case BYTECODE_OP_NE_STRING: + case BYTECODE_OP_GT_STRING: + case BYTECODE_OP_LT_STRING: + case BYTECODE_OP_GE_STRING: + case BYTECODE_OP_LE_STRING: { if (!vstack_ax(stack) || !vstack_bx(stack)) { ERR("Empty stack\n"); @@ -672,8 +672,8 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_EQ_STAR_GLOB_STRING: - case FILTER_OP_NE_STAR_GLOB_STRING: + case BYTECODE_OP_EQ_STAR_GLOB_STRING: + case BYTECODE_OP_NE_STAR_GLOB_STRING: { if (!vstack_ax(stack) || !vstack_bx(stack)) { ERR("Empty stack\n"); @@ -689,12 +689,12 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_EQ_S64: - case FILTER_OP_NE_S64: - case FILTER_OP_GT_S64: - case FILTER_OP_LT_S64: - case FILTER_OP_GE_S64: - case FILTER_OP_LE_S64: + case BYTECODE_OP_EQ_S64: + case BYTECODE_OP_NE_S64: + case BYTECODE_OP_GT_S64: + case BYTECODE_OP_LT_S64: + case BYTECODE_OP_GE_S64: + case BYTECODE_OP_LE_S64: { if (!vstack_ax(stack) || !vstack_bx(stack)) { ERR("Empty stack\n"); @@ -722,12 +722,12 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_EQ_DOUBLE: - case FILTER_OP_NE_DOUBLE: - case FILTER_OP_GT_DOUBLE: - case FILTER_OP_LT_DOUBLE: - case FILTER_OP_GE_DOUBLE: - case FILTER_OP_LE_DOUBLE: + case BYTECODE_OP_EQ_DOUBLE: + case BYTECODE_OP_NE_DOUBLE: + case BYTECODE_OP_GT_DOUBLE: + case BYTECODE_OP_LT_DOUBLE: + case BYTECODE_OP_GE_DOUBLE: + case BYTECODE_OP_LE_DOUBLE: { if (!vstack_ax(stack) || !vstack_bx(stack)) { ERR("Empty stack\n"); @@ -742,12 +742,12 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_EQ_DOUBLE_S64: - case FILTER_OP_NE_DOUBLE_S64: - case FILTER_OP_GT_DOUBLE_S64: - case FILTER_OP_LT_DOUBLE_S64: - case FILTER_OP_GE_DOUBLE_S64: - case FILTER_OP_LE_DOUBLE_S64: + case BYTECODE_OP_EQ_DOUBLE_S64: + case BYTECODE_OP_NE_DOUBLE_S64: + case BYTECODE_OP_GT_DOUBLE_S64: + case BYTECODE_OP_LT_DOUBLE_S64: + case BYTECODE_OP_GE_DOUBLE_S64: + case BYTECODE_OP_LE_DOUBLE_S64: { if (!vstack_ax(stack) || !vstack_bx(stack)) { ERR("Empty stack\n"); @@ -774,12 +774,12 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_EQ_S64_DOUBLE: - case FILTER_OP_NE_S64_DOUBLE: - case FILTER_OP_GT_S64_DOUBLE: - case FILTER_OP_LT_S64_DOUBLE: - case FILTER_OP_GE_S64_DOUBLE: - case FILTER_OP_LE_S64_DOUBLE: + case BYTECODE_OP_EQ_S64_DOUBLE: + case BYTECODE_OP_NE_S64_DOUBLE: + case BYTECODE_OP_GT_S64_DOUBLE: + case BYTECODE_OP_LT_S64_DOUBLE: + case BYTECODE_OP_GE_S64_DOUBLE: + case BYTECODE_OP_LE_S64_DOUBLE: { if (!vstack_ax(stack) || !vstack_bx(stack)) { ERR("Empty stack\n"); @@ -806,36 +806,36 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_BIT_RSHIFT: 
+ case BYTECODE_OP_BIT_RSHIFT: ret = bin_op_bitwise_check(stack, opcode, ">>"); if (ret < 0) goto end; break; - case FILTER_OP_BIT_LSHIFT: + case BYTECODE_OP_BIT_LSHIFT: ret = bin_op_bitwise_check(stack, opcode, "<<"); if (ret < 0) goto end; break; - case FILTER_OP_BIT_AND: + case BYTECODE_OP_BIT_AND: ret = bin_op_bitwise_check(stack, opcode, "&"); if (ret < 0) goto end; break; - case FILTER_OP_BIT_OR: + case BYTECODE_OP_BIT_OR: ret = bin_op_bitwise_check(stack, opcode, "|"); if (ret < 0) goto end; break; - case FILTER_OP_BIT_XOR: + case BYTECODE_OP_BIT_XOR: ret = bin_op_bitwise_check(stack, opcode, "^"); if (ret < 0) goto end; break; /* unary */ - case FILTER_OP_UNARY_PLUS: - case FILTER_OP_UNARY_MINUS: - case FILTER_OP_UNARY_NOT: + case BYTECODE_OP_UNARY_PLUS: + case BYTECODE_OP_UNARY_MINUS: + case BYTECODE_OP_UNARY_NOT: { if (!vstack_ax(stack)) { ERR("Empty stack\n"); @@ -864,7 +864,7 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, } break; } - case FILTER_OP_UNARY_BIT_NOT: + case BYTECODE_OP_UNARY_BIT_NOT: { if (!vstack_ax(stack)) { ERR("Empty stack\n"); @@ -893,9 +893,9 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_UNARY_PLUS_S64: - case FILTER_OP_UNARY_MINUS_S64: - case FILTER_OP_UNARY_NOT_S64: + case BYTECODE_OP_UNARY_PLUS_S64: + case BYTECODE_OP_UNARY_MINUS_S64: + case BYTECODE_OP_UNARY_NOT_S64: { if (!vstack_ax(stack)) { ERR("Empty stack\n"); @@ -911,9 +911,9 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_UNARY_PLUS_DOUBLE: - case FILTER_OP_UNARY_MINUS_DOUBLE: - case FILTER_OP_UNARY_NOT_DOUBLE: + case BYTECODE_OP_UNARY_PLUS_DOUBLE: + case BYTECODE_OP_UNARY_MINUS_DOUBLE: + case BYTECODE_OP_UNARY_NOT_DOUBLE: { if (!vstack_ax(stack)) { ERR("Empty stack\n"); @@ -929,8 +929,8 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, } /* logical */ - case FILTER_OP_AND: - case FILTER_OP_OR: + case BYTECODE_OP_AND: + case BYTECODE_OP_OR: { struct logical_op *insn = (struct logical_op *) pc; @@ -958,14 +958,14 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, } /* load field ref */ - case FILTER_OP_LOAD_FIELD_REF: + case BYTECODE_OP_LOAD_FIELD_REF: { ERR("Unknown field ref type\n"); ret = -EINVAL; goto end; } - case FILTER_OP_LOAD_FIELD_REF_STRING: - case FILTER_OP_LOAD_FIELD_REF_SEQUENCE: + case BYTECODE_OP_LOAD_FIELD_REF_STRING: + case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE: { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -974,7 +974,7 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, ref->offset); break; } - case FILTER_OP_LOAD_FIELD_REF_S64: + case BYTECODE_OP_LOAD_FIELD_REF_S64: { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -983,7 +983,7 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, ref->offset); break; } - case FILTER_OP_LOAD_FIELD_REF_DOUBLE: + case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE: { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -994,24 +994,24 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, } /* load from immediate operand */ - case FILTER_OP_LOAD_STRING: - case FILTER_OP_LOAD_STAR_GLOB_STRING: + case BYTECODE_OP_LOAD_STRING: + case BYTECODE_OP_LOAD_STAR_GLOB_STRING: { break; } - case FILTER_OP_LOAD_S64: + case BYTECODE_OP_LOAD_S64: { break; } - case FILTER_OP_LOAD_DOUBLE: + case 
BYTECODE_OP_LOAD_DOUBLE: { break; } - case FILTER_OP_CAST_TO_S64: - case FILTER_OP_CAST_DOUBLE_TO_S64: + case BYTECODE_OP_CAST_TO_S64: + case BYTECODE_OP_CAST_DOUBLE_TO_S64: { struct cast_op *insn = (struct cast_op *) pc; @@ -1040,7 +1040,7 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, case REG_UNKNOWN: break; } - if (insn->op == FILTER_OP_CAST_DOUBLE_TO_S64) { + if (insn->op == BYTECODE_OP_CAST_DOUBLE_TO_S64) { if (vstack_ax(stack)->type != REG_DOUBLE) { ERR("Cast expects double\n"); ret = -EINVAL; @@ -1049,13 +1049,13 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, } break; } - case FILTER_OP_CAST_NOP: + case BYTECODE_OP_CAST_NOP: { break; } /* get context ref */ - case FILTER_OP_GET_CONTEXT_REF: + case BYTECODE_OP_GET_CONTEXT_REF: { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -1064,7 +1064,7 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, ref->offset); break; } - case FILTER_OP_GET_CONTEXT_REF_STRING: + case BYTECODE_OP_GET_CONTEXT_REF_STRING: { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -1073,7 +1073,7 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, ref->offset); break; } - case FILTER_OP_GET_CONTEXT_REF_S64: + case BYTECODE_OP_GET_CONTEXT_REF_S64: { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -1082,7 +1082,7 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, ref->offset); break; } - case FILTER_OP_GET_CONTEXT_REF_DOUBLE: + case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE: { struct load_op *insn = (struct load_op *) pc; struct field_ref *ref = (struct field_ref *) insn->data; @@ -1095,22 +1095,22 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, /* * Instructions for recursive traversal through composed types. 
*/ - case FILTER_OP_GET_CONTEXT_ROOT: + case BYTECODE_OP_GET_CONTEXT_ROOT: { dbg_printf("Validate get context root\n"); break; } - case FILTER_OP_GET_APP_CONTEXT_ROOT: + case BYTECODE_OP_GET_APP_CONTEXT_ROOT: { dbg_printf("Validate get app context root\n"); break; } - case FILTER_OP_GET_PAYLOAD_ROOT: + case BYTECODE_OP_GET_PAYLOAD_ROOT: { dbg_printf("Validate get payload root\n"); break; } - case FILTER_OP_LOAD_FIELD: + case BYTECODE_OP_LOAD_FIELD: { /* * We tolerate that field type is unknown at validation, @@ -1120,63 +1120,63 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, dbg_printf("Validate load field\n"); break; } - case FILTER_OP_LOAD_FIELD_S8: + case BYTECODE_OP_LOAD_FIELD_S8: { dbg_printf("Validate load field s8\n"); break; } - case FILTER_OP_LOAD_FIELD_S16: + case BYTECODE_OP_LOAD_FIELD_S16: { dbg_printf("Validate load field s16\n"); break; } - case FILTER_OP_LOAD_FIELD_S32: + case BYTECODE_OP_LOAD_FIELD_S32: { dbg_printf("Validate load field s32\n"); break; } - case FILTER_OP_LOAD_FIELD_S64: + case BYTECODE_OP_LOAD_FIELD_S64: { dbg_printf("Validate load field s64\n"); break; } - case FILTER_OP_LOAD_FIELD_U8: + case BYTECODE_OP_LOAD_FIELD_U8: { dbg_printf("Validate load field u8\n"); break; } - case FILTER_OP_LOAD_FIELD_U16: + case BYTECODE_OP_LOAD_FIELD_U16: { dbg_printf("Validate load field u16\n"); break; } - case FILTER_OP_LOAD_FIELD_U32: + case BYTECODE_OP_LOAD_FIELD_U32: { dbg_printf("Validate load field u32\n"); break; } - case FILTER_OP_LOAD_FIELD_U64: + case BYTECODE_OP_LOAD_FIELD_U64: { dbg_printf("Validate load field u64\n"); break; } - case FILTER_OP_LOAD_FIELD_STRING: + case BYTECODE_OP_LOAD_FIELD_STRING: { dbg_printf("Validate load field string\n"); break; } - case FILTER_OP_LOAD_FIELD_SEQUENCE: + case BYTECODE_OP_LOAD_FIELD_SEQUENCE: { dbg_printf("Validate load field sequence\n"); break; } - case FILTER_OP_LOAD_FIELD_DOUBLE: + case BYTECODE_OP_LOAD_FIELD_DOUBLE: { dbg_printf("Validate load field double\n"); break; } - case FILTER_OP_GET_SYMBOL: + case BYTECODE_OP_GET_SYMBOL: { struct load_op *insn = (struct load_op *) pc; struct get_symbol *sym = (struct get_symbol *) insn->data; @@ -1185,7 +1185,7 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_GET_SYMBOL_FIELD: + case BYTECODE_OP_GET_SYMBOL_FIELD: { struct load_op *insn = (struct load_op *) pc; struct get_symbol *sym = (struct get_symbol *) insn->data; @@ -1194,7 +1194,7 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_GET_INDEX_U16: + case BYTECODE_OP_GET_INDEX_U16: { struct load_op *insn = (struct load_op *) pc; struct get_index_u16 *get_index = (struct get_index_u16 *) insn->data; @@ -1203,7 +1203,7 @@ int validate_instruction_context(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_GET_INDEX_U64: + case BYTECODE_OP_GET_INDEX_U64: { struct load_op *insn = (struct load_op *) pc; struct get_index_u64 *get_index = (struct get_index_u64 *) insn->data; @@ -1249,7 +1249,7 @@ int validate_instruction_all_contexts(struct bytecode_runtime *bytecode, if (node) { mp_node = caa_container_of(node, struct lfht_mp_node, node); - dbg_printf("Filter: validate merge point at offset %lu\n", + dbg_printf("Bytecode: validate merge point at offset %lu\n", target_pc); if (merge_points_compare(stack, &mp_node->stack)) { ERR("Merge points differ for offset %lu\n", @@ -1257,7 +1257,7 @@ int validate_instruction_all_contexts(struct bytecode_runtime *bytecode, return -EINVAL; } /* Once validated, we can 
remove the merge point */ - dbg_printf("Filter: remove merge point at offset %lu\n", + dbg_printf("Bytecode: remove merge point at offset %lu\n", target_pc); ret = cds_lfht_del(merge_points, node); assert(!ret); @@ -1281,17 +1281,17 @@ int exec_insn(struct bytecode_runtime *bytecode, int ret = 1; char *next_pc = *_next_pc; - switch (*(filter_opcode_t *) pc) { - case FILTER_OP_UNKNOWN: + switch (*(bytecode_opcode_t *) pc) { + case BYTECODE_OP_UNKNOWN: default: { ERR("unknown bytecode op %u\n", - (unsigned int) *(filter_opcode_t *) pc); + (unsigned int) *(bytecode_opcode_t *) pc); ret = -EINVAL; goto end; } - case FILTER_OP_RETURN: + case BYTECODE_OP_RETURN: { if (!vstack_ax(stack)) { ERR("Empty stack\n"); @@ -1316,7 +1316,7 @@ int exec_insn(struct bytecode_runtime *bytecode, ret = 0; goto end; } - case FILTER_OP_RETURN_S64: + case BYTECODE_OP_RETURN_S64: { if (!vstack_ax(stack)) { ERR("Empty stack\n"); @@ -1340,56 +1340,56 @@ int exec_insn(struct bytecode_runtime *bytecode, } /* binary */ - case FILTER_OP_MUL: - case FILTER_OP_DIV: - case FILTER_OP_MOD: - case FILTER_OP_PLUS: - case FILTER_OP_MINUS: + case BYTECODE_OP_MUL: + case BYTECODE_OP_DIV: + case BYTECODE_OP_MOD: + case BYTECODE_OP_PLUS: + case BYTECODE_OP_MINUS: { ERR("unsupported bytecode op %u\n", - (unsigned int) *(filter_opcode_t *) pc); + (unsigned int) *(bytecode_opcode_t *) pc); ret = -EINVAL; goto end; } - case FILTER_OP_EQ: - case FILTER_OP_NE: - case FILTER_OP_GT: - case FILTER_OP_LT: - case FILTER_OP_GE: - case FILTER_OP_LE: - case FILTER_OP_EQ_STRING: - case FILTER_OP_NE_STRING: - case FILTER_OP_GT_STRING: - case FILTER_OP_LT_STRING: - case FILTER_OP_GE_STRING: - case FILTER_OP_LE_STRING: - case FILTER_OP_EQ_STAR_GLOB_STRING: - case FILTER_OP_NE_STAR_GLOB_STRING: - case FILTER_OP_EQ_S64: - case FILTER_OP_NE_S64: - case FILTER_OP_GT_S64: - case FILTER_OP_LT_S64: - case FILTER_OP_GE_S64: - case FILTER_OP_LE_S64: - case FILTER_OP_EQ_DOUBLE: - case FILTER_OP_NE_DOUBLE: - case FILTER_OP_GT_DOUBLE: - case FILTER_OP_LT_DOUBLE: - case FILTER_OP_GE_DOUBLE: - case FILTER_OP_LE_DOUBLE: - case FILTER_OP_EQ_DOUBLE_S64: - case FILTER_OP_NE_DOUBLE_S64: - case FILTER_OP_GT_DOUBLE_S64: - case FILTER_OP_LT_DOUBLE_S64: - case FILTER_OP_GE_DOUBLE_S64: - case FILTER_OP_LE_DOUBLE_S64: - case FILTER_OP_EQ_S64_DOUBLE: - case FILTER_OP_NE_S64_DOUBLE: - case FILTER_OP_GT_S64_DOUBLE: - case FILTER_OP_LT_S64_DOUBLE: - case FILTER_OP_GE_S64_DOUBLE: - case FILTER_OP_LE_S64_DOUBLE: + case BYTECODE_OP_EQ: + case BYTECODE_OP_NE: + case BYTECODE_OP_GT: + case BYTECODE_OP_LT: + case BYTECODE_OP_GE: + case BYTECODE_OP_LE: + case BYTECODE_OP_EQ_STRING: + case BYTECODE_OP_NE_STRING: + case BYTECODE_OP_GT_STRING: + case BYTECODE_OP_LT_STRING: + case BYTECODE_OP_GE_STRING: + case BYTECODE_OP_LE_STRING: + case BYTECODE_OP_EQ_STAR_GLOB_STRING: + case BYTECODE_OP_NE_STAR_GLOB_STRING: + case BYTECODE_OP_EQ_S64: + case BYTECODE_OP_NE_S64: + case BYTECODE_OP_GT_S64: + case BYTECODE_OP_LT_S64: + case BYTECODE_OP_GE_S64: + case BYTECODE_OP_LE_S64: + case BYTECODE_OP_EQ_DOUBLE: + case BYTECODE_OP_NE_DOUBLE: + case BYTECODE_OP_GT_DOUBLE: + case BYTECODE_OP_LT_DOUBLE: + case BYTECODE_OP_GE_DOUBLE: + case BYTECODE_OP_LE_DOUBLE: + case BYTECODE_OP_EQ_DOUBLE_S64: + case BYTECODE_OP_NE_DOUBLE_S64: + case BYTECODE_OP_GT_DOUBLE_S64: + case BYTECODE_OP_LT_DOUBLE_S64: + case BYTECODE_OP_GE_DOUBLE_S64: + case BYTECODE_OP_LE_DOUBLE_S64: + case BYTECODE_OP_EQ_S64_DOUBLE: + case BYTECODE_OP_NE_S64_DOUBLE: + case BYTECODE_OP_GT_S64_DOUBLE: + case BYTECODE_OP_LT_S64_DOUBLE: + case 
BYTECODE_OP_GE_S64_DOUBLE: + case BYTECODE_OP_LE_S64_DOUBLE: { /* Pop 2, push 1 */ if (vstack_pop(stack)) { @@ -1421,11 +1421,11 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_BIT_RSHIFT: - case FILTER_OP_BIT_LSHIFT: - case FILTER_OP_BIT_AND: - case FILTER_OP_BIT_OR: - case FILTER_OP_BIT_XOR: + case BYTECODE_OP_BIT_RSHIFT: + case BYTECODE_OP_BIT_LSHIFT: + case BYTECODE_OP_BIT_AND: + case BYTECODE_OP_BIT_OR: + case BYTECODE_OP_BIT_XOR: { /* Pop 2, push 1 */ if (vstack_pop(stack)) { @@ -1458,8 +1458,8 @@ int exec_insn(struct bytecode_runtime *bytecode, } /* unary */ - case FILTER_OP_UNARY_PLUS: - case FILTER_OP_UNARY_MINUS: + case BYTECODE_OP_UNARY_PLUS: + case BYTECODE_OP_UNARY_MINUS: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1484,9 +1484,9 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_UNARY_PLUS_S64: - case FILTER_OP_UNARY_MINUS_S64: - case FILTER_OP_UNARY_NOT_S64: + case BYTECODE_OP_UNARY_PLUS_S64: + case BYTECODE_OP_UNARY_MINUS_S64: + case BYTECODE_OP_UNARY_NOT_S64: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1509,7 +1509,7 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_UNARY_NOT: + case BYTECODE_OP_UNARY_NOT: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1534,7 +1534,7 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_UNARY_BIT_NOT: + case BYTECODE_OP_UNARY_BIT_NOT: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1560,7 +1560,7 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_UNARY_NOT_DOUBLE: + case BYTECODE_OP_UNARY_NOT_DOUBLE: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1583,8 +1583,8 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_UNARY_PLUS_DOUBLE: - case FILTER_OP_UNARY_MINUS_DOUBLE: + case BYTECODE_OP_UNARY_PLUS_DOUBLE: + case BYTECODE_OP_UNARY_MINUS_DOUBLE: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1608,8 +1608,8 @@ int exec_insn(struct bytecode_runtime *bytecode, } /* logical */ - case FILTER_OP_AND: - case FILTER_OP_OR: + case BYTECODE_OP_AND: + case BYTECODE_OP_OR: { struct logical_op *insn = (struct logical_op *) pc; int merge_ret; @@ -1650,14 +1650,14 @@ int exec_insn(struct bytecode_runtime *bytecode, } /* load field ref */ - case FILTER_OP_LOAD_FIELD_REF: + case BYTECODE_OP_LOAD_FIELD_REF: { ERR("Unknown field ref type\n"); ret = -EINVAL; goto end; } /* get context ref */ - case FILTER_OP_GET_CONTEXT_REF: + case BYTECODE_OP_GET_CONTEXT_REF: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1667,9 +1667,9 @@ int exec_insn(struct bytecode_runtime *bytecode, next_pc += sizeof(struct load_op) + sizeof(struct field_ref); break; } - case FILTER_OP_LOAD_FIELD_REF_STRING: - case FILTER_OP_LOAD_FIELD_REF_SEQUENCE: - case FILTER_OP_GET_CONTEXT_REF_STRING: + case BYTECODE_OP_LOAD_FIELD_REF_STRING: + case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE: + case BYTECODE_OP_GET_CONTEXT_REF_STRING: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1679,8 +1679,8 @@ int exec_insn(struct bytecode_runtime *bytecode, next_pc += sizeof(struct load_op) + sizeof(struct field_ref); break; } - case FILTER_OP_LOAD_FIELD_REF_S64: - case FILTER_OP_GET_CONTEXT_REF_S64: + case BYTECODE_OP_LOAD_FIELD_REF_S64: + case BYTECODE_OP_GET_CONTEXT_REF_S64: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1690,8 +1690,8 @@ int exec_insn(struct bytecode_runtime *bytecode, next_pc += sizeof(struct load_op) + sizeof(struct field_ref); break; } - case FILTER_OP_LOAD_FIELD_REF_DOUBLE: - case 
FILTER_OP_GET_CONTEXT_REF_DOUBLE: + case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE: + case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1703,7 +1703,7 @@ int exec_insn(struct bytecode_runtime *bytecode, } /* load from immediate operand */ - case FILTER_OP_LOAD_STRING: + case BYTECODE_OP_LOAD_STRING: { struct load_op *insn = (struct load_op *) pc; @@ -1716,7 +1716,7 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_LOAD_STAR_GLOB_STRING: + case BYTECODE_OP_LOAD_STAR_GLOB_STRING: { struct load_op *insn = (struct load_op *) pc; @@ -1729,7 +1729,7 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_LOAD_S64: + case BYTECODE_OP_LOAD_S64: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1741,7 +1741,7 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_LOAD_DOUBLE: + case BYTECODE_OP_LOAD_DOUBLE: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1753,8 +1753,8 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_CAST_TO_S64: - case FILTER_OP_CAST_DOUBLE_TO_S64: + case BYTECODE_OP_CAST_TO_S64: + case BYTECODE_OP_CAST_DOUBLE_TO_S64: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1778,7 +1778,7 @@ int exec_insn(struct bytecode_runtime *bytecode, next_pc += sizeof(struct cast_op); break; } - case FILTER_OP_CAST_NOP: + case BYTECODE_OP_CAST_NOP: { next_pc += sizeof(struct cast_op); break; @@ -1787,9 +1787,9 @@ int exec_insn(struct bytecode_runtime *bytecode, /* * Instructions for recursive traversal through composed types. */ - case FILTER_OP_GET_CONTEXT_ROOT: - case FILTER_OP_GET_APP_CONTEXT_ROOT: - case FILTER_OP_GET_PAYLOAD_ROOT: + case BYTECODE_OP_GET_CONTEXT_ROOT: + case BYTECODE_OP_GET_APP_CONTEXT_ROOT: + case BYTECODE_OP_GET_PAYLOAD_ROOT: { if (vstack_push(stack)) { ret = -EINVAL; @@ -1800,7 +1800,7 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_LOAD_FIELD: + case BYTECODE_OP_LOAD_FIELD: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1818,10 +1818,10 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_LOAD_FIELD_S8: - case FILTER_OP_LOAD_FIELD_S16: - case FILTER_OP_LOAD_FIELD_S32: - case FILTER_OP_LOAD_FIELD_S64: + case BYTECODE_OP_LOAD_FIELD_S8: + case BYTECODE_OP_LOAD_FIELD_S16: + case BYTECODE_OP_LOAD_FIELD_S32: + case BYTECODE_OP_LOAD_FIELD_S64: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1839,10 +1839,10 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_LOAD_FIELD_U8: - case FILTER_OP_LOAD_FIELD_U16: - case FILTER_OP_LOAD_FIELD_U32: - case FILTER_OP_LOAD_FIELD_U64: + case BYTECODE_OP_LOAD_FIELD_U8: + case BYTECODE_OP_LOAD_FIELD_U16: + case BYTECODE_OP_LOAD_FIELD_U32: + case BYTECODE_OP_LOAD_FIELD_U64: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1860,8 +1860,8 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_LOAD_FIELD_STRING: - case FILTER_OP_LOAD_FIELD_SEQUENCE: + case BYTECODE_OP_LOAD_FIELD_STRING: + case BYTECODE_OP_LOAD_FIELD_SEQUENCE: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1879,7 +1879,7 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_LOAD_FIELD_DOUBLE: + case BYTECODE_OP_LOAD_FIELD_DOUBLE: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1897,8 +1897,8 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_GET_SYMBOL: - case FILTER_OP_GET_SYMBOL_FIELD: + case BYTECODE_OP_GET_SYMBOL: + case BYTECODE_OP_GET_SYMBOL_FIELD: { /* Pop 1, push 1 */ if 
(!vstack_ax(stack)) { @@ -1915,7 +1915,7 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_GET_INDEX_U16: + case BYTECODE_OP_GET_INDEX_U16: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1932,7 +1932,7 @@ int exec_insn(struct bytecode_runtime *bytecode, break; } - case FILTER_OP_GET_INDEX_U64: + case BYTECODE_OP_GET_INDEX_U64: { /* Pop 1, push 1 */ if (!vstack_ax(stack)) { @@ -1958,7 +1958,7 @@ end: /* * Never called concurrently (hash seed is shared). */ -int lttng_filter_validate_bytecode(struct bytecode_runtime *bytecode) +int lttng_bytecode_validate(struct bytecode_runtime *bytecode) { struct cds_lfht *merge_points; char *pc, *next_pc, *start_pc; @@ -1990,12 +1990,12 @@ int lttng_filter_validate_bytecode(struct bytecode_runtime *bytecode) ret = bytecode_validate_overflow(bytecode, start_pc, pc); if (ret != 0) { if (ret == -ERANGE) - ERR("filter bytecode overflow\n"); + ERR("Bytecode overflow\n"); goto end; } dbg_printf("Validating op %s (%u)\n", - print_op((unsigned int) *(filter_opcode_t *) pc), - (unsigned int) *(filter_opcode_t *) pc); + print_op((unsigned int) *(bytecode_opcode_t *) pc), + (unsigned int) *(bytecode_opcode_t *) pc); /* * For each instruction, validate the current context diff --git a/liblttng-ust/lttng-filter.c b/liblttng-ust/lttng-bytecode.c similarity index 64% rename from liblttng-ust/lttng-filter.c rename to liblttng-ust/lttng-bytecode.c index 55d707b8..9153674d 100644 --- a/liblttng-ust/lttng-filter.c +++ b/liblttng-ust/lttng-bytecode.c @@ -1,7 +1,7 @@ /* - * lttng-filter.c + * lttng-bytecode.c * - * LTTng UST filter code. + * LTTng UST bytecode code. * * Copyright (C) 2010-2016 Mathieu Desnoyers * @@ -30,158 +30,158 @@ #include -#include "lttng-filter.h" +#include "lttng-bytecode.h" #include "ust-events-internal.h" static const char *opnames[] = { - [ FILTER_OP_UNKNOWN ] = "UNKNOWN", + [ BYTECODE_OP_UNKNOWN ] = "UNKNOWN", - [ FILTER_OP_RETURN ] = "RETURN", + [ BYTECODE_OP_RETURN ] = "RETURN", /* binary */ - [ FILTER_OP_MUL ] = "MUL", - [ FILTER_OP_DIV ] = "DIV", - [ FILTER_OP_MOD ] = "MOD", - [ FILTER_OP_PLUS ] = "PLUS", - [ FILTER_OP_MINUS ] = "MINUS", - [ FILTER_OP_BIT_RSHIFT ] = "BIT_RSHIFT", - [ FILTER_OP_BIT_LSHIFT ] = "BIT_LSHIFT", - [ FILTER_OP_BIT_AND ] = "BIT_AND", - [ FILTER_OP_BIT_OR ] = "BIT_OR", - [ FILTER_OP_BIT_XOR ] = "BIT_XOR", + [ BYTECODE_OP_MUL ] = "MUL", + [ BYTECODE_OP_DIV ] = "DIV", + [ BYTECODE_OP_MOD ] = "MOD", + [ BYTECODE_OP_PLUS ] = "PLUS", + [ BYTECODE_OP_MINUS ] = "MINUS", + [ BYTECODE_OP_BIT_RSHIFT ] = "BIT_RSHIFT", + [ BYTECODE_OP_BIT_LSHIFT ] = "BIT_LSHIFT", + [ BYTECODE_OP_BIT_AND ] = "BIT_AND", + [ BYTECODE_OP_BIT_OR ] = "BIT_OR", + [ BYTECODE_OP_BIT_XOR ] = "BIT_XOR", /* binary comparators */ - [ FILTER_OP_EQ ] = "EQ", - [ FILTER_OP_NE ] = "NE", - [ FILTER_OP_GT ] = "GT", - [ FILTER_OP_LT ] = "LT", - [ FILTER_OP_GE ] = "GE", - [ FILTER_OP_LE ] = "LE", + [ BYTECODE_OP_EQ ] = "EQ", + [ BYTECODE_OP_NE ] = "NE", + [ BYTECODE_OP_GT ] = "GT", + [ BYTECODE_OP_LT ] = "LT", + [ BYTECODE_OP_GE ] = "GE", + [ BYTECODE_OP_LE ] = "LE", /* string binary comparators */ - [ FILTER_OP_EQ_STRING ] = "EQ_STRING", - [ FILTER_OP_NE_STRING ] = "NE_STRING", - [ FILTER_OP_GT_STRING ] = "GT_STRING", - [ FILTER_OP_LT_STRING ] = "LT_STRING", - [ FILTER_OP_GE_STRING ] = "GE_STRING", - [ FILTER_OP_LE_STRING ] = "LE_STRING", + [ BYTECODE_OP_EQ_STRING ] = "EQ_STRING", + [ BYTECODE_OP_NE_STRING ] = "NE_STRING", + [ BYTECODE_OP_GT_STRING ] = "GT_STRING", + [ BYTECODE_OP_LT_STRING ] = "LT_STRING", + [ BYTECODE_OP_GE_STRING ] = 
"GE_STRING", + [ BYTECODE_OP_LE_STRING ] = "LE_STRING", /* s64 binary comparators */ - [ FILTER_OP_EQ_S64 ] = "EQ_S64", - [ FILTER_OP_NE_S64 ] = "NE_S64", - [ FILTER_OP_GT_S64 ] = "GT_S64", - [ FILTER_OP_LT_S64 ] = "LT_S64", - [ FILTER_OP_GE_S64 ] = "GE_S64", - [ FILTER_OP_LE_S64 ] = "LE_S64", + [ BYTECODE_OP_EQ_S64 ] = "EQ_S64", + [ BYTECODE_OP_NE_S64 ] = "NE_S64", + [ BYTECODE_OP_GT_S64 ] = "GT_S64", + [ BYTECODE_OP_LT_S64 ] = "LT_S64", + [ BYTECODE_OP_GE_S64 ] = "GE_S64", + [ BYTECODE_OP_LE_S64 ] = "LE_S64", /* double binary comparators */ - [ FILTER_OP_EQ_DOUBLE ] = "EQ_DOUBLE", - [ FILTER_OP_NE_DOUBLE ] = "NE_DOUBLE", - [ FILTER_OP_GT_DOUBLE ] = "GT_DOUBLE", - [ FILTER_OP_LT_DOUBLE ] = "LT_DOUBLE", - [ FILTER_OP_GE_DOUBLE ] = "GE_DOUBLE", - [ FILTER_OP_LE_DOUBLE ] = "LE_DOUBLE", + [ BYTECODE_OP_EQ_DOUBLE ] = "EQ_DOUBLE", + [ BYTECODE_OP_NE_DOUBLE ] = "NE_DOUBLE", + [ BYTECODE_OP_GT_DOUBLE ] = "GT_DOUBLE", + [ BYTECODE_OP_LT_DOUBLE ] = "LT_DOUBLE", + [ BYTECODE_OP_GE_DOUBLE ] = "GE_DOUBLE", + [ BYTECODE_OP_LE_DOUBLE ] = "LE_DOUBLE", /* Mixed S64-double binary comparators */ - [ FILTER_OP_EQ_DOUBLE_S64 ] = "EQ_DOUBLE_S64", - [ FILTER_OP_NE_DOUBLE_S64 ] = "NE_DOUBLE_S64", - [ FILTER_OP_GT_DOUBLE_S64 ] = "GT_DOUBLE_S64", - [ FILTER_OP_LT_DOUBLE_S64 ] = "LT_DOUBLE_S64", - [ FILTER_OP_GE_DOUBLE_S64 ] = "GE_DOUBLE_S64", - [ FILTER_OP_LE_DOUBLE_S64 ] = "LE_DOUBLE_S64", - - [ FILTER_OP_EQ_S64_DOUBLE ] = "EQ_S64_DOUBLE", - [ FILTER_OP_NE_S64_DOUBLE ] = "NE_S64_DOUBLE", - [ FILTER_OP_GT_S64_DOUBLE ] = "GT_S64_DOUBLE", - [ FILTER_OP_LT_S64_DOUBLE ] = "LT_S64_DOUBLE", - [ FILTER_OP_GE_S64_DOUBLE ] = "GE_S64_DOUBLE", - [ FILTER_OP_LE_S64_DOUBLE ] = "LE_S64_DOUBLE", + [ BYTECODE_OP_EQ_DOUBLE_S64 ] = "EQ_DOUBLE_S64", + [ BYTECODE_OP_NE_DOUBLE_S64 ] = "NE_DOUBLE_S64", + [ BYTECODE_OP_GT_DOUBLE_S64 ] = "GT_DOUBLE_S64", + [ BYTECODE_OP_LT_DOUBLE_S64 ] = "LT_DOUBLE_S64", + [ BYTECODE_OP_GE_DOUBLE_S64 ] = "GE_DOUBLE_S64", + [ BYTECODE_OP_LE_DOUBLE_S64 ] = "LE_DOUBLE_S64", + + [ BYTECODE_OP_EQ_S64_DOUBLE ] = "EQ_S64_DOUBLE", + [ BYTECODE_OP_NE_S64_DOUBLE ] = "NE_S64_DOUBLE", + [ BYTECODE_OP_GT_S64_DOUBLE ] = "GT_S64_DOUBLE", + [ BYTECODE_OP_LT_S64_DOUBLE ] = "LT_S64_DOUBLE", + [ BYTECODE_OP_GE_S64_DOUBLE ] = "GE_S64_DOUBLE", + [ BYTECODE_OP_LE_S64_DOUBLE ] = "LE_S64_DOUBLE", /* unary */ - [ FILTER_OP_UNARY_PLUS ] = "UNARY_PLUS", - [ FILTER_OP_UNARY_MINUS ] = "UNARY_MINUS", - [ FILTER_OP_UNARY_NOT ] = "UNARY_NOT", - [ FILTER_OP_UNARY_PLUS_S64 ] = "UNARY_PLUS_S64", - [ FILTER_OP_UNARY_MINUS_S64 ] = "UNARY_MINUS_S64", - [ FILTER_OP_UNARY_NOT_S64 ] = "UNARY_NOT_S64", - [ FILTER_OP_UNARY_PLUS_DOUBLE ] = "UNARY_PLUS_DOUBLE", - [ FILTER_OP_UNARY_MINUS_DOUBLE ] = "UNARY_MINUS_DOUBLE", - [ FILTER_OP_UNARY_NOT_DOUBLE ] = "UNARY_NOT_DOUBLE", + [ BYTECODE_OP_UNARY_PLUS ] = "UNARY_PLUS", + [ BYTECODE_OP_UNARY_MINUS ] = "UNARY_MINUS", + [ BYTECODE_OP_UNARY_NOT ] = "UNARY_NOT", + [ BYTECODE_OP_UNARY_PLUS_S64 ] = "UNARY_PLUS_S64", + [ BYTECODE_OP_UNARY_MINUS_S64 ] = "UNARY_MINUS_S64", + [ BYTECODE_OP_UNARY_NOT_S64 ] = "UNARY_NOT_S64", + [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = "UNARY_PLUS_DOUBLE", + [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = "UNARY_MINUS_DOUBLE", + [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = "UNARY_NOT_DOUBLE", /* logical */ - [ FILTER_OP_AND ] = "AND", - [ FILTER_OP_OR ] = "OR", + [ BYTECODE_OP_AND ] = "AND", + [ BYTECODE_OP_OR ] = "OR", /* load field ref */ - [ FILTER_OP_LOAD_FIELD_REF ] = "LOAD_FIELD_REF", - [ FILTER_OP_LOAD_FIELD_REF_STRING ] = "LOAD_FIELD_REF_STRING", - [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE ] = 
"LOAD_FIELD_REF_SEQUENCE", - [ FILTER_OP_LOAD_FIELD_REF_S64 ] = "LOAD_FIELD_REF_S64", - [ FILTER_OP_LOAD_FIELD_REF_DOUBLE ] = "LOAD_FIELD_REF_DOUBLE", + [ BYTECODE_OP_LOAD_FIELD_REF ] = "LOAD_FIELD_REF", + [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = "LOAD_FIELD_REF_STRING", + [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = "LOAD_FIELD_REF_SEQUENCE", + [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = "LOAD_FIELD_REF_S64", + [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = "LOAD_FIELD_REF_DOUBLE", /* load from immediate operand */ - [ FILTER_OP_LOAD_STRING ] = "LOAD_STRING", - [ FILTER_OP_LOAD_S64 ] = "LOAD_S64", - [ FILTER_OP_LOAD_DOUBLE ] = "LOAD_DOUBLE", + [ BYTECODE_OP_LOAD_STRING ] = "LOAD_STRING", + [ BYTECODE_OP_LOAD_S64 ] = "LOAD_S64", + [ BYTECODE_OP_LOAD_DOUBLE ] = "LOAD_DOUBLE", /* cast */ - [ FILTER_OP_CAST_TO_S64 ] = "CAST_TO_S64", - [ FILTER_OP_CAST_DOUBLE_TO_S64 ] = "CAST_DOUBLE_TO_S64", - [ FILTER_OP_CAST_NOP ] = "CAST_NOP", + [ BYTECODE_OP_CAST_TO_S64 ] = "CAST_TO_S64", + [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = "CAST_DOUBLE_TO_S64", + [ BYTECODE_OP_CAST_NOP ] = "CAST_NOP", /* get context ref */ - [ FILTER_OP_GET_CONTEXT_REF ] = "GET_CONTEXT_REF", - [ FILTER_OP_GET_CONTEXT_REF_STRING ] = "GET_CONTEXT_REF_STRING", - [ FILTER_OP_GET_CONTEXT_REF_S64 ] = "GET_CONTEXT_REF_S64", - [ FILTER_OP_GET_CONTEXT_REF_DOUBLE ] = "GET_CONTEXT_REF_DOUBLE", + [ BYTECODE_OP_GET_CONTEXT_REF ] = "GET_CONTEXT_REF", + [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = "GET_CONTEXT_REF_STRING", + [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = "GET_CONTEXT_REF_S64", + [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = "GET_CONTEXT_REF_DOUBLE", /* load userspace field ref */ - [ FILTER_OP_LOAD_FIELD_REF_USER_STRING ] = "LOAD_FIELD_REF_USER_STRING", - [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = "LOAD_FIELD_REF_USER_SEQUENCE", + [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING ] = "LOAD_FIELD_REF_USER_STRING", + [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = "LOAD_FIELD_REF_USER_SEQUENCE", /* * load immediate star globbing pattern (literal string) * from immediate. */ - [ FILTER_OP_LOAD_STAR_GLOB_STRING ] = "LOAD_STAR_GLOB_STRING", + [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = "LOAD_STAR_GLOB_STRING", /* globbing pattern binary operator: apply to */ - [ FILTER_OP_EQ_STAR_GLOB_STRING ] = "EQ_STAR_GLOB_STRING", - [ FILTER_OP_NE_STAR_GLOB_STRING ] = "NE_STAR_GLOB_STRING", + [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = "EQ_STAR_GLOB_STRING", + [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = "NE_STAR_GLOB_STRING", /* * Instructions for recursive traversal through composed types. 
*/ - [ FILTER_OP_GET_CONTEXT_ROOT ] = "GET_CONTEXT_ROOT", - [ FILTER_OP_GET_APP_CONTEXT_ROOT ] = "GET_APP_CONTEXT_ROOT", - [ FILTER_OP_GET_PAYLOAD_ROOT ] = "GET_PAYLOAD_ROOT", - - [ FILTER_OP_GET_SYMBOL ] = "GET_SYMBOL", - [ FILTER_OP_GET_SYMBOL_FIELD ] = "GET_SYMBOL_FIELD", - [ FILTER_OP_GET_INDEX_U16 ] = "GET_INDEX_U16", - [ FILTER_OP_GET_INDEX_U64 ] = "GET_INDEX_U64", - - [ FILTER_OP_LOAD_FIELD ] = "LOAD_FIELD", - [ FILTER_OP_LOAD_FIELD_S8 ] = "LOAD_FIELD_S8", - [ FILTER_OP_LOAD_FIELD_S16 ] = "LOAD_FIELD_S16", - [ FILTER_OP_LOAD_FIELD_S32 ] = "LOAD_FIELD_S32", - [ FILTER_OP_LOAD_FIELD_S64 ] = "LOAD_FIELD_S64", - [ FILTER_OP_LOAD_FIELD_U8 ] = "LOAD_FIELD_U8", - [ FILTER_OP_LOAD_FIELD_U16 ] = "LOAD_FIELD_U16", - [ FILTER_OP_LOAD_FIELD_U32 ] = "LOAD_FIELD_U32", - [ FILTER_OP_LOAD_FIELD_U64 ] = "LOAD_FIELD_U64", - [ FILTER_OP_LOAD_FIELD_STRING ] = "LOAD_FIELD_STRING", - [ FILTER_OP_LOAD_FIELD_SEQUENCE ] = "LOAD_FIELD_SEQUENCE", - [ FILTER_OP_LOAD_FIELD_DOUBLE ] = "LOAD_FIELD_DOUBLE", - - [ FILTER_OP_UNARY_BIT_NOT ] = "UNARY_BIT_NOT", - - [ FILTER_OP_RETURN_S64 ] = "RETURN_S64", + [ BYTECODE_OP_GET_CONTEXT_ROOT ] = "GET_CONTEXT_ROOT", + [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = "GET_APP_CONTEXT_ROOT", + [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = "GET_PAYLOAD_ROOT", + + [ BYTECODE_OP_GET_SYMBOL ] = "GET_SYMBOL", + [ BYTECODE_OP_GET_SYMBOL_FIELD ] = "GET_SYMBOL_FIELD", + [ BYTECODE_OP_GET_INDEX_U16 ] = "GET_INDEX_U16", + [ BYTECODE_OP_GET_INDEX_U64 ] = "GET_INDEX_U64", + + [ BYTECODE_OP_LOAD_FIELD ] = "LOAD_FIELD", + [ BYTECODE_OP_LOAD_FIELD_S8 ] = "LOAD_FIELD_S8", + [ BYTECODE_OP_LOAD_FIELD_S16 ] = "LOAD_FIELD_S16", + [ BYTECODE_OP_LOAD_FIELD_S32 ] = "LOAD_FIELD_S32", + [ BYTECODE_OP_LOAD_FIELD_S64 ] = "LOAD_FIELD_S64", + [ BYTECODE_OP_LOAD_FIELD_U8 ] = "LOAD_FIELD_U8", + [ BYTECODE_OP_LOAD_FIELD_U16 ] = "LOAD_FIELD_U16", + [ BYTECODE_OP_LOAD_FIELD_U32 ] = "LOAD_FIELD_U32", + [ BYTECODE_OP_LOAD_FIELD_U64 ] = "LOAD_FIELD_U64", + [ BYTECODE_OP_LOAD_FIELD_STRING ] = "LOAD_FIELD_STRING", + [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = "LOAD_FIELD_SEQUENCE", + [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = "LOAD_FIELD_DOUBLE", + + [ BYTECODE_OP_UNARY_BIT_NOT ] = "UNARY_BIT_NOT", + + [ BYTECODE_OP_RETURN_S64 ] = "RETURN_S64", }; -const char *print_op(enum filter_op op) +const char *print_op(enum bytecode_op op) { - if (op >= NR_FILTER_OPS) + if (op >= NR_BYTECODE_OPS) return "UNKNOWN"; else return opnames[op]; @@ -193,7 +193,7 @@ int apply_field_reloc(const struct lttng_event_desc *event_desc, uint32_t runtime_len, uint32_t reloc_offset, const char *field_name, - enum filter_op filter_op) + enum bytecode_op bytecode_op) { const struct lttng_event_field *fields, *field = NULL; unsigned int nr_fields, i; @@ -251,8 +251,8 @@ int apply_field_reloc(const struct lttng_event_desc *event_desc, /* set type */ op = (struct load_op *) &runtime->code[reloc_offset]; - switch (filter_op) { - case FILTER_OP_LOAD_FIELD_REF: + switch (bytecode_op) { + case BYTECODE_OP_LOAD_FIELD_REF: { struct field_ref *field_ref; @@ -261,19 +261,19 @@ int apply_field_reloc(const struct lttng_event_desc *event_desc, case atype_integer: case atype_enum: case atype_enum_nestable: - op->op = FILTER_OP_LOAD_FIELD_REF_S64; + op->op = BYTECODE_OP_LOAD_FIELD_REF_S64; break; case atype_array: case atype_array_nestable: case atype_sequence: case atype_sequence_nestable: - op->op = FILTER_OP_LOAD_FIELD_REF_SEQUENCE; + op->op = BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE; break; case atype_string: - op->op = FILTER_OP_LOAD_FIELD_REF_STRING; + op->op = 
BYTECODE_OP_LOAD_FIELD_REF_STRING; break; case atype_float: - op->op = FILTER_OP_LOAD_FIELD_REF_DOUBLE; + op->op = BYTECODE_OP_LOAD_FIELD_REF_DOUBLE; break; default: return -EINVAL; @@ -293,7 +293,7 @@ int apply_context_reloc(struct bytecode_runtime *runtime, uint32_t runtime_len, uint32_t reloc_offset, const char *context_name, - enum filter_op filter_op) + enum bytecode_op bytecode_op) { struct load_op *op; struct lttng_ctx_field *ctx_field; @@ -327,8 +327,8 @@ int apply_context_reloc(struct bytecode_runtime *runtime, ctx_field = &ctx->fields[idx]; op = (struct load_op *) &runtime->code[reloc_offset]; - switch (filter_op) { - case FILTER_OP_GET_CONTEXT_REF: + switch (bytecode_op) { + case BYTECODE_OP_GET_CONTEXT_REF: { struct field_ref *field_ref; @@ -337,7 +337,7 @@ int apply_context_reloc(struct bytecode_runtime *runtime, case atype_integer: case atype_enum: case atype_enum_nestable: - op->op = FILTER_OP_GET_CONTEXT_REF_S64; + op->op = BYTECODE_OP_GET_CONTEXT_REF_S64; break; /* Sequence and array supported as string */ case atype_string: @@ -345,13 +345,13 @@ int apply_context_reloc(struct bytecode_runtime *runtime, case atype_array_nestable: case atype_sequence: case atype_sequence_nestable: - op->op = FILTER_OP_GET_CONTEXT_REF_STRING; + op->op = BYTECODE_OP_GET_CONTEXT_REF_STRING; break; case atype_float: - op->op = FILTER_OP_GET_CONTEXT_REF_DOUBLE; + op->op = BYTECODE_OP_GET_CONTEXT_REF_DOUBLE; break; case atype_dynamic: - op->op = FILTER_OP_GET_CONTEXT_REF; + op->op = BYTECODE_OP_GET_CONTEXT_REF; break; default: return -EINVAL; @@ -383,14 +383,14 @@ int apply_reloc(const struct lttng_event_desc *event_desc, op = (struct load_op *) &runtime->code[reloc_offset]; switch (op->op) { - case FILTER_OP_LOAD_FIELD_REF: + case BYTECODE_OP_LOAD_FIELD_REF: return apply_field_reloc(event_desc, runtime, runtime_len, reloc_offset, name, op->op); - case FILTER_OP_GET_CONTEXT_REF: + case BYTECODE_OP_GET_CONTEXT_REF: return apply_context_reloc(runtime, runtime_len, reloc_offset, name, op->op); - case FILTER_OP_GET_SYMBOL: - case FILTER_OP_GET_SYMBOL_FIELD: + case BYTECODE_OP_GET_SYMBOL: + case BYTECODE_OP_GET_SYMBOL_FIELD: /* * Will be handled by load specialize phase or * dynamically by interpreter. 
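
The relocation hunks above turn the generic field-reference opcodes into typed ones at link time: apply_field_reloc() looks up the event field by name and rewrites BYTECODE_OP_LOAD_FIELD_REF into the _S64, _SEQUENCE, _STRING or _DOUBLE variant according to the field's atype, and apply_context_reloc() does the same for BYTECODE_OP_GET_CONTEXT_REF. The following self-contained sketch illustrates only that specialization pattern; the demo_* enums and specialize_field_ref() are invented for the example and are not part of liblttng-ust:

	#include <stdio.h>

	/* Reduced stand-ins for the field types and opcodes handled by apply_field_reloc(). */
	enum demo_field_type {
		DEMO_TYPE_INTEGER,
		DEMO_TYPE_SEQUENCE,
		DEMO_TYPE_STRING,
		DEMO_TYPE_FLOAT,
	};

	enum demo_bytecode_op {
		DEMO_OP_LOAD_FIELD_REF,		/* generic form received over the ABI */
		DEMO_OP_LOAD_FIELD_REF_S64,	/* typed variants chosen at link time */
		DEMO_OP_LOAD_FIELD_REF_SEQUENCE,
		DEMO_OP_LOAD_FIELD_REF_STRING,
		DEMO_OP_LOAD_FIELD_REF_DOUBLE,
	};

	/* Mirrors the shape of the switch in apply_field_reloc(): pick the typed opcode. */
	static int specialize_field_ref(enum demo_field_type type,
			enum demo_bytecode_op *op)
	{
		switch (type) {
		case DEMO_TYPE_INTEGER:
			*op = DEMO_OP_LOAD_FIELD_REF_S64;
			return 0;
		case DEMO_TYPE_SEQUENCE:
			*op = DEMO_OP_LOAD_FIELD_REF_SEQUENCE;
			return 0;
		case DEMO_TYPE_STRING:
			*op = DEMO_OP_LOAD_FIELD_REF_STRING;
			return 0;
		case DEMO_TYPE_FLOAT:
			*op = DEMO_OP_LOAD_FIELD_REF_DOUBLE;
			return 0;
		default:
			return -1;	/* unknown field type: refuse to link */
		}
	}

	int main(void)
	{
		enum demo_bytecode_op op = DEMO_OP_LOAD_FIELD_REF;

		if (!specialize_field_ref(DEMO_TYPE_STRING, &op))
			printf("specialized opcode: %d\n", (int) op);
		return 0;
	}

One way to read this design: the generic opcode is what crosses the UST ABI, and the typed variant is only selected once the bytecode is linked against a concrete event or context description.
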
@@ -469,23 +469,23 @@ int _lttng_filter_link_bytecode(const struct lttng_event_desc *event_desc, next_offset = offset + sizeof(uint16_t) + strlen(name) + 1; } /* Validate bytecode */ - ret = lttng_filter_validate_bytecode(runtime); + ret = lttng_bytecode_validate(runtime); if (ret) { goto link_error; } /* Specialize bytecode */ - ret = lttng_filter_specialize_bytecode(event_desc, runtime); + ret = lttng_bytecode_specialize(event_desc, runtime); if (ret) { goto link_error; } - runtime->p.filter = lttng_filter_interpret_bytecode; + runtime->p.filter = lttng_bytecode_filter_interpret; runtime->p.link_failed = 0; cds_list_add_rcu(&runtime->p.node, insert_loc); dbg_printf("Linking successful.\n"); return 0; link_error: - runtime->p.filter = lttng_filter_interpret_bytecode_false; + runtime->p.filter = lttng_bytecode_filter_interpret_false; runtime->p.link_failed = 1; cds_list_add_rcu(&runtime->p.node, insert_loc); alloc_error: @@ -498,9 +498,9 @@ void lttng_filter_sync_state(struct lttng_bytecode_runtime *runtime) struct lttng_ust_bytecode_node *bc = runtime->bc; if (!bc->enabler->enabled || runtime->link_failed) - runtime->filter = lttng_filter_interpret_bytecode_false; + runtime->filter = lttng_bytecode_filter_interpret_false; else - runtime->filter = lttng_filter_interpret_bytecode; + runtime->filter = lttng_bytecode_filter_interpret; } /* diff --git a/liblttng-ust/lttng-filter.h b/liblttng-ust/lttng-bytecode.h similarity index 87% rename from liblttng-ust/lttng-filter.h rename to liblttng-ust/lttng-bytecode.h index 61bc213b..4f1b88f1 100644 --- a/liblttng-ust/lttng-filter.h +++ b/liblttng-ust/lttng-bytecode.h @@ -1,10 +1,10 @@ -#ifndef _LTTNG_FILTER_H -#define _LTTNG_FILTER_H +#ifndef _LTTNG_BYTECODE_H +#define _LTTNG_BYTECODE_H /* - * lttng-filter.h + * lttng-bytecode.h * - * LTTng UST filter header. + * LTTng UST bytecode header. 
* * Copyright (C) 2010-2016 Mathieu Desnoyers * @@ -40,13 +40,13 @@ #include #include #include -#include "filter-bytecode.h" +#include "bytecode.h" -/* Filter stack length, in number of entries */ -#define FILTER_STACK_LEN 10 /* includes 2 dummy */ -#define FILTER_STACK_EMPTY 1 +/* Interpreter stack length, in number of entries */ +#define INTERPRETER_STACK_LEN 10 /* includes 2 dummy */ +#define INTERPRETER_STACK_EMPTY 1 -#define FILTER_MAX_DATA_LEN 65536 +#define BYTECODE_MAX_DATA_LEN 65536 #ifndef min_t #define min_t(type, a, b) \ @@ -124,7 +124,7 @@ enum object_type { OBJECT_TYPE_DYNAMIC, }; -struct filter_get_index_data { +struct bytecode_get_index_data { uint64_t offset; /* in bytes */ size_t ctx_index; size_t array_len; @@ -157,7 +157,7 @@ struct vstack_entry { struct vstack { int top; /* top of stack */ - struct vstack_entry e[FILTER_STACK_LEN]; + struct vstack_entry e[INTERPRETER_STACK_LEN]; }; static inline @@ -185,7 +185,7 @@ struct vstack_entry *vstack_bx(struct vstack *stack) static inline int vstack_push(struct vstack *stack) { - if (stack->top >= FILTER_STACK_LEN - 1) { + if (stack->top >= INTERPRETER_STACK_LEN - 1) { ERR("Stack full\n"); return -EINVAL; } @@ -243,7 +243,7 @@ struct estack_entry { struct estack { int top; /* top of stack */ - struct estack_entry e[FILTER_STACK_LEN]; + struct estack_entry e[INTERPRETER_STACK_LEN]; }; /* @@ -260,13 +260,13 @@ struct estack { */ #define estack_ax(stack, top) \ ({ \ - assert((top) > FILTER_STACK_EMPTY); \ + assert((top) > INTERPRETER_STACK_EMPTY); \ &(stack)->e[top]; \ }) #define estack_bx(stack, top) \ ({ \ - assert((top) > FILTER_STACK_EMPTY + 1); \ + assert((top) > INTERPRETER_STACK_EMPTY + 1); \ &(stack)->e[(top) - 1]; \ }) @@ -275,7 +275,7 @@ struct estack { */ #define estack_push(stack, top, ax, bx, ax_t, bx_t) \ do { \ - assert((top) < FILTER_STACK_LEN - 1); \ + assert((top) < INTERPRETER_STACK_LEN - 1); \ (stack)->e[(top) - 1].u.v = (bx); \ (stack)->e[(top) - 1].type = (bx_t); \ (bx) = (ax); \ @@ -285,7 +285,7 @@ struct estack { #define estack_pop(stack, top, ax, bx, ax_t, bx_t) \ do { \ - assert((top) > FILTER_STACK_EMPTY); \ + assert((top) > INTERPRETER_STACK_EMPTY); \ (ax) = (bx); \ (ax_t) = (bx_t); \ (bx) = (stack)->e[(top) - 2].u.v; \ @@ -329,15 +329,15 @@ struct lttng_interpreter_output { } u; }; -const char *print_op(enum filter_op op); +const char *print_op(enum bytecode_op op); -int lttng_filter_validate_bytecode(struct bytecode_runtime *bytecode); -int lttng_filter_specialize_bytecode(const struct lttng_event_desc *event_desc, +int lttng_bytecode_validate(struct bytecode_runtime *bytecode); +int lttng_bytecode_specialize(const struct lttng_event_desc *event_desc, struct bytecode_runtime *bytecode); -uint64_t lttng_filter_interpret_bytecode_false(void *filter_data, +uint64_t lttng_bytecode_filter_interpret_false(void *filter_data, const char *filter_stack_data); -uint64_t lttng_filter_interpret_bytecode(void *filter_data, +uint64_t lttng_bytecode_filter_interpret(void *filter_data, const char *filter_stack_data); -#endif /* _LTTNG_FILTER_H */ +#endif /* _LTTNG_BYTECODE_H */ -- 2.34.1
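
The renamed lttng-bytecode.h keeps the interpreter's register-cached stack scheme: top counts entries with INTERPRETER_STACK_EMPTY as the sentinel, and estack_push()/estack_pop() shuffle the two topmost values through the caller's ax/bx variables so that binary operators read their operands via estack_ax()/estack_bx() without touching the stack array. The snippet below is a minimal, self-contained illustration of that caching idea; the demo_* macros are simplified stand-ins, not the real estack_* macros:

	#include <assert.h>
	#include <inttypes.h>
	#include <stdio.h>

	#define DEMO_STACK_LEN   10	/* plays the role of INTERPRETER_STACK_LEN */
	#define DEMO_STACK_EMPTY 1	/* plays the role of INTERPRETER_STACK_EMPTY */

	struct demo_stack {
		int64_t e[DEMO_STACK_LEN];
	};

	/* Push: spill bx to memory, promote ax to bx, the new value lands in ax. */
	#define demo_push(st, top, ax, bx, v)			\
		do {						\
			assert((top) < DEMO_STACK_LEN - 1);	\
			(st)->e[(top) - 1] = (bx);		\
			(bx) = (ax);				\
			(ax) = (v);				\
			(top)++;				\
		} while (0)

	/* Pop: discard ax, shift bx into ax, refill bx from memory. */
	#define demo_pop(st, top, ax, bx)			\
		do {						\
			assert((top) > DEMO_STACK_EMPTY);	\
			(ax) = (bx);				\
			(bx) = (st)->e[(top) - 2];		\
			(top)--;				\
		} while (0)

	int main(void)
	{
		struct demo_stack st;
		int64_t ax = 0, bx = 0, res;
		int top = DEMO_STACK_EMPTY;

		demo_push(&st, top, ax, bx, 40);	/* logical stack: [40], cached in ax */
		demo_push(&st, top, ax, bx, 2);		/* logical stack: [40, 2], bx=40 ax=2 */

		res = bx + ax;				/* binary op reads only ax/bx */
		demo_pop(&st, top, ax, bx);		/* "pop 2, push 1": drop one entry... */
		ax = res;				/* ...and the result becomes the new top */

		printf("40 + 2 = %" PRId64 "\n", ax);
		return 0;
	}

The "Pop 2, push 1" comments in the validator's binary-operator cases correspond to exactly this shape: one pop plus an overwrite of the cached top entry.
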
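For reference, the linking sequence visible in the lttng-bytecode.c hunk is: _lttng_filter_link_bytecode() runs lttng_bytecode_validate(), then lttng_bytecode_specialize(), and publishes either lttng_bytecode_filter_interpret() or, on any failure, the always-false lttng_bytecode_filter_interpret_false() as the runtime's filter callback, with link_failed recording the outcome for lttng_filter_sync_state(). The sketch below models only that fallback shape; struct demo_runtime and the demo_* helpers are hypothetical stand-ins, while the callback signature follows the prototypes at the end of the header:

	#include <inttypes.h>
	#include <stdio.h>

	/* Reduced stand-in for struct bytecode_runtime: only the fields needed
	 * to show the link-time fallback are kept. */
	struct demo_runtime {
		int link_failed;
		uint64_t (*filter)(void *filter_data, const char *filter_stack_data);
	};

	static uint64_t demo_interpret(void *filter_data, const char *filter_stack_data)
	{
		(void) filter_data;
		(void) filter_stack_data;
		return 1;	/* pretend the bytecode evaluated to "record" */
	}

	/* Plays the role of lttng_bytecode_filter_interpret_false(). */
	static uint64_t demo_interpret_false(void *filter_data, const char *filter_stack_data)
	{
		(void) filter_data;
		(void) filter_stack_data;
		return 0;
	}

	static int demo_validate(struct demo_runtime *rt)   { (void) rt; return 0; }
	static int demo_specialize(struct demo_runtime *rt) { (void) rt; return 0; }

	/* Mirrors the link_error path: a runtime whose link failed is still
	 * published, but wired to the always-false interpreter. */
	static void demo_link(struct demo_runtime *rt)
	{
		if (demo_validate(rt) || demo_specialize(rt)) {
			rt->link_failed = 1;
			rt->filter = demo_interpret_false;
			return;
		}
		rt->link_failed = 0;
		rt->filter = demo_interpret;
	}

	int main(void)
	{
		struct demo_runtime rt;

		demo_link(&rt);
		printf("link_failed=%d, filter() returned %" PRIu64 "\n",
			rt.link_failed, rt.filter(&rt, NULL));
		return 0;
	}
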