+ * Stage 3.1 of tracepoint event generation.
+ *
+ * Create a static inline function that lays out the filter stack data.
+ * We make both write and nowrite data available to the filter.
+ */
+
+/* Reset all macros within TRACEPOINT_EVENT */
+#include <lttng/ust-tracepoint-event-reset.h>
+#include <lttng/ust-tracepoint-event-write.h>
+#include <lttng/ust-tracepoint-event-nowrite.h>
+
+#undef _ctf_integer_ext
+#define _ctf_integer_ext(_type, _item, _src, _byte_order, _base, _nowrite) /* store one integer field on the filter stack as 64-bit host-endian */ \
+ if (lttng_is_signed_type(_type)) { /* signed field: sign-extend through int64_t */ \
+ int64_t __ctf_tmp_int64; \
+ switch (sizeof(_type)) { /* select the matching fixed-width view */ \
+ case 1: \
+ { \
+ union { _type t; int8_t v; } __tmp = { (_type) (_src) }; /* type-pun through a union, not a pointer cast */ \
+ __ctf_tmp_int64 = (int64_t) __tmp.v; \
+ break; \
+ } \
+ case 2: \
+ { \
+ union { _type t; int16_t v; } __tmp = { (_type) (_src) }; \
+ if (_byte_order != BYTE_ORDER) /* convert field to host byte order */ \
+ __tmp.v = bswap_16(__tmp.v); \
+ __ctf_tmp_int64 = (int64_t) __tmp.v; \
+ break; \
+ } \
+ case 4: \
+ { \
+ union { _type t; int32_t v; } __tmp = { (_type) (_src) }; \
+ if (_byte_order != BYTE_ORDER) \
+ __tmp.v = bswap_32(__tmp.v); \
+ __ctf_tmp_int64 = (int64_t) __tmp.v; \
+ break; \
+ } \
+ case 8: \
+ { \
+ union { _type t; int64_t v; } __tmp = { (_type) (_src) }; \
+ if (_byte_order != BYTE_ORDER) \
+ __tmp.v = bswap_64(__tmp.v); \
+ __ctf_tmp_int64 = (int64_t) __tmp.v; \
+ break; \
+ } \
+ default: \
+ abort(); /* unexpected integer size */ \
+ }; \
+ memcpy(__stack_data, &__ctf_tmp_int64, sizeof(int64_t)); /* memcpy: __stack_data may be unaligned */ \
+ } else { /* unsigned field: zero-extend through uint64_t */ \
+ uint64_t __ctf_tmp_uint64; \
+ switch (sizeof(_type)) { \
+ case 1: \
+ { \
+ union { _type t; uint8_t v; } __tmp = { (_type) (_src) }; \
+ __ctf_tmp_uint64 = (uint64_t) __tmp.v; \
+ break; \
+ } \
+ case 2: \
+ { \
+ union { _type t; uint16_t v; } __tmp = { (_type) (_src) }; \
+ if (_byte_order != BYTE_ORDER) \
+ __tmp.v = bswap_16(__tmp.v); \
+ __ctf_tmp_uint64 = (uint64_t) __tmp.v; \
+ break; \
+ } \
+ case 4: \
+ { \
+ union { _type t; uint32_t v; } __tmp = { (_type) (_src) }; \
+ if (_byte_order != BYTE_ORDER) \
+ __tmp.v = bswap_32(__tmp.v); \
+ __ctf_tmp_uint64 = (uint64_t) __tmp.v; \
+ break; \
+ } \
+ case 8: \
+ { \
+ union { _type t; uint64_t v; } __tmp = { (_type) (_src) }; \
+ if (_byte_order != BYTE_ORDER) \
+ __tmp.v = bswap_64(__tmp.v); \
+ __ctf_tmp_uint64 = (uint64_t) __tmp.v; \
+ break; \
+ } \
+ default: \
+ abort(); \
+ }; \
+ memcpy(__stack_data, &__ctf_tmp_uint64, sizeof(uint64_t)); \
+ } \
+ __stack_data += sizeof(int64_t); /* both branches consume one 64-bit slot */
+
+#undef _ctf_float
+#define _ctf_float(_type, _item, _src, _nowrite) /* store one floating-point field on the filter stack, widened to double */ \
+ { \
+ double __ctf_tmp_double = (double) (_type) (_src); /* convert via the field's declared type first */ \
+ memcpy(__stack_data, &__ctf_tmp_double, sizeof(double)); /* memcpy: __stack_data may be unaligned */ \
+ __stack_data += sizeof(double); \
+ }
+
+#undef _ctf_array_encoded
+#define _ctf_array_encoded(_type, _item, _src, _byte_order, _length, \
+ _encoding, _nowrite, _elem_type_base) /* store an array field as a (length, pointer) pair on the filter stack */ \
+ { \
+ unsigned long __ctf_tmp_ulong = (unsigned long) (_length); /* array length fixed by the tracepoint definition */ \
+ const void *__ctf_tmp_ptr = (_src); /* data is referenced, not copied */ \
+ memcpy(__stack_data, &__ctf_tmp_ulong, sizeof(unsigned long)); \
+ __stack_data += sizeof(unsigned long); \
+ memcpy(__stack_data, &__ctf_tmp_ptr, sizeof(void *)); \
+ __stack_data += sizeof(void *); \
+ }
+
+#undef _ctf_sequence_encoded
+#define _ctf_sequence_encoded(_type, _item, _src, _byte_order, _length_type, \
+ _src_length, _encoding, _nowrite, _elem_type_base) /* store a sequence field as a (runtime length, pointer) pair on the filter stack */ \
+ { \
+ unsigned long __ctf_tmp_ulong = (unsigned long) (_src_length); /* length evaluated at trace time */ \
+ const void *__ctf_tmp_ptr = (_src); /* data is referenced, not copied */ \
+ memcpy(__stack_data, &__ctf_tmp_ulong, sizeof(unsigned long)); \
+ __stack_data += sizeof(unsigned long); \
+ memcpy(__stack_data, &__ctf_tmp_ptr, sizeof(void *)); \
+ __stack_data += sizeof(void *); \
+ }
+
+#undef _ctf_string
+#define _ctf_string(_item, _src, _nowrite) /* store a string field as a pointer on the filter stack */ \
+ { \
+ const void *__ctf_tmp_ptr = \
+ ((_src) ? (_src) : __LTTNG_UST_NULL_STRING); /* NULL is replaced by a placeholder string */ \
+ memcpy(__stack_data, &__ctf_tmp_ptr, sizeof(void *)); \
+ __stack_data += sizeof(void *); \
+ }
+
+#undef _ctf_enum
+#define _ctf_enum(_provider, _name, _type, _item, _src, _nowrite) /* an enum field is stored like its underlying integer type */ \
+ _ctf_integer_ext(_type, _item, _src, BYTE_ORDER, 10, _nowrite)
+
+#undef TP_ARGS
+#define TP_ARGS(...) __VA_ARGS__ /* pass the argument list through unchanged */
+
+#undef TP_FIELDS
+#define TP_FIELDS(...) __VA_ARGS__ /* expand the field list in place */
+
+#undef TRACEPOINT_EVENT_CLASS
+#define TRACEPOINT_EVENT_CLASS(_provider, _name, _args, _fields) /* emit the per-class filter-stack preparation function */ \
+static inline \
+void __event_prepare_filter_stack__##_provider##___##_name(char *__stack_data,\
+ _TP_ARGS_DATA_PROTO(_args)) \
+{ \
+ _fields /* expands to the _ctf_* stack serializers defined above */ \
+}
+
+#include TRACEPOINT_INCLUDE
+
+/*
+ * Stage 4 of tracepoint event generation.