-/* SPDX-License-Identifier: (GPL-2.0 or LGPL-2.1)
+/* SPDX-License-Identifier: (GPL-2.0-only or LGPL-2.1-only)
*
* lttng-context-callstack-legacy-impl.h
*
struct lttng_cs_type {
const char *name;
+ const char *length_name;
const char *save_func_name;
void (*save_func)(struct stack_trace *trace);
};
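+/*
+ * Each mode bundles the event field name, the name of its new length
+ * field, and the stack-saving callback; save_func starts out NULL and is
+ * presumably resolved from save_func_name at init time (see init_type()).
+ */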
static struct lttng_cs_type cs_types[] = {
{
.name = "callstack_kernel",
+ .length_name = "_callstack_kernel_length",
.save_func_name = "save_stack_trace",
.save_func = NULL,
},
{
.name = "callstack_user",
+ .length_name = "_callstack_user_length",
.save_func_name = "save_stack_trace_user",
.save_func = NULL,
},
};
+static
+const char *lttng_cs_ctx_mode_name(enum lttng_cs_ctx_modes mode)
+{
+ return cs_types[mode].name;
+}
+
+static
+const char *lttng_cs_ctx_mode_length_name(enum lttng_cs_ctx_modes mode)
+{
+ return cs_types[mode].length_name;
+}
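+
+/*
+ * Illustrative sketch: with the split, each callstack context should show
+ * up to trace readers as a length/sequence pair, roughly (CTF metadata;
+ * the exact integer sizes depend on the platform):
+ *
+ *   integer { size = 32; } _callstack_kernel_length;
+ *   integer { size = 64; } callstack_kernel[ _callstack_kernel_length ];
+ */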
+
static
int init_type(enum lttng_cs_ctx_modes mode)
{
return 0;
}
+static
+void lttng_cs_set_init(struct lttng_cs __percpu *cs_set)
+{
+ int cpu, i;
+
+ for_each_possible_cpu(cpu) {
+ struct lttng_cs *cs;
+
+ cs = per_cpu_ptr(cs_set, cpu);
+ for (i = 0; i < RING_BUFFER_MAX_NESTING; i++) {
+ struct lttng_cs_dispatch *dispatch;
+
+ dispatch = &cs->dispatch[i];
+ dispatch->stack_trace.entries = dispatch->entries;
+ dispatch->stack_trace.max_entries = MAX_ENTRIES;
+ }
+ }
+}
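+
+/*
+ * A sketch of the per-cpu layout implied by the loop above (the actual
+ * struct definitions appear earlier in this header):
+ *
+ *   struct lttng_cs_dispatch {
+ *           struct stack_trace stack_trace;
+ *           unsigned long entries[MAX_ENTRIES];
+ *   };
+ *
+ *   struct lttng_cs {
+ *           struct lttng_cs_dispatch dispatch[RING_BUFFER_MAX_NESTING];
+ *   };
+ */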
+
/* Keep track of nesting inside userspace callstack context code */
DEFINE_PER_CPU(int, callstack_user_nesting);
return &cs->dispatch[buffer_nesting].stack_trace;
}
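+/*
+ * Reserve space for the fixed-size length field: alignment padding for
+ * an unsigned int, plus the unsigned int itself, regardless of whether a
+ * stack trace could be captured.
+ */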
+static
+size_t lttng_callstack_length_get_size(size_t offset, struct lttng_ctx_field *field,
+ struct lib_ring_buffer_ctx *ctx,
+ struct lttng_channel *chan)
+{
+ size_t orig_offset = offset;
+
+ offset += lib_ring_buffer_align(offset, lttng_alignof(unsigned int));
+ offset += sizeof(unsigned int);
+ return offset - orig_offset;
+}
+
/*
* In order to reserve the correct size, the callstack is computed. The
* resulting callstack is saved to be accessed in the record step.
*/
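+/*
+ * As noted above, get_size runs during space reservation and the record
+ * callbacks run afterwards, so the trace captured here is reused by both
+ * lttng_callstack_length_record() and lttng_callstack_sequence_record()
+ * for the same event.
+ */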
static
-size_t lttng_callstack_get_size(size_t offset, struct lttng_ctx_field *field,
- struct lib_ring_buffer_ctx *ctx,
- struct lttng_channel *chan)
+size_t lttng_callstack_sequence_get_size(size_t offset, struct lttng_ctx_field *field,
+ struct lib_ring_buffer_ctx *ctx,
+ struct lttng_channel *chan)
{
struct stack_trace *trace;
	struct field_data *fdata = field->priv;
	size_t orig_offset = offset;

/* do not write data if no space is available */
trace = stack_trace_context(field, ctx);
if (unlikely(!trace)) {
- offset += lib_ring_buffer_align(offset, lttng_alignof(unsigned int));
- offset += sizeof(unsigned int);
offset += lib_ring_buffer_align(offset, lttng_alignof(unsigned long));
return offset - orig_offset;
}
	if (trace->nr_entries > 0
			&& trace->entries[trace->nr_entries - 1] == ULONG_MAX) {
trace->nr_entries--;
}
- offset += lib_ring_buffer_align(offset, lttng_alignof(unsigned int));
- offset += sizeof(unsigned int);
offset += lib_ring_buffer_align(offset, lttng_alignof(unsigned long));
offset += sizeof(unsigned long) * trace->nr_entries;
	/* Add our own ULONG_MAX delimiter to show incomplete stack. */
	if (trace->nr_entries == trace->max_entries)
		offset += sizeof(unsigned long);
	return offset - orig_offset;
}
static
-void lttng_callstack_record(struct lttng_ctx_field *field,
+void lttng_callstack_length_record(struct lttng_ctx_field *field,
struct lib_ring_buffer_ctx *ctx,
struct lttng_channel *chan)
{
struct stack_trace *trace = stack_trace_context(field, ctx);
unsigned int nr_seq_entries;
+ lib_ring_buffer_align_ctx(ctx, lttng_alignof(unsigned int));
if (unlikely(!trace)) {
nr_seq_entries = 0;
- lib_ring_buffer_align_ctx(ctx, lttng_alignof(unsigned int));
- chan->ops->event_write(ctx, &nr_seq_entries, sizeof(unsigned int));
- lib_ring_buffer_align_ctx(ctx, lttng_alignof(unsigned long));
+ } else {
+ nr_seq_entries = trace->nr_entries;
+ if (trace->nr_entries == trace->max_entries)
+ nr_seq_entries++;
+ }
+ chan->ops->event_write(ctx, &nr_seq_entries, sizeof(unsigned int));
+}
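+
+/*
+ * The payload written here mirrors lttng_callstack_sequence_get_size():
+ * nr_entries addresses, plus one extra ULONG_MAX delimiter slot when the
+ * trace filled up, so the element count matches the length field written
+ * by lttng_callstack_length_record().
+ */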
+static
+void lttng_callstack_sequence_record(struct lttng_ctx_field *field,
+ struct lib_ring_buffer_ctx *ctx,
+ struct lttng_channel *chan)
+{
+ struct stack_trace *trace = stack_trace_context(field, ctx);
+ unsigned int nr_seq_entries;
+
+ lib_ring_buffer_align_ctx(ctx, lttng_alignof(unsigned long));
+ if (unlikely(!trace)) {
return;
}
- lib_ring_buffer_align_ctx(ctx, lttng_alignof(unsigned int));
nr_seq_entries = trace->nr_entries;
if (trace->nr_entries == trace->max_entries)
nr_seq_entries++;
- chan->ops->event_write(ctx, &nr_seq_entries, sizeof(unsigned int));
- lib_ring_buffer_align_ctx(ctx, lttng_alignof(unsigned long));
chan->ops->event_write(ctx, trace->entries,
sizeof(unsigned long) * trace->nr_entries);
	/* Add our own ULONG_MAX delimiter to show incomplete stack. */
	if (trace->nr_entries == trace->max_entries) {
		unsigned long delim = ULONG_MAX;

		chan->ops->event_write(ctx, &delim, sizeof(unsigned long));
	}
}