Move alignment into event write callback
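This change drops the explicit lib_ring_buffer_align_ctx() calls from the record callbacks and instead passes the required alignment as a new fourth argument to the channel's event_write() operation, so alignment is performed inside the write callback itself. Below is a minimal sketch of what the callback side could look like under the new signature; the callback body is not part of this hunk, and the client configuration name (client_config) is an assumption inferred from the new call sites:

/*
 * Sketch only: with the added alignment argument, event_write() can align
 * the ring buffer context itself before copying the payload, so record
 * callbacks no longer need to call lib_ring_buffer_align_ctx() explicitly.
 */
static
void lttng_event_write(struct lttng_kernel_ring_buffer_ctx *ctx,
		const void *src, size_t len, size_t alignment)
{
	/* Align the write position to the requested alignment. */
	lib_ring_buffer_align_ctx(ctx, alignment);
	/* Copy the payload into the ring buffer. */
	lib_ring_buffer_write(&client_config, ctx, src, len);
}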
diff --git a/src/lttng-context-callstack-stackwalk-impl.h b/src/lttng-context-callstack-stackwalk-impl.h
index 2e43735904a00523e3049b9502cbe0b69c40164d..e73d1156ae7c0731e7918c194982e2446688f278 100644
--- a/src/lttng-context-callstack-stackwalk-impl.h
+++ b/src/lttng-context-callstack-stackwalk-impl.h
@@ -132,7 +132,7 @@ struct lttng_stack_trace *stack_trace_context(struct field_data *fdata, int cpu)
 }
 
 static
-size_t lttng_callstack_length_get_size(void *priv, struct lttng_probe_ctx *probe_ctx, size_t offset)
+size_t lttng_callstack_length_get_size(void *priv, struct lttng_kernel_probe_ctx *probe_ctx, size_t offset)
 {
        size_t orig_offset = offset;
 
@@ -146,7 +146,7 @@ size_t lttng_callstack_length_get_size(void *priv, struct lttng_probe_ctx *probe
  * resulting callstack is saved to be accessed in the record step.
  */
 static
-size_t lttng_callstack_sequence_get_size(void *priv, struct lttng_probe_ctx *probe_ctx, size_t offset)
+size_t lttng_callstack_sequence_get_size(void *priv, struct lttng_kernel_probe_ctx *probe_ctx, size_t offset)
 {
        struct lttng_stack_trace *trace;
        struct field_data *fdata = (struct field_data *) priv;
@@ -193,16 +193,15 @@ size_t lttng_callstack_sequence_get_size(void *priv, struct lttng_probe_ctx *pro
 }
 
 static
-void lttng_callstack_length_record(void *priv, struct lttng_probe_ctx *probe_ctx,
-                       struct lib_ring_buffer_ctx *ctx,
-                       struct lttng_channel *chan)
+void lttng_callstack_length_record(void *priv, struct lttng_kernel_probe_ctx *probe_ctx,
+                       struct lttng_kernel_ring_buffer_ctx *ctx,
+                       struct lttng_kernel_channel_buffer *chan)
 {
        int cpu = ctx->priv.reserve_cpu;
        struct field_data *fdata = (struct field_data *) priv;
        struct lttng_stack_trace *trace = stack_trace_context(fdata, cpu);
        unsigned int nr_seq_entries;
 
-       lib_ring_buffer_align_ctx(ctx, lttng_alignof(unsigned int));
        if (unlikely(!trace)) {
                nr_seq_entries = 0;
        } else {
@@ -210,32 +209,33 @@ void lttng_callstack_length_record(void *priv, struct lttng_probe_ctx *probe_ctx
                if (trace->nr_entries == MAX_ENTRIES)
                        nr_seq_entries++;
        }
-       chan->ops->event_write(ctx, &nr_seq_entries, sizeof(unsigned int));
+       chan->ops->event_write(ctx, &nr_seq_entries, sizeof(unsigned int), lttng_alignof(unsigned int));
 }
 
 static
-void lttng_callstack_sequence_record(void *priv, struct lttng_probe_ctx *probe_ctx,
-                       struct lib_ring_buffer_ctx *ctx,
-                       struct lttng_channel *chan)
+void lttng_callstack_sequence_record(void *priv, struct lttng_kernel_probe_ctx *probe_ctx,
+                       struct lttng_kernel_ring_buffer_ctx *ctx,
+                       struct lttng_kernel_channel_buffer *chan)
 {
        int cpu = ctx->priv.reserve_cpu;
        struct field_data *fdata = (struct field_data *) priv;
        struct lttng_stack_trace *trace = stack_trace_context(fdata, cpu);
        unsigned int nr_seq_entries;
 
-       lib_ring_buffer_align_ctx(ctx, lttng_alignof(unsigned long));
        if (unlikely(!trace)) {
+               /* We need to align even if there are 0 elements. */
+               lib_ring_buffer_align_ctx(ctx, lttng_alignof(unsigned long));
                return;
        }
        nr_seq_entries = trace->nr_entries;
        if (trace->nr_entries == MAX_ENTRIES)
                nr_seq_entries++;
        chan->ops->event_write(ctx, trace->entries,
-                       sizeof(unsigned long) * trace->nr_entries);
+                       sizeof(unsigned long) * trace->nr_entries, lttng_alignof(unsigned long));
        /* Add our own ULONG_MAX delimiter to show incomplete stack. */
        if (trace->nr_entries == MAX_ENTRIES) {
                unsigned long delim = ULONG_MAX;
 
-               chan->ops->event_write(ctx, &delim, sizeof(unsigned long));
+               chan->ops->event_write(ctx, &delim, sizeof(unsigned long), 1);
        }
 }
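For comparison, a record callback for a simple scalar context field would follow the same pattern under the new signature. This is a hypothetical example, not part of this commit; the function name and payload are illustrative only:

static
void lttng_example_record(void *priv, struct lttng_kernel_probe_ctx *probe_ctx,
		struct lttng_kernel_ring_buffer_ctx *ctx,
		struct lttng_kernel_channel_buffer *chan)
{
	unsigned long value = 42;	/* illustrative payload only */

	/* No explicit lib_ring_buffer_align_ctx() call is needed anymore. */
	chan->ops->event_write(ctx, &value, sizeof(value), lttng_alignof(value));
}

Note that in the callstack sequence callback above, the trailing ULONG_MAX delimiter is written with an alignment of 1, since the buffer is already aligned on unsigned long after the preceding entries write, while the empty-trace path keeps an explicit lib_ring_buffer_align_ctx() call because no event_write() runs there for a zero-length sequence.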