callstack context: use delimiter when stack is incomplete
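
Background (not part of the commit): the kernel stack-trace saver invoked through save_func() historically terminates a trace with a ULONG_MAX entry when room is left in the entries array. This change strips that terminator from the recorded sequence to save space, and instead appends a single ULONG_MAX entry only when the trace filled every max_entries slot, marking the stack as possibly incomplete. A minimal sketch of the resulting event payload, assuming the sequence encoding written by event_write() in the hunks below:

	/*
	 * Callstack context field layout after this change (sketch only):
	 *
	 *   unsigned int  nr_seq_entries;            aligned on unsigned int
	 *   unsigned long entries[nr_seq_entries];   aligned on unsigned long
	 *
	 * A complete stack contains only return addresses; a stack that
	 * filled all max_entries slots carries one extra trailing
	 * ULONG_MAX entry as an "incomplete stack" marker.
	 */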
[lttng-modules.git] / lttng-context-callstack.c
index 8c3ee3dcf6d32aaa111b7a328896cb83be7fdff3..6bfe794c2d58725b91a714516b77261ddd834385 100644
@@ -119,7 +119,7 @@ struct stack_trace *stack_trace_context(struct lttng_ctx_field *field,
 {
        int nesting;
        struct lttng_cs *cs;
-       struct field_data *fdata = field->private;
+       struct field_data *fdata = field->priv;
 
        /*
         * get_cpu() is not required, preemption is already
@@ -147,7 +147,7 @@ size_t lttng_callstack_get_size(size_t offset, struct lttng_ctx_field *field,
 {
        size_t size = 0;
        struct stack_trace *trace;
-       struct field_data *fdata = field->private;
+       struct field_data *fdata = field->priv;
 
        /* do not write data if no space is available */
        trace = stack_trace_context(field, ctx);
@@ -159,10 +159,22 @@ size_t lttng_callstack_get_size(size_t offset, struct lttng_ctx_field *field,
 
        /* do the real work and reserve space */
        cs_types[fdata->mode].save_func(trace);
+       /*
+        * Remove final ULONG_MAX delimiter. If we cannot find it, add
+        * our own marker to show that the stack is incomplete. This is
+        * more compact for a trace.
+        */
+       if (trace->nr_entries > 0
+                       && trace->entries[trace->nr_entries - 1] == ULONG_MAX) {
+               trace->nr_entries--;
+       }
        size += lib_ring_buffer_align(offset, lttng_alignof(unsigned int));
        size += sizeof(unsigned int);
        size += lib_ring_buffer_align(offset, lttng_alignof(unsigned long));
        size += sizeof(unsigned long) * trace->nr_entries;
+       /* Add our own ULONG_MAX delimiter to show incomplete stack. */
+       if (trace->nr_entries == trace->max_entries)
+               size += sizeof(unsigned long);
        return size;
 }
 
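
The size reserved above has to match, byte for byte, what lttng_callstack_record() writes in the next hunk: both sides must agree on whether the ULONG_MAX delimiter is part of the sequence. A minimal sketch of that shared computation, using a hypothetical helper name that does not exist in the patch:

	/* Hypothetical helper, for illustration only: number of sequence
	 * entries to reserve and to record, including the trailing
	 * ULONG_MAX delimiter when the stack filled every slot. */
	static unsigned int lttng_cs_seq_entries(const struct stack_trace *trace)
	{
		unsigned int nr = trace->nr_entries;

		if (trace->nr_entries == trace->max_entries)
			nr++;	/* account for the "incomplete stack" delimiter */
		return nr;
	}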
@@ -172,14 +184,24 @@ void lttng_callstack_record(struct lttng_ctx_field *field,
                            struct lttng_channel *chan)
 {
        struct stack_trace *trace = stack_trace_context(field, ctx);
+       unsigned int nr_seq_entries;
 
        if (!trace)
                return;
        lib_ring_buffer_align_ctx(ctx, lttng_alignof(unsigned int));
-       chan->ops->event_write(ctx, &trace->nr_entries, sizeof(unsigned int));
+       nr_seq_entries = trace->nr_entries;
+       if (trace->nr_entries == trace->max_entries)
+               nr_seq_entries++;
+       chan->ops->event_write(ctx, &nr_seq_entries, sizeof(unsigned int));
        lib_ring_buffer_align_ctx(ctx, lttng_alignof(unsigned long));
        chan->ops->event_write(ctx, trace->entries,
                        sizeof(unsigned long) * trace->nr_entries);
+       /* Add our own ULONG_MAX delimiter to show incomplete stack. */
+       if (trace->nr_entries == trace->max_entries) {
+               unsigned long delim = ULONG_MAX;
+
+               chan->ops->event_write(ctx, &delim, sizeof(unsigned long));
+       }
 }
 
 static
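
On the reading side, a consumer can tell a truncated stack from a complete one by checking the last recorded entry. A minimal user-space sketch, assuming the count and entries have already been extracted from the event payload (names are illustrative):

	#include <stdbool.h>
	#include <limits.h>

	/* Illustration only: true if the recorded callstack hit
	 * max_entries on the kernel side, in which case
	 * lttng_callstack_record() appended a ULONG_MAX delimiter. */
	static bool callstack_is_truncated(const unsigned long *entries,
					   unsigned int nr_seq_entries)
	{
		return nr_seq_entries > 0 &&
		       entries[nr_seq_entries - 1] == ULONG_MAX;
	}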
@@ -239,7 +261,7 @@ error_alloc:
 static
 void lttng_callstack_destroy(struct lttng_ctx_field *field)
 {
-       struct field_data *fdata = field->private;
+       struct field_data *fdata = field->priv;
 
        field_data_free(fdata);
 }
@@ -259,7 +281,6 @@ int __lttng_add_callstack_generic(struct lttng_ctx **ctx, int mode)
        if (!field)
                return -ENOMEM;
        if (lttng_find_context(*ctx, ctx_name)) {
-               printk("%s lttng_find_context failed\n", ctx_name);
                ret = -EEXIST;
                goto error_find;
        }
@@ -289,10 +310,9 @@ int __lttng_add_callstack_generic(struct lttng_ctx **ctx, int mode)
 
        field->get_size_arg = lttng_callstack_get_size;
        field->record = lttng_callstack_record;
-       field->private = fdata;
+       field->priv = fdata;
        field->destroy = lttng_callstack_destroy;
        wrapper_vmalloc_sync_all();
-       printk("lttng add-context %s\n", ctx_name);
        return 0;
 
 error_create: