void compute_fields_offsets(LttTracefile *tf,
- LttFacility *fac, LttField *field, off_t *offset, void *root);
+ LttFacility *fac, LttField *field, off_t *offset, void *root, guint is_compact);
LttEvent *ltt_event_new()
new_offset = g_array_index(f->dynamic_offsets, off_t, i);
}
compute_fields_offsets(e->tracefile,
- ltt_event_facility(e), field, &new_offset, e->data);
+ ltt_event_facility(e), field, &new_offset, e->data, 0);
return field;
}
guint32 ltt_event_get_unsigned(LttEvent *e, LttField *f)
{
gboolean reverse_byte_order;
- if(unlikely(f->field_type.network)) {
- reverse_byte_order = (g_ntohs(0x1) != 0x1);
- } else {
- reverse_byte_order = LTT_GET_BO(e->tracefile);
- }
+ if(unlikely(f->field_type.network)) {
+ reverse_byte_order = (g_ntohs(0x1) != 0x1);
+ } else {
+ reverse_byte_order = LTT_GET_BO(e->tracefile);
+ }
switch(f->field_size) {
case 1:
gint32 ltt_event_get_int(LttEvent *e, LttField *f)
{
gboolean reverse_byte_order;
- if(unlikely(f->field_type.network)) {
- reverse_byte_order = (g_ntohs(0x1) != 0x1);
- } else {
- reverse_byte_order = LTT_GET_BO(e->tracefile);
- }
+ if(unlikely(f->field_type.network)) {
+ reverse_byte_order = (g_ntohs(0x1) != 0x1);
+ } else {
+ reverse_byte_order = LTT_GET_BO(e->tracefile);
+ }
switch(f->field_size) {
case 1:
guint64 ltt_event_get_long_unsigned(LttEvent *e, LttField *f)
{
- gboolean reverse_byte_order;
- if(unlikely(f->field_type.network)) {
- reverse_byte_order = (g_ntohs(0x1) != 0x1);
- } else {
- reverse_byte_order = LTT_GET_BO(e->tracefile);
- }
+ gboolean reverse_byte_order;
+ if(unlikely(f->field_type.network)) {
+ reverse_byte_order = (g_ntohs(0x1) != 0x1);
+ } else {
+ reverse_byte_order = LTT_GET_BO(e->tracefile);
+ }
switch(f->field_size) {
case 1:
gint64 ltt_event_get_long_int(LttEvent *e, LttField *f)
{
- gboolean reverse_byte_order;
- if(unlikely(f->field_type.network)) {
- reverse_byte_order = (g_ntohs(0x1) != 0x1);
- } else {
- reverse_byte_order = LTT_GET_BO(e->tracefile);
- }
+ gboolean reverse_byte_order;
+ if(unlikely(f->field_type.network)) {
+ reverse_byte_order = (g_ntohs(0x1) != 0x1);
+ } else {
+ reverse_byte_order = LTT_GET_BO(e->tracefile);
+ }
switch(f->field_size) {
case 1:
float ltt_event_get_float(LttEvent *e, LttField *f)
{
- gboolean reverse_byte_order;
- if(unlikely(f->field_type.network)) {
- reverse_byte_order = (g_ntohs(0x1) != 0x1);
- } else {
- g_assert(LTT_HAS_FLOAT(e->tracefile));
- reverse_byte_order = LTT_GET_FLOAT_BO(e->tracefile);
- }
+ gboolean reverse_byte_order;
+ if(unlikely(f->field_type.network)) {
+ reverse_byte_order = (g_ntohs(0x1) != 0x1);
+ } else {
+ g_assert(LTT_HAS_FLOAT(e->tracefile));
+ reverse_byte_order = LTT_GET_FLOAT_BO(e->tracefile);
+ }
g_assert(f->field_type.type_class == LTT_FLOAT && f->field_size == 4);
double ltt_event_get_double(LttEvent *e, LttField *f)
{
- gboolean reverse_byte_order;
- if(unlikely(f->field_type.network)) {
- reverse_byte_order = (g_ntohs(0x1) != 0x1);
- } else {
- g_assert(LTT_HAS_FLOAT(e->tracefile));
- reverse_byte_order = LTT_GET_FLOAT_BO(e->tracefile);
- }
+ gboolean reverse_byte_order;
+ if(unlikely(f->field_type.network)) {
+ reverse_byte_order = (g_ntohs(0x1) != 0x1);
+ } else {
+ g_assert(LTT_HAS_FLOAT(e->tracefile));
+ reverse_byte_order = LTT_GET_FLOAT_BO(e->tracefile);
+ }
if(f->field_size == 4)
return ltt_event_get_float(e, f);
void compute_fields_offsets(LttTracefile *tf,
- LttFacility *fac, LttField *field, off_t *offset, void *root)
+ LttFacility *fac, LttField *field, off_t *offset, void *root, guint is_compact)
{
LttType *type = &field->field_type;
+ if(unlikely(is_compact)) {
+ g_assert(field->field_size != 0);
+ /* FIXME: this is a huge hack:
+ * offset is computed between the compact_data field in struct LttEvent
+ * and the address of the field root in the memory map.
+ * Both will stay at the same addresses while the event
+ * is readable, so this is safe for now.
+ */
+ field->offset_root = (unsigned long)(&tf->event.compact_data)
+ - (unsigned long)root;
+ field->fixed_root = FIELD_FIXED;
+ return;
+ }
+
switch(type->type_class) {
case LTT_INT_FIXED:
case LTT_UINT_FIXED:
0);
for(i=0; i<type->size; i++) {
g_array_append_val(field->dynamic_offsets, *offset);
- compute_fields_offsets(tf, fac, child, offset, root);
+ compute_fields_offsets(tf, fac, child, offset, root, is_compact);
}
}
// local_offset = field->array_offset;
field->offset_root = *offset;
child = &g_array_index(type->fields, LttField, 0);
- compute_fields_offsets(tf, fac, child, offset, root);
+ compute_fields_offsets(tf, fac, child, offset, root, is_compact);
child = &g_array_index(type->fields, LttField, 1);
*offset += ltt_align(*offset, get_alignment(child),
fac->alignment);
num_elem = ltt_event_field_element_number(&tf->event, field);
for(i=0; i<num_elem; i++) {
g_array_append_val(field->dynamic_offsets, *offset);
- compute_fields_offsets(tf, fac, child, offset, root);
+ compute_fields_offsets(tf, fac, child, offset, root, is_compact);
}
g_assert(num_elem == field->dynamic_offsets->len);
}
for(i=0; i<type->fields->len; i++) {
child = &g_array_index(type->fields, LttField, i);
- compute_fields_offsets(tf, fac, child, offset, root);
+ compute_fields_offsets(tf, fac, child, offset, root, is_compact);
}
}
break;
for(i=0; i<type->fields->len; i++) {
*offset = field->offset_root;
child = &g_array_index(type->fields, LttField, i);
- compute_fields_offsets(tf, fac, child, offset, root);
+ compute_fields_offsets(tf, fac, child, offset, root, is_compact);
}
*offset = field->offset_root + field->field_size;
}
for(i=0; i<event->fields->len; i++) {
//g_debug("computing offset %u of %u\n", i, event->fields->len-1);
LttField *field = &g_array_index(event->fields, LttField, i);
- compute_fields_offsets(tf, fac, field, offset, root);
+ if(event->has_compact_data && i == 0)
+ compute_fields_offsets(tf, fac, field, offset, root, 1);
+ else
+ compute_fields_offsets(tf, fac, field, offset, root, 0);
}
}