Refactoring: type description structures
[lttng-modules.git] / src / lttng-bytecode-specialize.c
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-bytecode-specialize.c
4 *
5 * LTTng modules bytecode code specializer.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <linux/slab.h>
11 #include <lttng/lttng-bytecode.h>
12 #include <lttng/align.h>
13 #include <lttng/events-internal.h>
14
/*
 * Reserve @len bytes, aligned on @align, in the runtime's data area.
 *
 * Grows the backing buffer with krealloc() as needed and zeroes the
 * newly allocated tail. Returns the byte offset of the reserved
 * (aligned) region within runtime->data, -EINVAL if the resulting
 * length would exceed INTERPRETER_MAX_DATA_LEN, or -ENOMEM on
 * allocation failure (the previous buffer stays valid in that case).
 */
static ssize_t bytecode_reserve_data(struct bytecode_runtime *runtime,
		size_t align, size_t len)
{
	ssize_t ret;
	size_t padding = offset_align(runtime->data_len, align);
	size_t new_len = runtime->data_len + padding + len;
	size_t new_alloc_len = new_len;
	size_t old_alloc_len = runtime->data_alloc_len;

	if (new_len > INTERPRETER_MAX_DATA_LEN)
		return -EINVAL;

	if (new_alloc_len > old_alloc_len) {
		char *newptr;

		/* Grow to the next power of two, at least doubling the buffer. */
		new_alloc_len =
			max_t(size_t, 1U << get_count_order(new_alloc_len), old_alloc_len << 1);
		newptr = krealloc(runtime->data, new_alloc_len, GFP_KERNEL);
		if (!newptr)
			return -ENOMEM;
		runtime->data = newptr;
		/* Zero the newly allocated tail (krealloc preserves old content). */
		memset(&runtime->data[old_alloc_len], 0, new_alloc_len - old_alloc_len);
		runtime->data_alloc_len = new_alloc_len;
	}
	/* Consume the alignment padding, then hand out the aligned offset. */
	runtime->data_len += padding;
	ret = runtime->data_len;
	runtime->data_len += len;
	return ret;
}
45
46 static ssize_t bytecode_push_data(struct bytecode_runtime *runtime,
47 const void *p, size_t align, size_t len)
48 {
49 ssize_t offset;
50
51 offset = bytecode_reserve_data(runtime, align, len);
52 if (offset < 0)
53 return -ENOMEM;
54 memcpy(&runtime->data[offset], p, len);
55 return offset;
56 }
57
58 static int specialize_load_field(struct vstack_entry *stack_top,
59 struct load_op *insn)
60 {
61 int ret;
62
63 switch (stack_top->load.type) {
64 case LOAD_OBJECT:
65 break;
66 case LOAD_ROOT_CONTEXT:
67 case LOAD_ROOT_APP_CONTEXT:
68 case LOAD_ROOT_PAYLOAD:
69 default:
70 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
71 ret = -EINVAL;
72 goto end;
73 }
74 switch (stack_top->load.object_type) {
75 case OBJECT_TYPE_S8:
76 dbg_printk("op load field s8\n");
77 stack_top->type = REG_S64;
78 if (!stack_top->load.rev_bo)
79 insn->op = BYTECODE_OP_LOAD_FIELD_S8;
80 break;
81 case OBJECT_TYPE_S16:
82 dbg_printk("op load field s16\n");
83 stack_top->type = REG_S64;
84 if (!stack_top->load.rev_bo)
85 insn->op = BYTECODE_OP_LOAD_FIELD_S16;
86 break;
87 case OBJECT_TYPE_S32:
88 dbg_printk("op load field s32\n");
89 stack_top->type = REG_S64;
90 if (!stack_top->load.rev_bo)
91 insn->op = BYTECODE_OP_LOAD_FIELD_S32;
92 break;
93 case OBJECT_TYPE_S64:
94 dbg_printk("op load field s64\n");
95 stack_top->type = REG_S64;
96 if (!stack_top->load.rev_bo)
97 insn->op = BYTECODE_OP_LOAD_FIELD_S64;
98 break;
99 case OBJECT_TYPE_SIGNED_ENUM:
100 dbg_printk("op load field signed enumeration\n");
101 stack_top->type = REG_PTR;
102 break;
103 case OBJECT_TYPE_U8:
104 dbg_printk("op load field u8\n");
105 stack_top->type = REG_S64;
106 insn->op = BYTECODE_OP_LOAD_FIELD_U8;
107 break;
108 case OBJECT_TYPE_U16:
109 dbg_printk("op load field u16\n");
110 stack_top->type = REG_S64;
111 if (!stack_top->load.rev_bo)
112 insn->op = BYTECODE_OP_LOAD_FIELD_U16;
113 break;
114 case OBJECT_TYPE_U32:
115 dbg_printk("op load field u32\n");
116 stack_top->type = REG_S64;
117 if (!stack_top->load.rev_bo)
118 insn->op = BYTECODE_OP_LOAD_FIELD_U32;
119 break;
120 case OBJECT_TYPE_U64:
121 dbg_printk("op load field u64\n");
122 stack_top->type = REG_S64;
123 if (!stack_top->load.rev_bo)
124 insn->op = BYTECODE_OP_LOAD_FIELD_U64;
125 break;
126 case OBJECT_TYPE_UNSIGNED_ENUM:
127 dbg_printk("op load field unsigned enumeration\n");
128 stack_top->type = REG_PTR;
129 break;
130 case OBJECT_TYPE_DOUBLE:
131 printk(KERN_WARNING "LTTng: bytecode: Double type unsupported\n\n");
132 ret = -EINVAL;
133 goto end;
134 case OBJECT_TYPE_STRING:
135 dbg_printk("op load field string\n");
136 stack_top->type = REG_STRING;
137 insn->op = BYTECODE_OP_LOAD_FIELD_STRING;
138 break;
139 case OBJECT_TYPE_STRING_SEQUENCE:
140 dbg_printk("op load field string sequence\n");
141 stack_top->type = REG_STRING;
142 insn->op = BYTECODE_OP_LOAD_FIELD_SEQUENCE;
143 break;
144 case OBJECT_TYPE_DYNAMIC:
145 ret = -EINVAL;
146 goto end;
147 case OBJECT_TYPE_SEQUENCE:
148 case OBJECT_TYPE_ARRAY:
149 case OBJECT_TYPE_STRUCT:
150 case OBJECT_TYPE_VARIANT:
151 printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
152 ret = -EINVAL;
153 goto end;
154 }
155 return 0;
156
157 end:
158 return ret;
159 }
160
161 static int specialize_get_index_object_type(enum object_type *otype,
162 int signedness, uint32_t elem_len)
163 {
164 switch (elem_len) {
165 case 8:
166 if (signedness)
167 *otype = OBJECT_TYPE_S8;
168 else
169 *otype = OBJECT_TYPE_U8;
170 break;
171 case 16:
172 if (signedness)
173 *otype = OBJECT_TYPE_S16;
174 else
175 *otype = OBJECT_TYPE_U16;
176 break;
177 case 32:
178 if (signedness)
179 *otype = OBJECT_TYPE_S32;
180 else
181 *otype = OBJECT_TYPE_U32;
182 break;
183 case 64:
184 if (signedness)
185 *otype = OBJECT_TYPE_S64;
186 else
187 *otype = OBJECT_TYPE_U64;
188 break;
189 default:
190 return -EINVAL;
191 }
192 return 0;
193 }
194
/*
 * Specialize a get_index operation applied to the object on top of the
 * virtual stack: validate that the indexed object is an array or
 * sequence of bytewise integers, compute the element byte offset and
 * type, push the resulting bytecode_get_index_data into the runtime
 * data area, and relocate the instruction's index operand to point at
 * that data.
 *
 * @runtime: bytecode runtime owning the specialized data area.
 * @insn: the get_index load instruction being specialized.
 * @index: constant element index operand.
 * @stack_top: virtual stack top describing the indexed object;
 *             rewritten in place to describe the element.
 * @idx_len: size in bytes of the instruction's index operand (2 or 8).
 *
 * Returns 0 on success, -EINVAL on invalid types, out-of-bound index,
 * or data area overflow.
 */
static int specialize_get_index(struct bytecode_runtime *runtime,
		struct load_op *insn, uint64_t index,
		struct vstack_entry *stack_top,
		int idx_len)
{
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	memset(&gid, 0, sizeof(gid));
	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		switch (stack_top->load.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const struct lttng_kernel_event_field *field;
			const struct lttng_kernel_type_array *array_type;
			const struct lttng_kernel_type_integer *integer_type;
			uint32_t elem_len, num_elems;
			int signedness;

			field = stack_top->load.field;
			array_type = lttng_kernel_get_type_array(field->type);
			/* Only arrays of bytewise integers can be indexed. */
			if (!lttng_kernel_type_is_bytewise_integer(array_type->elem_type)) {
				ret = -EINVAL;
				goto end;
			}
			integer_type = lttng_kernel_get_type_integer(array_type->elem_type);
			num_elems = array_type->length;
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			/* Array length is statically known: bound-check here. */
			if (index >= num_elems) {
				ret = -EINVAL;
				goto end;
			}
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			/* elem_len is in bits; offsets are in bytes. */
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.array_len = num_elems * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const struct lttng_kernel_event_field *field;
			const struct lttng_kernel_type_sequence *sequence_type;
			const struct lttng_kernel_type_integer *integer_type;
			uint32_t elem_len;
			int signedness;

			field = stack_top->load.field;
			sequence_type = lttng_kernel_get_type_sequence(field->type);
			/* Only sequences of bytewise integers can be indexed. */
			if (!lttng_kernel_type_is_bytewise_integer(sequence_type->elem_type)) {
				ret = -EINVAL;
				goto end;
			}
			integer_type = lttng_kernel_get_type_integer(sequence_type->elem_type);
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			/*
			 * Sequence length is only known at runtime: no static
			 * bound check is possible here (unlike arrays above).
			 */
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			/* Only generated by the specialize phase. */
		case OBJECT_TYPE_VARIANT:	/* Fall-through */
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d",
				(int) stack_top->load.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
		printk(KERN_WARNING "LTTng: bytecode: Index lookup for root field not implemented yet.\n");
		ret = -EINVAL;
		goto end;
	}
	/* Store the computed descriptor and relocate the index operand. */
	data_offset = bytecode_push_data(runtime, &gid,
			__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	switch (idx_len) {
	case 2:
		((struct get_index_u16 *) insn->data)->index = data_offset;
		break;
	case 8:
		((struct get_index_u64 *) insn->data)->index = data_offset;
		break;
	default:
		ret = -EINVAL;
		goto end;
	}

	return 0;

end:
	return ret;
}
312
313 static int specialize_context_lookup_name(struct lttng_kernel_ctx *ctx,
314 struct bytecode_runtime *bytecode,
315 struct load_op *insn)
316 {
317 uint16_t offset;
318 const char *name;
319
320 offset = ((struct get_symbol *) insn->data)->offset;
321 name = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + offset;
322 return lttng_kernel_get_context_index(ctx, name);
323 }
324
/*
 * Map an event field's type description to the object type used by the
 * interpreter's virtual stack load, filling @load accordingly.
 *
 * @field: field whose type is being mapped.
 * @load: output virtual stack load descriptor.
 * @is_context: true when the field comes from a context (context arrays
 *              and sequences are exposed as strings).
 *
 * Returns 0 on success, -EINVAL for unsupported or unknown types.
 */
static int specialize_load_object(const struct lttng_kernel_event_field *field,
		struct vstack_load *load, bool is_context)
{
	load->type = LOAD_OBJECT;

	switch (field->type->type) {
	case lttng_kernel_type_integer:
		/*
		 * Integers are widened to 64-bit on the interpreter stack,
		 * regardless of their declared size.
		 */
		if (lttng_kernel_get_type_integer(field->type)->signedness)
			load->object_type = OBJECT_TYPE_S64;
		else
			load->object_type = OBJECT_TYPE_U64;
		load->rev_bo = false;
		break;
	case lttng_kernel_type_enum:
	{
		const struct lttng_kernel_type_enum *enum_type = lttng_kernel_get_type_enum(field->type);
		const struct lttng_kernel_type_integer *integer_type = lttng_kernel_get_type_integer(enum_type->container_type);

		/* Enum signedness follows its integer container type. */
		if (integer_type->signedness)
			load->object_type = OBJECT_TYPE_SIGNED_ENUM;
		else
			load->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
		load->rev_bo = false;
		break;
	}
	case lttng_kernel_type_array:
	{
		const struct lttng_kernel_type_array *array_type = lttng_kernel_get_type_array(field->type);

		if (!lttng_kernel_type_is_bytewise_integer(array_type->elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (is_context) {
			/* Context arrays are exposed as strings. */
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			/* Encoding distinguishes raw arrays from text. */
			if (array_type->encoding == lttng_kernel_string_encoding_none) {
				load->object_type = OBJECT_TYPE_ARRAY;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	}
	case lttng_kernel_type_sequence:
	{
		const struct lttng_kernel_type_sequence *sequence_type = lttng_kernel_get_type_sequence(field->type);

		if (!lttng_kernel_type_is_bytewise_integer(sequence_type->elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (is_context) {
			/* Context sequences are exposed as strings. */
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (sequence_type->encoding == lttng_kernel_string_encoding_none) {
				load->object_type = OBJECT_TYPE_SEQUENCE;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	}
	case lttng_kernel_type_string:
		load->object_type = OBJECT_TYPE_STRING;
		break;
	case lttng_kernel_type_struct:
		printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
		return -EINVAL;
	case lttng_kernel_type_variant:
		printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d", (int) field->type->type);
		return -EINVAL;
	}
	return 0;
}
405
/*
 * Specialize a get_symbol lookup against the context: resolve the field
 * name to a context index, derive its load object type, and rewrite the
 * instruction into a BYTECODE_OP_GET_INDEX_U16 whose operand points at
 * a bytecode_get_index_data pushed into the runtime data area.
 *
 * Returns 0 on success, -ENOENT when the name is not a known context
 * field, or a negative error code on failure.
 */
static int specialize_context_lookup(struct lttng_kernel_ctx *ctx,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	int idx, ret;
	const struct lttng_kernel_ctx_field *ctx_field;
	const struct lttng_kernel_event_field *field;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	idx = specialize_context_lookup_name(ctx, runtime, insn);
	if (idx < 0) {
		return -ENOENT;
	}
	/*
	 * NOTE(review): the name lookup above uses @ctx, but the field is
	 * fetched from the global lttng_static_ctx — confirm both describe
	 * the same context layout.
	 */
	ctx_field = &lttng_static_ctx->fields[idx];
	field = ctx_field->event_field;
	ret = specialize_load_object(field, load, true);
	if (ret)
		return ret;
	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.ctx_index = idx;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
			__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		return -EINVAL;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	return 0;
}
441
/*
 * Specialize a get_symbol lookup against the event payload: locate the
 * named field in the event descriptor, compute its byte offset within
 * the interpreter's flattened payload stack layout, and rewrite the
 * instruction into a BYTECODE_OP_GET_INDEX_U16 whose operand points at
 * a bytecode_get_index_data pushed into the runtime data area.
 *
 * Returns 0 on success, -EINVAL when the field is missing or of an
 * unsupported type.
 */
static int specialize_payload_lookup(const struct lttng_kernel_event_desc *event_desc,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	const char *name;
	uint16_t offset;
	unsigned int i, nr_fields;
	bool found = false;
	uint32_t field_offset = 0;
	const struct lttng_kernel_event_field *field;
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	nr_fields = event_desc->nr_fields;
	/* Resolve the symbol name through the bytecode relocation table. */
	offset = ((struct get_symbol *) insn->data)->offset;
	name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
	for (i = 0; i < nr_fields; i++) {
		field = event_desc->fields[i];
		/* Fields excluded from filtering don't occupy stack slots. */
		if (field->nofilter) {
			continue;
		}
		if (!strcmp(field->name, name)) {
			found = true;
			break;
		}
		/* compute field offset on stack */
		switch (field->type->type) {
		case lttng_kernel_type_integer:
		case lttng_kernel_type_enum:
			field_offset += sizeof(int64_t);
			break;
		case lttng_kernel_type_array:
		case lttng_kernel_type_sequence:
			/* Length word followed by a data pointer. */
			field_offset += sizeof(unsigned long);
			field_offset += sizeof(void *);
			break;
		case lttng_kernel_type_string:
			field_offset += sizeof(void *);
			break;
		default:
			ret = -EINVAL;
			goto end;
		}
	}
	if (!found) {
		ret = -EINVAL;
		goto end;
	}

	ret = specialize_load_object(field, load, false);
	if (ret)
		goto end;

	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.offset = field_offset;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
			__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	ret = 0;
end:
	return ret;
}
515
516 int lttng_bytecode_specialize(const struct lttng_kernel_event_desc *event_desc,
517 struct bytecode_runtime *bytecode)
518 {
519 void *pc, *next_pc, *start_pc;
520 int ret = -EINVAL;
521 struct vstack _stack;
522 struct vstack *stack = &_stack;
523 struct lttng_kernel_ctx *ctx = bytecode->p.ctx;
524
525 vstack_init(stack);
526
527 start_pc = &bytecode->code[0];
528 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
529 pc = next_pc) {
530 switch (*(bytecode_opcode_t *) pc) {
531 case BYTECODE_OP_UNKNOWN:
532 default:
533 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
534 (unsigned int) *(bytecode_opcode_t *) pc);
535 ret = -EINVAL;
536 goto end;
537
538 case BYTECODE_OP_RETURN:
539 case BYTECODE_OP_RETURN_S64:
540 ret = 0;
541 goto end;
542
543 /* binary */
544 case BYTECODE_OP_MUL:
545 case BYTECODE_OP_DIV:
546 case BYTECODE_OP_MOD:
547 case BYTECODE_OP_PLUS:
548 case BYTECODE_OP_MINUS:
549 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
550 (unsigned int) *(bytecode_opcode_t *) pc);
551 ret = -EINVAL;
552 goto end;
553
554 case BYTECODE_OP_EQ:
555 {
556 struct binary_op *insn = (struct binary_op *) pc;
557
558 switch(vstack_ax(stack)->type) {
559 default:
560 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
561 ret = -EINVAL;
562 goto end;
563
564 case REG_STRING:
565 if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
566 insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
567 else
568 insn->op = BYTECODE_OP_EQ_STRING;
569 break;
570 case REG_STAR_GLOB_STRING:
571 insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
572 break;
573 case REG_S64:
574 if (vstack_bx(stack)->type == REG_S64)
575 insn->op = BYTECODE_OP_EQ_S64;
576 else
577 insn->op = BYTECODE_OP_EQ_DOUBLE_S64;
578 break;
579 case REG_DOUBLE:
580 if (vstack_bx(stack)->type == REG_S64)
581 insn->op = BYTECODE_OP_EQ_S64_DOUBLE;
582 else
583 insn->op = BYTECODE_OP_EQ_DOUBLE;
584 break;
585 }
586 /* Pop 2, push 1 */
587 if (vstack_pop(stack)) {
588 ret = -EINVAL;
589 goto end;
590 }
591 vstack_ax(stack)->type = REG_S64;
592 next_pc += sizeof(struct binary_op);
593 break;
594 }
595
596 case BYTECODE_OP_NE:
597 {
598 struct binary_op *insn = (struct binary_op *) pc;
599
600 switch(vstack_ax(stack)->type) {
601 default:
602 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
603 ret = -EINVAL;
604 goto end;
605
606 case REG_STRING:
607 if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
608 insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
609 else
610 insn->op = BYTECODE_OP_NE_STRING;
611 break;
612 case REG_STAR_GLOB_STRING:
613 insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
614 break;
615 case REG_S64:
616 if (vstack_bx(stack)->type == REG_S64)
617 insn->op = BYTECODE_OP_NE_S64;
618 else
619 insn->op = BYTECODE_OP_NE_DOUBLE_S64;
620 break;
621 case REG_DOUBLE:
622 if (vstack_bx(stack)->type == REG_S64)
623 insn->op = BYTECODE_OP_NE_S64_DOUBLE;
624 else
625 insn->op = BYTECODE_OP_NE_DOUBLE;
626 break;
627 }
628 /* Pop 2, push 1 */
629 if (vstack_pop(stack)) {
630 ret = -EINVAL;
631 goto end;
632 }
633 vstack_ax(stack)->type = REG_S64;
634 next_pc += sizeof(struct binary_op);
635 break;
636 }
637
638 case BYTECODE_OP_GT:
639 {
640 struct binary_op *insn = (struct binary_op *) pc;
641
642 switch(vstack_ax(stack)->type) {
643 default:
644 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
645 ret = -EINVAL;
646 goto end;
647
648 case REG_STAR_GLOB_STRING:
649 printk(KERN_WARNING "LTTng: bytecode: invalid register type for '>' binary operator\n");
650 ret = -EINVAL;
651 goto end;
652 case REG_STRING:
653 insn->op = BYTECODE_OP_GT_STRING;
654 break;
655 case REG_S64:
656 if (vstack_bx(stack)->type == REG_S64)
657 insn->op = BYTECODE_OP_GT_S64;
658 else
659 insn->op = BYTECODE_OP_GT_DOUBLE_S64;
660 break;
661 case REG_DOUBLE:
662 if (vstack_bx(stack)->type == REG_S64)
663 insn->op = BYTECODE_OP_GT_S64_DOUBLE;
664 else
665 insn->op = BYTECODE_OP_GT_DOUBLE;
666 break;
667 }
668 /* Pop 2, push 1 */
669 if (vstack_pop(stack)) {
670 ret = -EINVAL;
671 goto end;
672 }
673 vstack_ax(stack)->type = REG_S64;
674 next_pc += sizeof(struct binary_op);
675 break;
676 }
677
678 case BYTECODE_OP_LT:
679 {
680 struct binary_op *insn = (struct binary_op *) pc;
681
682 switch(vstack_ax(stack)->type) {
683 default:
684 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
685 ret = -EINVAL;
686 goto end;
687
688 case REG_STAR_GLOB_STRING:
689 printk(KERN_WARNING "LTTng: bytecode: invalid register type for '<' binary operator\n");
690 ret = -EINVAL;
691 goto end;
692 case REG_STRING:
693 insn->op = BYTECODE_OP_LT_STRING;
694 break;
695 case REG_S64:
696 if (vstack_bx(stack)->type == REG_S64)
697 insn->op = BYTECODE_OP_LT_S64;
698 else
699 insn->op = BYTECODE_OP_LT_DOUBLE_S64;
700 break;
701 case REG_DOUBLE:
702 if (vstack_bx(stack)->type == REG_S64)
703 insn->op = BYTECODE_OP_LT_S64_DOUBLE;
704 else
705 insn->op = BYTECODE_OP_LT_DOUBLE;
706 break;
707 }
708 /* Pop 2, push 1 */
709 if (vstack_pop(stack)) {
710 ret = -EINVAL;
711 goto end;
712 }
713 vstack_ax(stack)->type = REG_S64;
714 next_pc += sizeof(struct binary_op);
715 break;
716 }
717
718 case BYTECODE_OP_GE:
719 {
720 struct binary_op *insn = (struct binary_op *) pc;
721
722 switch(vstack_ax(stack)->type) {
723 default:
724 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
725 ret = -EINVAL;
726 goto end;
727
728 case REG_STAR_GLOB_STRING:
729 printk(KERN_WARNING "LTTng: bytecode: invalid register type for '>=' binary operator\n");
730 ret = -EINVAL;
731 goto end;
732 case REG_STRING:
733 insn->op = BYTECODE_OP_GE_STRING;
734 break;
735 case REG_S64:
736 if (vstack_bx(stack)->type == REG_S64)
737 insn->op = BYTECODE_OP_GE_S64;
738 else
739 insn->op = BYTECODE_OP_GE_DOUBLE_S64;
740 break;
741 case REG_DOUBLE:
742 if (vstack_bx(stack)->type == REG_S64)
743 insn->op = BYTECODE_OP_GE_S64_DOUBLE;
744 else
745 insn->op = BYTECODE_OP_GE_DOUBLE;
746 break;
747 }
748 /* Pop 2, push 1 */
749 if (vstack_pop(stack)) {
750 ret = -EINVAL;
751 goto end;
752 }
753 vstack_ax(stack)->type = REG_S64;
754 next_pc += sizeof(struct binary_op);
755 break;
756 }
757 case BYTECODE_OP_LE:
758 {
759 struct binary_op *insn = (struct binary_op *) pc;
760
761 switch(vstack_ax(stack)->type) {
762 default:
763 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
764 ret = -EINVAL;
765 goto end;
766
767 case REG_STAR_GLOB_STRING:
768 printk(KERN_WARNING "LTTng: bytecode: invalid register type for '<=' binary operator\n");
769 ret = -EINVAL;
770 goto end;
771 case REG_STRING:
772 insn->op = BYTECODE_OP_LE_STRING;
773 break;
774 case REG_S64:
775 if (vstack_bx(stack)->type == REG_S64)
776 insn->op = BYTECODE_OP_LE_S64;
777 else
778 insn->op = BYTECODE_OP_LE_DOUBLE_S64;
779 break;
780 case REG_DOUBLE:
781 if (vstack_bx(stack)->type == REG_S64)
782 insn->op = BYTECODE_OP_LE_S64_DOUBLE;
783 else
784 insn->op = BYTECODE_OP_LE_DOUBLE;
785 break;
786 }
787 vstack_ax(stack)->type = REG_S64;
788 next_pc += sizeof(struct binary_op);
789 break;
790 }
791
792 case BYTECODE_OP_EQ_STRING:
793 case BYTECODE_OP_NE_STRING:
794 case BYTECODE_OP_GT_STRING:
795 case BYTECODE_OP_LT_STRING:
796 case BYTECODE_OP_GE_STRING:
797 case BYTECODE_OP_LE_STRING:
798 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
799 case BYTECODE_OP_NE_STAR_GLOB_STRING:
800 case BYTECODE_OP_EQ_S64:
801 case BYTECODE_OP_NE_S64:
802 case BYTECODE_OP_GT_S64:
803 case BYTECODE_OP_LT_S64:
804 case BYTECODE_OP_GE_S64:
805 case BYTECODE_OP_LE_S64:
806 case BYTECODE_OP_EQ_DOUBLE:
807 case BYTECODE_OP_NE_DOUBLE:
808 case BYTECODE_OP_GT_DOUBLE:
809 case BYTECODE_OP_LT_DOUBLE:
810 case BYTECODE_OP_GE_DOUBLE:
811 case BYTECODE_OP_LE_DOUBLE:
812 case BYTECODE_OP_EQ_DOUBLE_S64:
813 case BYTECODE_OP_NE_DOUBLE_S64:
814 case BYTECODE_OP_GT_DOUBLE_S64:
815 case BYTECODE_OP_LT_DOUBLE_S64:
816 case BYTECODE_OP_GE_DOUBLE_S64:
817 case BYTECODE_OP_LE_DOUBLE_S64:
818 case BYTECODE_OP_EQ_S64_DOUBLE:
819 case BYTECODE_OP_NE_S64_DOUBLE:
820 case BYTECODE_OP_GT_S64_DOUBLE:
821 case BYTECODE_OP_LT_S64_DOUBLE:
822 case BYTECODE_OP_GE_S64_DOUBLE:
823 case BYTECODE_OP_LE_S64_DOUBLE:
824 case BYTECODE_OP_BIT_RSHIFT:
825 case BYTECODE_OP_BIT_LSHIFT:
826 case BYTECODE_OP_BIT_AND:
827 case BYTECODE_OP_BIT_OR:
828 case BYTECODE_OP_BIT_XOR:
829 {
830 /* Pop 2, push 1 */
831 if (vstack_pop(stack)) {
832 ret = -EINVAL;
833 goto end;
834 }
835 vstack_ax(stack)->type = REG_S64;
836 next_pc += sizeof(struct binary_op);
837 break;
838 }
839
840 /* unary */
841 case BYTECODE_OP_UNARY_PLUS:
842 {
843 struct unary_op *insn = (struct unary_op *) pc;
844
845 switch(vstack_ax(stack)->type) {
846 default:
847 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
848 ret = -EINVAL;
849 goto end;
850
851 case REG_S64:
852 insn->op = BYTECODE_OP_UNARY_PLUS_S64;
853 break;
854 case REG_DOUBLE:
855 insn->op = BYTECODE_OP_UNARY_PLUS_DOUBLE;
856 break;
857 }
858 /* Pop 1, push 1 */
859 next_pc += sizeof(struct unary_op);
860 break;
861 }
862
863 case BYTECODE_OP_UNARY_MINUS:
864 {
865 struct unary_op *insn = (struct unary_op *) pc;
866
867 switch(vstack_ax(stack)->type) {
868 default:
869 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
870 ret = -EINVAL;
871 goto end;
872
873 case REG_S64:
874 insn->op = BYTECODE_OP_UNARY_MINUS_S64;
875 break;
876 case REG_DOUBLE:
877 insn->op = BYTECODE_OP_UNARY_MINUS_DOUBLE;
878 break;
879 }
880 /* Pop 1, push 1 */
881 next_pc += sizeof(struct unary_op);
882 break;
883 }
884
885 case BYTECODE_OP_UNARY_NOT:
886 {
887 struct unary_op *insn = (struct unary_op *) pc;
888
889 switch(vstack_ax(stack)->type) {
890 default:
891 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
892 ret = -EINVAL;
893 goto end;
894
895 case REG_S64:
896 insn->op = BYTECODE_OP_UNARY_NOT_S64;
897 break;
898 case REG_DOUBLE:
899 insn->op = BYTECODE_OP_UNARY_NOT_DOUBLE;
900 break;
901 }
902 /* Pop 1, push 1 */
903 next_pc += sizeof(struct unary_op);
904 break;
905 }
906
907 case BYTECODE_OP_UNARY_BIT_NOT:
908 {
909 /* Pop 1, push 1 */
910 next_pc += sizeof(struct unary_op);
911 break;
912 }
913
914 case BYTECODE_OP_UNARY_PLUS_S64:
915 case BYTECODE_OP_UNARY_MINUS_S64:
916 case BYTECODE_OP_UNARY_NOT_S64:
917 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
918 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
919 case BYTECODE_OP_UNARY_NOT_DOUBLE:
920 {
921 /* Pop 1, push 1 */
922 next_pc += sizeof(struct unary_op);
923 break;
924 }
925
926 /* logical */
927 case BYTECODE_OP_AND:
928 case BYTECODE_OP_OR:
929 {
930 /* Continue to next instruction */
931 /* Pop 1 when jump not taken */
932 if (vstack_pop(stack)) {
933 ret = -EINVAL;
934 goto end;
935 }
936 next_pc += sizeof(struct logical_op);
937 break;
938 }
939
940 /* load field ref */
941 case BYTECODE_OP_LOAD_FIELD_REF:
942 {
943 printk(KERN_WARNING "LTTng: bytecode: Unknown field ref type\n");
944 ret = -EINVAL;
945 goto end;
946 }
947 /* get context ref */
948 case BYTECODE_OP_GET_CONTEXT_REF:
949 {
950 printk(KERN_WARNING "LTTng: bytecode: Unknown get context ref type\n");
951 ret = -EINVAL;
952 goto end;
953 }
954 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
955 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
956 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
957 case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING:
958 case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE:
959 {
960 if (vstack_push(stack)) {
961 ret = -EINVAL;
962 goto end;
963 }
964 vstack_ax(stack)->type = REG_STRING;
965 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
966 break;
967 }
968 case BYTECODE_OP_LOAD_FIELD_REF_S64:
969 case BYTECODE_OP_GET_CONTEXT_REF_S64:
970 {
971 if (vstack_push(stack)) {
972 ret = -EINVAL;
973 goto end;
974 }
975 vstack_ax(stack)->type = REG_S64;
976 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
977 break;
978 }
979 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
980 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
981 {
982 if (vstack_push(stack)) {
983 ret = -EINVAL;
984 goto end;
985 }
986 vstack_ax(stack)->type = REG_DOUBLE;
987 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
988 break;
989 }
990
991 /* load from immediate operand */
992 case BYTECODE_OP_LOAD_STRING:
993 {
994 struct load_op *insn = (struct load_op *) pc;
995
996 if (vstack_push(stack)) {
997 ret = -EINVAL;
998 goto end;
999 }
1000 vstack_ax(stack)->type = REG_STRING;
1001 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1002 break;
1003 }
1004
1005 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
1006 {
1007 struct load_op *insn = (struct load_op *) pc;
1008
1009 if (vstack_push(stack)) {
1010 ret = -EINVAL;
1011 goto end;
1012 }
1013 vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
1014 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1015 break;
1016 }
1017
1018 case BYTECODE_OP_LOAD_S64:
1019 {
1020 if (vstack_push(stack)) {
1021 ret = -EINVAL;
1022 goto end;
1023 }
1024 vstack_ax(stack)->type = REG_S64;
1025 next_pc += sizeof(struct load_op)
1026 + sizeof(struct literal_numeric);
1027 break;
1028 }
1029
1030 case BYTECODE_OP_LOAD_DOUBLE:
1031 {
1032 if (vstack_push(stack)) {
1033 ret = -EINVAL;
1034 goto end;
1035 }
1036 vstack_ax(stack)->type = REG_DOUBLE;
1037 next_pc += sizeof(struct load_op)
1038 + sizeof(struct literal_double);
1039 break;
1040 }
1041
1042 /* cast */
1043 case BYTECODE_OP_CAST_TO_S64:
1044 {
1045 struct cast_op *insn = (struct cast_op *) pc;
1046
1047 switch (vstack_ax(stack)->type) {
1048 default:
1049 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
1050 ret = -EINVAL;
1051 goto end;
1052
1053 case REG_STRING:
1054 case REG_STAR_GLOB_STRING:
1055 printk(KERN_WARNING "LTTng: bytecode: Cast op can only be applied to numeric or floating point registers\n");
1056 ret = -EINVAL;
1057 goto end;
1058 case REG_S64:
1059 insn->op = BYTECODE_OP_CAST_NOP;
1060 break;
1061 case REG_DOUBLE:
1062 insn->op = BYTECODE_OP_CAST_DOUBLE_TO_S64;
1063 break;
1064 }
1065 /* Pop 1, push 1 */
1066 vstack_ax(stack)->type = REG_S64;
1067 next_pc += sizeof(struct cast_op);
1068 break;
1069 }
1070 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
1071 {
1072 /* Pop 1, push 1 */
1073 vstack_ax(stack)->type = REG_S64;
1074 next_pc += sizeof(struct cast_op);
1075 break;
1076 }
1077 case BYTECODE_OP_CAST_NOP:
1078 {
1079 next_pc += sizeof(struct cast_op);
1080 break;
1081 }
1082
1083 /*
1084 * Instructions for recursive traversal through composed types.
1085 */
1086 case BYTECODE_OP_GET_CONTEXT_ROOT:
1087 {
1088 if (vstack_push(stack)) {
1089 ret = -EINVAL;
1090 goto end;
1091 }
1092 vstack_ax(stack)->type = REG_PTR;
1093 vstack_ax(stack)->load.type = LOAD_ROOT_CONTEXT;
1094 next_pc += sizeof(struct load_op);
1095 break;
1096 }
1097 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
1098 {
1099 if (vstack_push(stack)) {
1100 ret = -EINVAL;
1101 goto end;
1102 }
1103 vstack_ax(stack)->type = REG_PTR;
1104 vstack_ax(stack)->load.type = LOAD_ROOT_APP_CONTEXT;
1105 next_pc += sizeof(struct load_op);
1106 break;
1107 }
1108 case BYTECODE_OP_GET_PAYLOAD_ROOT:
1109 {
1110 if (vstack_push(stack)) {
1111 ret = -EINVAL;
1112 goto end;
1113 }
1114 vstack_ax(stack)->type = REG_PTR;
1115 vstack_ax(stack)->load.type = LOAD_ROOT_PAYLOAD;
1116 next_pc += sizeof(struct load_op);
1117 break;
1118 }
1119
1120 case BYTECODE_OP_LOAD_FIELD:
1121 {
1122 struct load_op *insn = (struct load_op *) pc;
1123
1124 WARN_ON_ONCE(vstack_ax(stack)->type != REG_PTR);
1125 /* Pop 1, push 1 */
1126 ret = specialize_load_field(vstack_ax(stack), insn);
1127 if (ret)
1128 goto end;
1129
1130 next_pc += sizeof(struct load_op);
1131 break;
1132 }
1133
1134 case BYTECODE_OP_LOAD_FIELD_S8:
1135 case BYTECODE_OP_LOAD_FIELD_S16:
1136 case BYTECODE_OP_LOAD_FIELD_S32:
1137 case BYTECODE_OP_LOAD_FIELD_S64:
1138 case BYTECODE_OP_LOAD_FIELD_U8:
1139 case BYTECODE_OP_LOAD_FIELD_U16:
1140 case BYTECODE_OP_LOAD_FIELD_U32:
1141 case BYTECODE_OP_LOAD_FIELD_U64:
1142 {
1143 /* Pop 1, push 1 */
1144 vstack_ax(stack)->type = REG_S64;
1145 next_pc += sizeof(struct load_op);
1146 break;
1147 }
1148
1149 case BYTECODE_OP_LOAD_FIELD_STRING:
1150 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
1151 {
1152 /* Pop 1, push 1 */
1153 vstack_ax(stack)->type = REG_STRING;
1154 next_pc += sizeof(struct load_op);
1155 break;
1156 }
1157
1158 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
1159 {
1160 /* Pop 1, push 1 */
1161 vstack_ax(stack)->type = REG_DOUBLE;
1162 next_pc += sizeof(struct load_op);
1163 break;
1164 }
1165
1166 case BYTECODE_OP_GET_SYMBOL:
1167 {
1168 struct load_op *insn = (struct load_op *) pc;
1169
1170 dbg_printk("op get symbol\n");
1171 switch (vstack_ax(stack)->load.type) {
1172 case LOAD_OBJECT:
1173 printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
1174 ret = -EINVAL;
1175 goto end;
1176 case LOAD_ROOT_CONTEXT:
1177 /* Lookup context field. */
1178 ret = specialize_context_lookup(ctx, bytecode, insn,
1179 &vstack_ax(stack)->load);
1180 if (ret)
1181 goto end;
1182 break;
1183 case LOAD_ROOT_APP_CONTEXT:
1184 ret = -EINVAL;
1185 goto end;
1186 case LOAD_ROOT_PAYLOAD:
1187 /* Lookup event payload field. */
1188 ret = specialize_payload_lookup(event_desc,
1189 bytecode, insn,
1190 &vstack_ax(stack)->load);
1191 if (ret)
1192 goto end;
1193 break;
1194 }
1195 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1196 break;
1197 }
1198
1199 case BYTECODE_OP_GET_SYMBOL_FIELD:
1200 {
1201 /* Always generated by specialize phase. */
1202 ret = -EINVAL;
1203 goto end;
1204 }
1205
1206 case BYTECODE_OP_GET_INDEX_U16:
1207 {
1208 struct load_op *insn = (struct load_op *) pc;
1209 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1210
1211 dbg_printk("op get index u16\n");
1212 /* Pop 1, push 1 */
1213 ret = specialize_get_index(bytecode, insn, index->index,
1214 vstack_ax(stack), sizeof(*index));
1215 if (ret)
1216 goto end;
1217 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1218 break;
1219 }
1220
1221 case BYTECODE_OP_GET_INDEX_U64:
1222 {
1223 struct load_op *insn = (struct load_op *) pc;
1224 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1225
1226 dbg_printk("op get index u64\n");
1227 /* Pop 1, push 1 */
1228 ret = specialize_get_index(bytecode, insn, index->index,
1229 vstack_ax(stack), sizeof(*index));
1230 if (ret)
1231 goto end;
1232 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1233 break;
1234 }
1235
1236 }
1237 }
1238 end:
1239 return ret;
1240 }
This page took 0.054598 seconds and 4 git commands to generate.