/* SPDX-License-Identifier: MIT
 *
 * lttng-bytecode-specialize.c
 *
 * LTTng modules bytecode code specializer.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 */

#include <linux/slab.h>
#include <lttng/lttng-bytecode.h>
#include <lttng/align.h>

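/*
 * Reserve "len" bytes, aligned on "align", in the runtime data area,
 * growing the allocation geometrically with krealloc() as needed.
 * Returns the offset of the reserved space, or a negative error code.
 */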
static ssize_t bytecode_reserve_data(struct bytecode_runtime *runtime,
		size_t align, size_t len)
{
	ssize_t ret;
	size_t padding = offset_align(runtime->data_len, align);
	size_t new_len = runtime->data_len + padding + len;
	size_t new_alloc_len = new_len;
	size_t old_alloc_len = runtime->data_alloc_len;

	if (new_len > INTERPRETER_MAX_DATA_LEN)
		return -EINVAL;

	if (new_alloc_len > old_alloc_len) {
		char *newptr;

		new_alloc_len =
			max_t(size_t, 1U << get_count_order(new_alloc_len), old_alloc_len << 1);
		newptr = krealloc(runtime->data, new_alloc_len, GFP_KERNEL);
		if (!newptr)
			return -ENOMEM;
		runtime->data = newptr;
		/* Zero the newly allocated region beyond the old allocation length. */
		memset(&runtime->data[old_alloc_len], 0, new_alloc_len - old_alloc_len);
		runtime->data_alloc_len = new_alloc_len;
	}
	runtime->data_len += padding;
	ret = runtime->data_len;
	runtime->data_len += len;
	return ret;
}

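/*
 * Copy "len" bytes from "p" into the runtime data area at the next
 * suitably aligned offset. Returns the offset at which the data was
 * stored, or a negative error code.
 */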
static ssize_t bytecode_push_data(struct bytecode_runtime *runtime,
		const void *p, size_t align, size_t len)
{
	ssize_t offset;

	offset = bytecode_reserve_data(runtime, align, len);
	if (offset < 0)
		return offset;	/* Propagate -EINVAL or -ENOMEM from reserve. */
	memcpy(&runtime->data[offset], p, len);
	return offset;
}

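/*
 * Specialize a generic "load field" instruction into its typed variant
 * based on the object type found at the top of the virtual stack.
 */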
static int specialize_load_field(struct vstack_entry *stack_top,
		struct load_op *insn)
{
	int ret;

	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->load.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printk("op load field s8\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S8;
		break;
	case OBJECT_TYPE_S16:
		dbg_printk("op load field s16\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S16;
		break;
	case OBJECT_TYPE_S32:
		dbg_printk("op load field s32\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S32;
		break;
	case OBJECT_TYPE_S64:
		dbg_printk("op load field s64\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S64;
		break;
	case OBJECT_TYPE_U8:
		dbg_printk("op load field u8\n");
		stack_top->type = REG_S64;
		insn->op = BYTECODE_OP_LOAD_FIELD_U8;
		break;
	case OBJECT_TYPE_U16:
		dbg_printk("op load field u16\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U16;
		break;
	case OBJECT_TYPE_U32:
		dbg_printk("op load field u32\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U32;
		break;
	case OBJECT_TYPE_U64:
		dbg_printk("op load field u64\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U64;
		break;
	case OBJECT_TYPE_DOUBLE:
		printk(KERN_WARNING "LTTng: bytecode: Double type unsupported\n");
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_STRING:
		dbg_printk("op load field string\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_STRING;
		break;
	case OBJECT_TYPE_STRING_SEQUENCE:
		dbg_printk("op load field string sequence\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_SEQUENCE;
		break;
	case OBJECT_TYPE_DYNAMIC:
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}

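/*
 * Map an integer element length (in bits) and signedness to the
 * corresponding object type.
 */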
static int specialize_get_index_object_type(enum object_type *otype,
		int signedness, uint32_t elem_len)
{
	switch (elem_len) {
	case 8:
		if (signedness)
			*otype = OBJECT_TYPE_S8;
		else
			*otype = OBJECT_TYPE_U8;
		break;
	case 16:
		if (signedness)
			*otype = OBJECT_TYPE_S16;
		else
			*otype = OBJECT_TYPE_U16;
		break;
	case 32:
		if (signedness)
			*otype = OBJECT_TYPE_S32;
		else
			*otype = OBJECT_TYPE_U32;
		break;
	case 64:
		if (signedness)
			*otype = OBJECT_TYPE_S64;
		else
			*otype = OBJECT_TYPE_U64;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

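/*
 * Specialize a get-index instruction for array/sequence element access:
 * compute the element offset and type, push the resulting
 * bytecode_get_index_data into the runtime data area, and patch the
 * instruction's index operand with its offset.
 */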
static int specialize_get_index(struct bytecode_runtime *runtime,
		struct load_op *insn, uint64_t index,
		struct vstack_entry *stack_top,
		int idx_len)
{
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	memset(&gid, 0, sizeof(gid));
	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		switch (stack_top->load.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const struct lttng_integer_type *integer_type;
			const struct lttng_event_field *field;
			uint32_t elem_len, num_elems;
			int signedness;

			field = stack_top->load.field;
			if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
				ret = -EINVAL;
				goto end;
			}
			integer_type = &field->type.u.array_nestable.elem_type->u.integer;
			num_elems = field->type.u.array_nestable.length;
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			if (index >= num_elems) {
				ret = -EINVAL;
				goto end;
			}
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.array_len = num_elems * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const struct lttng_integer_type *integer_type;
			const struct lttng_event_field *field;
			uint32_t elem_len;
			int signedness;

			field = stack_top->load.field;
			if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
				ret = -EINVAL;
				goto end;
			}
			integer_type = &field->type.u.sequence_nestable.elem_type->u.integer;
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			/* Only generated by the specialize phase. */
		case OBJECT_TYPE_VARIANT:	/* Fall-through */
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d\n",
				(int) stack_top->load.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
		printk(KERN_WARNING "LTTng: bytecode: Index lookup for root field not implemented yet.\n");
		ret = -EINVAL;
		goto end;
	}
	data_offset = bytecode_push_data(runtime, &gid,
			__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	switch (idx_len) {
	case 2:
		((struct get_index_u16 *) insn->data)->index = data_offset;
		break;
	case 8:
		((struct get_index_u64 *) insn->data)->index = data_offset;
		break;
	default:
		ret = -EINVAL;
		goto end;
	}

	return 0;

end:
	return ret;
}

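/*
 * Resolve the context field name referenced by a get_symbol instruction
 * (relative to the bytecode relocation table) and return its index
 * within the context, or a negative error code.
 */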
static int specialize_context_lookup_name(struct lttng_ctx *ctx,
		struct bytecode_runtime *bytecode,
		struct load_op *insn)
{
	uint16_t offset;
	const char *name;

	offset = ((struct get_symbol *) insn->data)->offset;
	name = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + offset;
	return lttng_get_context_index(ctx, name);
}

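/*
 * Derive the object type of a field for the virtual stack "load"
 * state. Context fields (is_context) expose arrays and sequences as
 * strings.
 */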
static int specialize_load_object(const struct lttng_event_field *field,
		struct vstack_load *load, bool is_context)
{
	load->type = LOAD_OBJECT;

	switch (field->type.atype) {
	case atype_integer:
		if (field->type.u.integer.signedness)
			load->object_type = OBJECT_TYPE_S64;
		else
			load->object_type = OBJECT_TYPE_U64;
		load->rev_bo = false;
		break;
	case atype_enum_nestable:
	{
		const struct lttng_integer_type *itype =
			&field->type.u.enum_nestable.container_type->u.integer;

		if (itype->signedness)
			load->object_type = OBJECT_TYPE_S64;
		else
			load->object_type = OBJECT_TYPE_U64;
		load->rev_bo = false;
		break;
	}
	case atype_array_nestable:
		if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_ARRAY;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case atype_sequence_nestable:
		if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_SEQUENCE;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case atype_string:
		load->object_type = OBJECT_TYPE_STRING;
		break;
	case atype_struct_nestable:
		printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
		return -EINVAL;
	case atype_variant_nestable:
		printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d\n", (int) field->type.atype);
		return -EINVAL;
	}
	return 0;
}

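/*
 * Specialize a get_symbol lookup on the context root into a
 * BYTECODE_OP_GET_INDEX_U16 instruction referencing the context field.
 */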
static int specialize_context_lookup(struct lttng_ctx *ctx,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	int idx, ret;
	struct lttng_ctx_field *ctx_field;
	struct lttng_event_field *field;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	idx = specialize_context_lookup_name(ctx, runtime, insn);
	if (idx < 0) {
		return -ENOENT;
	}
	ctx_field = &lttng_static_ctx->fields[idx];
	field = &ctx_field->event_field;
	ret = specialize_load_object(field, load, true);
	if (ret)
		return ret;
	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.ctx_index = idx;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
			__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		return -EINVAL;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	return 0;
}

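/*
 * Specialize a get_symbol lookup on the event payload root: find the
 * named field, compute its offset on the interpreter stack, and turn
 * the instruction into a BYTECODE_OP_GET_INDEX_U16.
 */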
static int specialize_payload_lookup(const struct lttng_event_desc *event_desc,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	const char *name;
	uint16_t offset;
	unsigned int i, nr_fields;
	bool found = false;
	uint32_t field_offset = 0;
	const struct lttng_event_field *field;
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	nr_fields = event_desc->nr_fields;
	offset = ((struct get_symbol *) insn->data)->offset;
	name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
	for (i = 0; i < nr_fields; i++) {
		field = &event_desc->fields[i];
		if (field->nofilter) {
			continue;
		}
		if (!strcmp(field->name, name)) {
			found = true;
			break;
		}
		/* compute field offset on stack */
		switch (field->type.atype) {
		case atype_integer:
		case atype_enum_nestable:
			field_offset += sizeof(int64_t);
			break;
		case atype_array_nestable:
		case atype_sequence_nestable:
			field_offset += sizeof(unsigned long);
			field_offset += sizeof(void *);
			break;
		case atype_string:
			field_offset += sizeof(void *);
			break;
		default:
			ret = -EINVAL;
			goto end;
		}
	}
	if (!found) {
		ret = -EINVAL;
		goto end;
	}

	ret = specialize_load_object(field, load, false);
	if (ret)
		goto end;

	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.offset = field_offset;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
			__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	ret = 0;
end:
	return ret;
}

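/*
 * Specialization pass: a single walk over the bytecode which, while
 * tracking register types on a virtual stack, replaces generic
 * operations with their type-specialized variants and resolves
 * symbolic field references into indexed accesses. Returns 0 on
 * success, a negative error code otherwise.
 */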
int lttng_bytecode_specialize(const struct lttng_event_desc *event_desc,
		struct bytecode_runtime *bytecode)
{
	void *pc, *next_pc, *start_pc;
	int ret = -EINVAL;
	struct vstack _stack;
	struct vstack *stack = &_stack;
	struct lttng_ctx *ctx = bytecode->p.ctx;

	vstack_init(stack);

	start_pc = &bytecode->code[0];
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
			pc = next_pc) {
		switch (*(bytecode_opcode_t *) pc) {
		case BYTECODE_OP_UNKNOWN:
		default:
			printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
				(unsigned int) *(bytecode_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		case BYTECODE_OP_RETURN:
		case BYTECODE_OP_RETURN_S64:
			ret = 0;
			goto end;

		/* binary */
		case BYTECODE_OP_MUL:
		case BYTECODE_OP_DIV:
		case BYTECODE_OP_MOD:
		case BYTECODE_OP_PLUS:
		case BYTECODE_OP_MINUS:
			printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
				(unsigned int) *(bytecode_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		case BYTECODE_OP_EQ:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
				if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
					insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
				else
					insn->op = BYTECODE_OP_EQ_STRING;
				break;
			case REG_STAR_GLOB_STRING:
				insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_EQ_S64;
				else
					insn->op = BYTECODE_OP_EQ_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_EQ_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_EQ_DOUBLE;
				break;
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_NE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
				if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
					insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
				else
					insn->op = BYTECODE_OP_NE_STRING;
				break;
			case REG_STAR_GLOB_STRING:
				insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_NE_S64;
				else
					insn->op = BYTECODE_OP_NE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_NE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_NE_DOUBLE;
				break;
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_GT:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				printk(KERN_WARNING "LTTng: bytecode: invalid register type for '>' binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				insn->op = BYTECODE_OP_GT_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_GT_S64;
				else
					insn->op = BYTECODE_OP_GT_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_GT_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_GT_DOUBLE;
				break;
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_LT:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				printk(KERN_WARNING "LTTng: bytecode: invalid register type for '<' binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				insn->op = BYTECODE_OP_LT_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_LT_S64;
				else
					insn->op = BYTECODE_OP_LT_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_LT_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_LT_DOUBLE;
				break;
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_GE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				printk(KERN_WARNING "LTTng: bytecode: invalid register type for '>=' binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				insn->op = BYTECODE_OP_GE_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_GE_S64;
				else
					insn->op = BYTECODE_OP_GE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_GE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_GE_DOUBLE;
				break;
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_LE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				printk(KERN_WARNING "LTTng: bytecode: invalid register type for '<=' binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				insn->op = BYTECODE_OP_LE_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_LE_S64;
				else
					insn->op = BYTECODE_OP_LE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_LE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_LE_DOUBLE;
				break;
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_EQ_STRING:
		case BYTECODE_OP_NE_STRING:
		case BYTECODE_OP_GT_STRING:
		case BYTECODE_OP_LT_STRING:
		case BYTECODE_OP_GE_STRING:
		case BYTECODE_OP_LE_STRING:
		case BYTECODE_OP_EQ_STAR_GLOB_STRING:
		case BYTECODE_OP_NE_STAR_GLOB_STRING:
		case BYTECODE_OP_EQ_S64:
		case BYTECODE_OP_NE_S64:
		case BYTECODE_OP_GT_S64:
		case BYTECODE_OP_LT_S64:
		case BYTECODE_OP_GE_S64:
		case BYTECODE_OP_LE_S64:
		case BYTECODE_OP_EQ_DOUBLE:
		case BYTECODE_OP_NE_DOUBLE:
		case BYTECODE_OP_GT_DOUBLE:
		case BYTECODE_OP_LT_DOUBLE:
		case BYTECODE_OP_GE_DOUBLE:
		case BYTECODE_OP_LE_DOUBLE:
		case BYTECODE_OP_EQ_DOUBLE_S64:
		case BYTECODE_OP_NE_DOUBLE_S64:
		case BYTECODE_OP_GT_DOUBLE_S64:
		case BYTECODE_OP_LT_DOUBLE_S64:
		case BYTECODE_OP_GE_DOUBLE_S64:
		case BYTECODE_OP_LE_DOUBLE_S64:
		case BYTECODE_OP_EQ_S64_DOUBLE:
		case BYTECODE_OP_NE_S64_DOUBLE:
		case BYTECODE_OP_GT_S64_DOUBLE:
		case BYTECODE_OP_LT_S64_DOUBLE:
		case BYTECODE_OP_GE_S64_DOUBLE:
		case BYTECODE_OP_LE_S64_DOUBLE:
		case BYTECODE_OP_BIT_RSHIFT:
		case BYTECODE_OP_BIT_LSHIFT:
		case BYTECODE_OP_BIT_AND:
		case BYTECODE_OP_BIT_OR:
		case BYTECODE_OP_BIT_XOR:
		{
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		/* unary */
		case BYTECODE_OP_UNARY_PLUS:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
				insn->op = BYTECODE_OP_UNARY_PLUS_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_PLUS_DOUBLE;
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_MINUS:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
				insn->op = BYTECODE_OP_UNARY_MINUS_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_MINUS_DOUBLE;
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_NOT:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
				insn->op = BYTECODE_OP_UNARY_NOT_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_NOT_DOUBLE;
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_BIT_NOT:
		{
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_PLUS_S64:
		case BYTECODE_OP_UNARY_MINUS_S64:
		case BYTECODE_OP_UNARY_NOT_S64:
		case BYTECODE_OP_UNARY_PLUS_DOUBLE:
		case BYTECODE_OP_UNARY_MINUS_DOUBLE:
		case BYTECODE_OP_UNARY_NOT_DOUBLE:
		{
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		/* logical */
		case BYTECODE_OP_AND:
		case BYTECODE_OP_OR:
		{
			/* Continue to next instruction */
			/* Pop 1 when jump not taken */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			next_pc += sizeof(struct logical_op);
			break;
		}

		/* load field ref */
		case BYTECODE_OP_LOAD_FIELD_REF:
		{
			printk(KERN_WARNING "LTTng: bytecode: Unknown field ref type\n");
			ret = -EINVAL;
			goto end;
		}
		/* get context ref */
		case BYTECODE_OP_GET_CONTEXT_REF:
		{
			printk(KERN_WARNING "LTTng: bytecode: Unknown get context ref type\n");
			ret = -EINVAL;
			goto end;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_STRING:
		case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
		case BYTECODE_OP_GET_CONTEXT_REF_STRING:
		case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING:
		case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_S64:
		case BYTECODE_OP_GET_CONTEXT_REF_S64:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
		case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}

		/* load from immediate operand */
		case BYTECODE_OP_LOAD_STRING:
		{
			struct load_op *insn = (struct load_op *) pc;

			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			break;
		}

		case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
		{
			struct load_op *insn = (struct load_op *) pc;

			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			break;
		}

		case BYTECODE_OP_LOAD_S64:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_numeric);
			break;
		}

		case BYTECODE_OP_LOAD_DOUBLE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_double);
			break;
		}

		/* cast */
		case BYTECODE_OP_CAST_TO_S64:
		{
			struct cast_op *insn = (struct cast_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
			case REG_STAR_GLOB_STRING:
				printk(KERN_WARNING "LTTng: bytecode: Cast op can only be applied to numeric or floating point registers\n");
				ret = -EINVAL;
				goto end;
			case REG_S64:
				insn->op = BYTECODE_OP_CAST_NOP;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_CAST_DOUBLE_TO_S64;
				break;
			}
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct cast_op);
			break;
		}
		case BYTECODE_OP_CAST_DOUBLE_TO_S64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct cast_op);
			break;
		}
		case BYTECODE_OP_CAST_NOP:
		{
			next_pc += sizeof(struct cast_op);
			break;
		}

		/*
		 * Instructions for recursive traversal through composed types.
		 */
		case BYTECODE_OP_GET_CONTEXT_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_CONTEXT;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_APP_CONTEXT;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_GET_PAYLOAD_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_PAYLOAD;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD:
		{
			struct load_op *insn = (struct load_op *) pc;

			WARN_ON_ONCE(vstack_ax(stack)->type != REG_PTR);
			/* Pop 1, push 1 */
			ret = specialize_load_field(vstack_ax(stack), insn);
			if (ret)
				goto end;

			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_S8:
		case BYTECODE_OP_LOAD_FIELD_S16:
		case BYTECODE_OP_LOAD_FIELD_S32:
		case BYTECODE_OP_LOAD_FIELD_S64:
		case BYTECODE_OP_LOAD_FIELD_U8:
		case BYTECODE_OP_LOAD_FIELD_U16:
		case BYTECODE_OP_LOAD_FIELD_U32:
		case BYTECODE_OP_LOAD_FIELD_U64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_STRING:
		case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_DOUBLE:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_GET_SYMBOL:
		{
			struct load_op *insn = (struct load_op *) pc;

			dbg_printk("op get symbol\n");
			switch (vstack_ax(stack)->load.type) {
			case LOAD_OBJECT:
				printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
				ret = -EINVAL;
				goto end;
			case LOAD_ROOT_CONTEXT:
				/* Lookup context field. */
				ret = specialize_context_lookup(ctx, bytecode, insn,
						&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			case LOAD_ROOT_APP_CONTEXT:
				ret = -EINVAL;
				goto end;
			case LOAD_ROOT_PAYLOAD:
				/* Lookup event payload field. */
				ret = specialize_payload_lookup(event_desc,
						bytecode, insn,
						&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			}
			next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
			break;
		}

		case BYTECODE_OP_GET_SYMBOL_FIELD:
		{
			/* Always generated by specialize phase. */
			ret = -EINVAL;
			goto end;
		}

		case BYTECODE_OP_GET_INDEX_U16:
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u16 *index = (struct get_index_u16 *) insn->data;

			dbg_printk("op get index u16\n");
			/* Pop 1, push 1 */
			ret = specialize_get_index(bytecode, insn, index->index,
					vstack_ax(stack), sizeof(*index));
			if (ret)
				goto end;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
			break;
		}

		case BYTECODE_OP_GET_INDEX_U64:
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u64 *index = (struct get_index_u64 *) insn->data;

			dbg_printk("op get index u64\n");
			/* Pop 1, push 1 */
			ret = specialize_get_index(bytecode, insn, index->index,
					vstack_ax(stack), sizeof(*index));
			if (ret)
				goto end;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
			break;
		}

		}
	}
end:
	return ret;
}