liblttng-ust/lttng-bytecode-validator.c
1 /*
2 * SPDX-License-Identifier: MIT
3 *
4 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
5 *
6 * LTTng UST bytecode validator.
7 */
8
9 #define _LGPL_SOURCE
10 #include <stddef.h>
11 #include <stdint.h>
12 #include <time.h>
13
14 #include "rculfhash.h"
15
16 #include "lttng-bytecode.h"
17 #include "lttng-hash-helper.h"
18 #include "string-utils.h"
19 #include "ust-events-internal.h"
20
21 /*
22  * Number of merge points used to size the hash table. The hash table is
23  * initialized to that size and never resized, because we do not want to
24  * trigger RCU worker thread execution: fall back on linear traversal if
25  * the number of merge points exceeds this value.
26 */
27 #define DEFAULT_NR_MERGE_POINTS 128
28 #define MIN_NR_BUCKETS 128
29 #define MAX_NR_BUCKETS 128
30
31 /* merge point table node */
32 struct lfht_mp_node {
33 struct lttng_ust_lfht_node node;
34
35 /* Context at merge point */
36 struct vstack stack;
37 unsigned long target_pc;
38 };
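
/*
 * Note: each merge point node snapshots the entire virtual type stack as
 * known when the jump is recorded, keyed by the jump target offset
 * (target_pc). The snapshot is compared against the stack state reached
 * by linear traversal once that target is visited.
 */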
39
40 static unsigned long lttng_hash_seed;
41 static unsigned int lttng_hash_seed_ready;
42
43 static
44 int lttng_hash_match(struct lttng_ust_lfht_node *node, const void *key)
45 {
46 struct lfht_mp_node *mp_node =
47 caa_container_of(node, struct lfht_mp_node, node);
48 unsigned long key_pc = (unsigned long) key;
49
50 if (mp_node->target_pc == key_pc)
51 return 1;
52 else
53 return 0;
54 }
55
56 static
57 int merge_points_compare(const struct vstack *stacka,
58 const struct vstack *stackb)
59 {
60 int i, len;
61
62 if (stacka->top != stackb->top)
63 return 1;
64 len = stacka->top + 1;
65 assert(len >= 0);
66 for (i = 0; i < len; i++) {
67 if (stacka->e[i].type != REG_UNKNOWN
68 && stackb->e[i].type != REG_UNKNOWN
69 && stacka->e[i].type != stackb->e[i].type)
70 return 1;
71 }
72 return 0;
73 }
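
/*
 * Illustrative sketch (not part of the original file): the comparison
 * above treats REG_UNKNOWN as a wildcard, so two stack slots only
 * conflict when both types are known and differ. The hypothetical enum
 * and helper below restate that rule in isolation and are kept disabled.
 */
#if 0
enum example_reg_type { EX_REG_UNKNOWN, EX_REG_S64, EX_REG_DOUBLE, EX_REG_STRING };

/* Return 1 when two slot types are known and incompatible, 0 otherwise. */
static int example_slot_types_conflict(enum example_reg_type a,
		enum example_reg_type b)
{
	return a != EX_REG_UNKNOWN && b != EX_REG_UNKNOWN && a != b;
}
#endif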
74
75 static
76 int merge_point_add_check(struct lttng_ust_lfht *ht, unsigned long target_pc,
77 const struct vstack *stack)
78 {
79 struct lfht_mp_node *node;
80 unsigned long hash = lttng_hash_mix((const char *) target_pc,
81 sizeof(target_pc),
82 lttng_hash_seed);
83 struct lttng_ust_lfht_node *ret;
84
85 dbg_printf("Bytecode: adding merge point at offset %lu, hash %lu\n",
86 target_pc, hash);
87 node = zmalloc(sizeof(struct lfht_mp_node));
88 if (!node)
89 return -ENOMEM;
90 node->target_pc = target_pc;
91 memcpy(&node->stack, stack, sizeof(node->stack));
92 ret = lttng_ust_lfht_add_unique(ht, hash, lttng_hash_match,
93 (const char *) target_pc, &node->node);
94 if (ret != &node->node) {
95 struct lfht_mp_node *ret_mp =
96 caa_container_of(ret, struct lfht_mp_node, node);
97
98 /* Key already present */
99 dbg_printf("Bytecode: compare merge points for offset %lu, hash %lu\n",
100 target_pc, hash);
101 free(node);
102 if (merge_points_compare(stack, &ret_mp->stack)) {
103 ERR("Merge points differ for offset %lu\n",
104 target_pc);
105 return -EINVAL;
106 }
107 }
108 return 0;
109 }
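
/*
 * Note on the add path above: lttng_ust_lfht_add_unique() either inserts
 * the new node or returns the node already stored for the same target_pc.
 * On a duplicate, the newly computed stack must be compatible with the
 * stored snapshot (merge_points_compare() returning 0); otherwise two
 * execution paths reach the same offset with conflicting typing and
 * validation fails with -EINVAL.
 */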
110
111 /*
112 * Binary comparators use top of stack and top of stack -1.
113 * Return 0 if typing is known to match, 1 if typing is dynamic
114 * (unknown), negative error value on error.
115 */
116 static
117 int bin_op_compare_check(struct vstack *stack, bytecode_opcode_t opcode,
118 const char *str)
119 {
120 if (unlikely(!vstack_ax(stack) || !vstack_bx(stack)))
121 goto error_empty;
122
123 switch (vstack_ax(stack)->type) {
124 default:
125 goto error_type;
126
127 case REG_UNKNOWN:
128 goto unknown;
129 case REG_STRING:
130 switch (vstack_bx(stack)->type) {
131 default:
132 goto error_type;
133
134 case REG_UNKNOWN:
135 goto unknown;
136 case REG_STRING:
137 break;
138 case REG_STAR_GLOB_STRING:
139 if (opcode != BYTECODE_OP_EQ && opcode != BYTECODE_OP_NE) {
140 goto error_mismatch;
141 }
142 break;
143 case REG_S64:
144 case REG_U64:
145 case REG_DOUBLE:
146 goto error_mismatch;
147 }
148 break;
149 case REG_STAR_GLOB_STRING:
150 switch (vstack_bx(stack)->type) {
151 default:
152 goto error_type;
153
154 case REG_UNKNOWN:
155 goto unknown;
156 case REG_STRING:
157 if (opcode != BYTECODE_OP_EQ && opcode != BYTECODE_OP_NE) {
158 goto error_mismatch;
159 }
160 break;
161 case REG_STAR_GLOB_STRING:
162 case REG_S64:
163 case REG_U64:
164 case REG_DOUBLE:
165 goto error_mismatch;
166 }
167 break;
168 case REG_S64:
169 case REG_U64:
170 case REG_DOUBLE:
171 switch (vstack_bx(stack)->type) {
172 default:
173 goto error_type;
174
175 case REG_UNKNOWN:
176 goto unknown;
177 case REG_STRING:
178 case REG_STAR_GLOB_STRING:
179 goto error_mismatch;
180 case REG_S64:
181 case REG_U64:
182 case REG_DOUBLE:
183 break;
184 }
185 break;
186 }
187 return 0;
188
189 unknown:
190 return 1;
191
192 error_mismatch:
193 ERR("type mismatch for '%s' binary operator\n", str);
194 return -EINVAL;
195
196 error_empty:
197 ERR("empty stack for '%s' binary operator\n", str);
198 return -EINVAL;
199
200 error_type:
201 ERR("unknown type for '%s' binary operator\n", str);
202 return -EINVAL;
203 }
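
/*
 * Summary of the operand pairings accepted above (ax vs bx):
 *   string vs string                    -> all comparators
 *   string vs star-glob (either order)  -> only == and !=
 *   star-glob vs star-glob              -> rejected
 *   s64/u64/double vs s64/u64/double    -> all comparators
 *   any operand of type REG_UNKNOWN     -> deferred to runtime (return 1)
 * Every other combination is a type mismatch or unknown type (-EINVAL).
 */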
204
205 /*
206 * Binary bitwise operators use top of stack and top of stack -1.
207 * Return 0 if typing is known to match, 1 if typing is dynamic
208 * (unknown), negative error value on error.
209 */
210 static
211 int bin_op_bitwise_check(struct vstack *stack, bytecode_opcode_t opcode,
212 const char *str)
213 {
214 if (unlikely(!vstack_ax(stack) || !vstack_bx(stack)))
215 goto error_empty;
216
217 switch (vstack_ax(stack)->type) {
218 default:
219 goto error_type;
220
221 case REG_UNKNOWN:
222 goto unknown;
223 case REG_S64:
224 case REG_U64:
225 switch (vstack_bx(stack)->type) {
226 default:
227 goto error_type;
228
229 case REG_UNKNOWN:
230 goto unknown;
231 case REG_S64:
232 case REG_U64:
233 break;
234 }
235 break;
236 }
237 return 0;
238
239 unknown:
240 return 1;
241
242 error_empty:
243 ERR("empty stack for '%s' binary operator\n", str);
244 return -EINVAL;
245
246 error_type:
247 ERR("unknown type for '%s' binary operator\n", str);
248 return -EINVAL;
249 }
250
251 static
252 int validate_get_symbol(struct bytecode_runtime *bytecode,
253 const struct get_symbol *sym)
254 {
255 const char *str, *str_limit;
256 size_t len_limit;
257
258 if (sym->offset >= bytecode->p.bc->bc.len - bytecode->p.bc->bc.reloc_offset)
259 return -EINVAL;
260
261 str = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + sym->offset;
262 str_limit = bytecode->p.bc->bc.data + bytecode->p.bc->bc.len;
263 len_limit = str_limit - str;
264 if (strnlen(str, len_limit) == len_limit)
265 return -EINVAL;
266 return 0;
267 }
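
/*
 * Illustrative sketch (hypothetical standalone helper, not from this
 * file): the check above relies on strnlen() returning the full remaining
 * length when no '\0' is found, which proves the symbol string is
 * NUL-terminated strictly before the end of the bytecode blob. The same
 * idea in isolation, kept disabled:
 */
#if 0
#include <stddef.h>
#include <string.h>

/* Return 1 if the string starting at blob[str_offset] is NUL-terminated
 * before blob[blob_len], 0 otherwise. */
static int example_blob_string_terminated(const char *blob, size_t blob_len,
		size_t str_offset)
{
	size_t remaining;

	if (str_offset >= blob_len)
		return 0;
	remaining = blob_len - str_offset;
	return strnlen(blob + str_offset, remaining) < remaining;
}
#endif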
268
269 /*
270 * Validate bytecode range overflow within the validation pass.
271 * Called for each instruction encountered.
272 */
273 static
274 int bytecode_validate_overflow(struct bytecode_runtime *bytecode,
275 char *start_pc, char *pc)
276 {
277 int ret = 0;
278
279 switch (*(bytecode_opcode_t *) pc) {
280 case BYTECODE_OP_UNKNOWN:
281 default:
282 {
283 ERR("unknown bytecode op %u\n",
284 (unsigned int) *(bytecode_opcode_t *) pc);
285 ret = -EINVAL;
286 break;
287 }
288
289 case BYTECODE_OP_RETURN:
290 case BYTECODE_OP_RETURN_S64:
291 {
292 if (unlikely(pc + sizeof(struct return_op)
293 > start_pc + bytecode->len)) {
294 ret = -ERANGE;
295 }
296 break;
297 }
298
299 /* binary */
300 case BYTECODE_OP_MUL:
301 case BYTECODE_OP_DIV:
302 case BYTECODE_OP_MOD:
303 case BYTECODE_OP_PLUS:
304 case BYTECODE_OP_MINUS:
305 {
306 ERR("unsupported bytecode op %u\n",
307 (unsigned int) *(bytecode_opcode_t *) pc);
308 ret = -EINVAL;
309 break;
310 }
311
312 case BYTECODE_OP_EQ:
313 case BYTECODE_OP_NE:
314 case BYTECODE_OP_GT:
315 case BYTECODE_OP_LT:
316 case BYTECODE_OP_GE:
317 case BYTECODE_OP_LE:
318 case BYTECODE_OP_EQ_STRING:
319 case BYTECODE_OP_NE_STRING:
320 case BYTECODE_OP_GT_STRING:
321 case BYTECODE_OP_LT_STRING:
322 case BYTECODE_OP_GE_STRING:
323 case BYTECODE_OP_LE_STRING:
324 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
325 case BYTECODE_OP_NE_STAR_GLOB_STRING:
326 case BYTECODE_OP_EQ_S64:
327 case BYTECODE_OP_NE_S64:
328 case BYTECODE_OP_GT_S64:
329 case BYTECODE_OP_LT_S64:
330 case BYTECODE_OP_GE_S64:
331 case BYTECODE_OP_LE_S64:
332 case BYTECODE_OP_EQ_DOUBLE:
333 case BYTECODE_OP_NE_DOUBLE:
334 case BYTECODE_OP_GT_DOUBLE:
335 case BYTECODE_OP_LT_DOUBLE:
336 case BYTECODE_OP_GE_DOUBLE:
337 case BYTECODE_OP_LE_DOUBLE:
338 case BYTECODE_OP_EQ_DOUBLE_S64:
339 case BYTECODE_OP_NE_DOUBLE_S64:
340 case BYTECODE_OP_GT_DOUBLE_S64:
341 case BYTECODE_OP_LT_DOUBLE_S64:
342 case BYTECODE_OP_GE_DOUBLE_S64:
343 case BYTECODE_OP_LE_DOUBLE_S64:
344 case BYTECODE_OP_EQ_S64_DOUBLE:
345 case BYTECODE_OP_NE_S64_DOUBLE:
346 case BYTECODE_OP_GT_S64_DOUBLE:
347 case BYTECODE_OP_LT_S64_DOUBLE:
348 case BYTECODE_OP_GE_S64_DOUBLE:
349 case BYTECODE_OP_LE_S64_DOUBLE:
350 case BYTECODE_OP_BIT_RSHIFT:
351 case BYTECODE_OP_BIT_LSHIFT:
352 case BYTECODE_OP_BIT_AND:
353 case BYTECODE_OP_BIT_OR:
354 case BYTECODE_OP_BIT_XOR:
355 {
356 if (unlikely(pc + sizeof(struct binary_op)
357 > start_pc + bytecode->len)) {
358 ret = -ERANGE;
359 }
360 break;
361 }
362
363 /* unary */
364 case BYTECODE_OP_UNARY_PLUS:
365 case BYTECODE_OP_UNARY_MINUS:
366 case BYTECODE_OP_UNARY_NOT:
367 case BYTECODE_OP_UNARY_PLUS_S64:
368 case BYTECODE_OP_UNARY_MINUS_S64:
369 case BYTECODE_OP_UNARY_NOT_S64:
370 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
371 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
372 case BYTECODE_OP_UNARY_NOT_DOUBLE:
373 case BYTECODE_OP_UNARY_BIT_NOT:
374 {
375 if (unlikely(pc + sizeof(struct unary_op)
376 > start_pc + bytecode->len)) {
377 ret = -ERANGE;
378 }
379 break;
380 }
381
382 /* logical */
383 case BYTECODE_OP_AND:
384 case BYTECODE_OP_OR:
385 {
386 if (unlikely(pc + sizeof(struct logical_op)
387 > start_pc + bytecode->len)) {
388 ret = -ERANGE;
389 }
390 break;
391 }
392
393 /* load field ref */
394 case BYTECODE_OP_LOAD_FIELD_REF:
395 {
396 ERR("Unknown field ref type\n");
397 ret = -EINVAL;
398 break;
399 }
400
401 /* get context ref */
402 case BYTECODE_OP_GET_CONTEXT_REF:
403 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
404 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
405 case BYTECODE_OP_LOAD_FIELD_REF_S64:
406 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
407 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
408 case BYTECODE_OP_GET_CONTEXT_REF_S64:
409 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
410 {
411 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct field_ref)
412 > start_pc + bytecode->len)) {
413 ret = -ERANGE;
414 }
415 break;
416 }
417
418 /* load from immediate operand */
419 case BYTECODE_OP_LOAD_STRING:
420 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
421 {
422 struct load_op *insn = (struct load_op *) pc;
423 uint32_t str_len, maxlen;
424
425 if (unlikely(pc + sizeof(struct load_op)
426 > start_pc + bytecode->len)) {
427 ret = -ERANGE;
428 break;
429 }
430
431 maxlen = start_pc + bytecode->len - pc - sizeof(struct load_op);
432 str_len = strnlen(insn->data, maxlen);
433 if (unlikely(str_len >= maxlen)) {
434 /* Final '\0' not found within range */
435 ret = -ERANGE;
436 }
437 break;
438 }
439
440 case BYTECODE_OP_LOAD_S64:
441 {
442 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct literal_numeric)
443 > start_pc + bytecode->len)) {
444 ret = -ERANGE;
445 }
446 break;
447 }
448
449 case BYTECODE_OP_LOAD_DOUBLE:
450 {
451 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct literal_double)
452 > start_pc + bytecode->len)) {
453 ret = -ERANGE;
454 }
455 break;
456 }
457
458 case BYTECODE_OP_CAST_TO_S64:
459 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
460 case BYTECODE_OP_CAST_NOP:
461 {
462 if (unlikely(pc + sizeof(struct cast_op)
463 > start_pc + bytecode->len)) {
464 ret = -ERANGE;
465 }
466 break;
467 }
468
469 /*
470 * Instructions for recursive traversal through composed types.
471 */
472 case BYTECODE_OP_GET_CONTEXT_ROOT:
473 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
474 case BYTECODE_OP_GET_PAYLOAD_ROOT:
475 case BYTECODE_OP_LOAD_FIELD:
476 case BYTECODE_OP_LOAD_FIELD_S8:
477 case BYTECODE_OP_LOAD_FIELD_S16:
478 case BYTECODE_OP_LOAD_FIELD_S32:
479 case BYTECODE_OP_LOAD_FIELD_S64:
480 case BYTECODE_OP_LOAD_FIELD_U8:
481 case BYTECODE_OP_LOAD_FIELD_U16:
482 case BYTECODE_OP_LOAD_FIELD_U32:
483 case BYTECODE_OP_LOAD_FIELD_U64:
484 case BYTECODE_OP_LOAD_FIELD_STRING:
485 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
486 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
487 if (unlikely(pc + sizeof(struct load_op)
488 > start_pc + bytecode->len)) {
489 ret = -ERANGE;
490 }
491 break;
492
493 case BYTECODE_OP_GET_SYMBOL:
494 {
495 struct load_op *insn = (struct load_op *) pc;
496 struct get_symbol *sym = (struct get_symbol *) insn->data;
497
498 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_symbol)
499 > start_pc + bytecode->len)) {
500 ret = -ERANGE;
501 break;
502 }
503 ret = validate_get_symbol(bytecode, sym);
504 break;
505 }
506
507 case BYTECODE_OP_GET_SYMBOL_FIELD:
508 		ERR("Unexpected get symbol field\n");

509 ret = -EINVAL;
510 break;
511
512 case BYTECODE_OP_GET_INDEX_U16:
513 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_index_u16)
514 > start_pc + bytecode->len)) {
515 ret = -ERANGE;
516 }
517 break;
518
519 case BYTECODE_OP_GET_INDEX_U64:
520 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_index_u64)
521 > start_pc + bytecode->len)) {
522 ret = -ERANGE;
523 }
524 break;
525 }
526
527 return ret;
528 }
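
/*
 * Illustrative sketch (hypothetical helper, not part of the original
 * file): every case above applies the same bounds idiom -- an instruction
 * of a given size starting at pc must fit entirely within
 * [start_pc, start_pc + bytecode->len). Written once, kept disabled:
 */
#if 0
#include <stddef.h>

static int example_insn_fits(const char *start_pc, size_t code_len,
		const char *pc, size_t insn_size)
{
	return pc + insn_size <= start_pc + code_len;
}
#endif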
529
530 static
531 unsigned long delete_all_nodes(struct lttng_ust_lfht *ht)
532 {
533 struct lttng_ust_lfht_iter iter;
534 struct lfht_mp_node *node;
535 unsigned long nr_nodes = 0;
536
537 lttng_ust_lfht_for_each_entry(ht, &iter, node, node) {
538 int ret;
539
540 ret = lttng_ust_lfht_del(ht, lttng_ust_lfht_iter_get_node(&iter));
541 assert(!ret);
542 /* note: this hash table is never used concurrently */
543 free(node);
544 nr_nodes++;
545 }
546 return nr_nodes;
547 }
548
549 /*
550 * Return value:
551 * >=0: success
552 * <0: error
553 */
554 static
555 int validate_instruction_context(struct bytecode_runtime *bytecode,
556 struct vstack *stack,
557 char *start_pc,
558 char *pc)
559 {
560 int ret = 0;
561 const bytecode_opcode_t opcode = *(bytecode_opcode_t *) pc;
562
563 switch (opcode) {
564 case BYTECODE_OP_UNKNOWN:
565 default:
566 {
567 ERR("unknown bytecode op %u\n",
568 (unsigned int) *(bytecode_opcode_t *) pc);
569 ret = -EINVAL;
570 goto end;
571 }
572
573 case BYTECODE_OP_RETURN:
574 case BYTECODE_OP_RETURN_S64:
575 {
576 goto end;
577 }
578
579 /* binary */
580 case BYTECODE_OP_MUL:
581 case BYTECODE_OP_DIV:
582 case BYTECODE_OP_MOD:
583 case BYTECODE_OP_PLUS:
584 case BYTECODE_OP_MINUS:
585 {
586 ERR("unsupported bytecode op %u\n",
587 (unsigned int) opcode);
588 ret = -EINVAL;
589 goto end;
590 }
591
592 case BYTECODE_OP_EQ:
593 {
594 ret = bin_op_compare_check(stack, opcode, "==");
595 if (ret < 0)
596 goto end;
597 break;
598 }
599 case BYTECODE_OP_NE:
600 {
601 ret = bin_op_compare_check(stack, opcode, "!=");
602 if (ret < 0)
603 goto end;
604 break;
605 }
606 case BYTECODE_OP_GT:
607 {
608 ret = bin_op_compare_check(stack, opcode, ">");
609 if (ret < 0)
610 goto end;
611 break;
612 }
613 case BYTECODE_OP_LT:
614 {
615 ret = bin_op_compare_check(stack, opcode, "<");
616 if (ret < 0)
617 goto end;
618 break;
619 }
620 case BYTECODE_OP_GE:
621 {
622 ret = bin_op_compare_check(stack, opcode, ">=");
623 if (ret < 0)
624 goto end;
625 break;
626 }
627 case BYTECODE_OP_LE:
628 {
629 ret = bin_op_compare_check(stack, opcode, "<=");
630 if (ret < 0)
631 goto end;
632 break;
633 }
634
635 case BYTECODE_OP_EQ_STRING:
636 case BYTECODE_OP_NE_STRING:
637 case BYTECODE_OP_GT_STRING:
638 case BYTECODE_OP_LT_STRING:
639 case BYTECODE_OP_GE_STRING:
640 case BYTECODE_OP_LE_STRING:
641 {
642 if (!vstack_ax(stack) || !vstack_bx(stack)) {
643 ERR("Empty stack\n");
644 ret = -EINVAL;
645 goto end;
646 }
647 if (vstack_ax(stack)->type != REG_STRING
648 || vstack_bx(stack)->type != REG_STRING) {
649 ERR("Unexpected register type for string comparator\n");
650 ret = -EINVAL;
651 goto end;
652 }
653 break;
654 }
655
656 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
657 case BYTECODE_OP_NE_STAR_GLOB_STRING:
658 {
659 if (!vstack_ax(stack) || !vstack_bx(stack)) {
660 ERR("Empty stack\n");
661 ret = -EINVAL;
662 goto end;
663 }
664 if (vstack_ax(stack)->type != REG_STAR_GLOB_STRING
665 && vstack_bx(stack)->type != REG_STAR_GLOB_STRING) {
666 ERR("Unexpected register type for globbing pattern comparator\n");
667 ret = -EINVAL;
668 goto end;
669 }
670 break;
671 }
672
673 case BYTECODE_OP_EQ_S64:
674 case BYTECODE_OP_NE_S64:
675 case BYTECODE_OP_GT_S64:
676 case BYTECODE_OP_LT_S64:
677 case BYTECODE_OP_GE_S64:
678 case BYTECODE_OP_LE_S64:
679 {
680 if (!vstack_ax(stack) || !vstack_bx(stack)) {
681 ERR("Empty stack\n");
682 ret = -EINVAL;
683 goto end;
684 }
685 switch (vstack_ax(stack)->type) {
686 case REG_S64:
687 case REG_U64:
688 break;
689 default:
690 ERR("Unexpected register type for s64 comparator\n");
691 ret = -EINVAL;
692 goto end;
693 }
694 switch (vstack_bx(stack)->type) {
695 case REG_S64:
696 case REG_U64:
697 break;
698 default:
699 ERR("Unexpected register type for s64 comparator\n");
700 ret = -EINVAL;
701 goto end;
702 }
703 break;
704 }
705
706 case BYTECODE_OP_EQ_DOUBLE:
707 case BYTECODE_OP_NE_DOUBLE:
708 case BYTECODE_OP_GT_DOUBLE:
709 case BYTECODE_OP_LT_DOUBLE:
710 case BYTECODE_OP_GE_DOUBLE:
711 case BYTECODE_OP_LE_DOUBLE:
712 {
713 if (!vstack_ax(stack) || !vstack_bx(stack)) {
714 ERR("Empty stack\n");
715 ret = -EINVAL;
716 goto end;
717 }
718 if (vstack_ax(stack)->type != REG_DOUBLE && vstack_bx(stack)->type != REG_DOUBLE) {
719 ERR("Double operator should have two double registers\n");
720 ret = -EINVAL;
721 goto end;
722 }
723 break;
724 }
725
726 case BYTECODE_OP_EQ_DOUBLE_S64:
727 case BYTECODE_OP_NE_DOUBLE_S64:
728 case BYTECODE_OP_GT_DOUBLE_S64:
729 case BYTECODE_OP_LT_DOUBLE_S64:
730 case BYTECODE_OP_GE_DOUBLE_S64:
731 case BYTECODE_OP_LE_DOUBLE_S64:
732 {
733 if (!vstack_ax(stack) || !vstack_bx(stack)) {
734 ERR("Empty stack\n");
735 ret = -EINVAL;
736 goto end;
737 }
738 switch (vstack_ax(stack)->type) {
739 case REG_S64:
740 case REG_U64:
741 break;
742 default:
743 ERR("Double-S64 operator has unexpected register types\n");
744 ret = -EINVAL;
745 goto end;
746 }
747 switch (vstack_bx(stack)->type) {
748 case REG_DOUBLE:
749 break;
750 default:
751 ERR("Double-S64 operator has unexpected register types\n");
752 ret = -EINVAL;
753 goto end;
754 }
755 break;
756 }
757
758 case BYTECODE_OP_EQ_S64_DOUBLE:
759 case BYTECODE_OP_NE_S64_DOUBLE:
760 case BYTECODE_OP_GT_S64_DOUBLE:
761 case BYTECODE_OP_LT_S64_DOUBLE:
762 case BYTECODE_OP_GE_S64_DOUBLE:
763 case BYTECODE_OP_LE_S64_DOUBLE:
764 {
765 if (!vstack_ax(stack) || !vstack_bx(stack)) {
766 ERR("Empty stack\n");
767 ret = -EINVAL;
768 goto end;
769 }
770 switch (vstack_ax(stack)->type) {
771 case REG_DOUBLE:
772 break;
773 default:
774 ERR("S64-Double operator has unexpected register types\n");
775 ret = -EINVAL;
776 goto end;
777 }
778 switch (vstack_bx(stack)->type) {
779 case REG_S64:
780 case REG_U64:
781 break;
782 default:
783 ERR("S64-Double operator has unexpected register types\n");
784 ret = -EINVAL;
785 goto end;
786 }
787 break;
788 }
789
790 case BYTECODE_OP_BIT_RSHIFT:
791 ret = bin_op_bitwise_check(stack, opcode, ">>");
792 if (ret < 0)
793 goto end;
794 break;
795 case BYTECODE_OP_BIT_LSHIFT:
796 ret = bin_op_bitwise_check(stack, opcode, "<<");
797 if (ret < 0)
798 goto end;
799 break;
800 case BYTECODE_OP_BIT_AND:
801 ret = bin_op_bitwise_check(stack, opcode, "&");
802 if (ret < 0)
803 goto end;
804 break;
805 case BYTECODE_OP_BIT_OR:
806 ret = bin_op_bitwise_check(stack, opcode, "|");
807 if (ret < 0)
808 goto end;
809 break;
810 case BYTECODE_OP_BIT_XOR:
811 ret = bin_op_bitwise_check(stack, opcode, "^");
812 if (ret < 0)
813 goto end;
814 break;
815
816 /* unary */
817 case BYTECODE_OP_UNARY_PLUS:
818 case BYTECODE_OP_UNARY_MINUS:
819 case BYTECODE_OP_UNARY_NOT:
820 {
821 if (!vstack_ax(stack)) {
822 ERR("Empty stack\n");
823 ret = -EINVAL;
824 goto end;
825 }
826 switch (vstack_ax(stack)->type) {
827 default:
828 ERR("unknown register type\n");
829 ret = -EINVAL;
830 goto end;
831
832 case REG_STRING:
833 case REG_STAR_GLOB_STRING:
834 ERR("Unary op can only be applied to numeric or floating point registers\n");
835 ret = -EINVAL;
836 goto end;
837 case REG_S64:
838 break;
839 case REG_U64:
840 break;
841 case REG_DOUBLE:
842 break;
843 case REG_UNKNOWN:
844 break;
845 }
846 break;
847 }
848 case BYTECODE_OP_UNARY_BIT_NOT:
849 {
850 if (!vstack_ax(stack)) {
851 ERR("Empty stack\n");
852 ret = -EINVAL;
853 goto end;
854 }
855 switch (vstack_ax(stack)->type) {
856 default:
857 ERR("unknown register type\n");
858 ret = -EINVAL;
859 goto end;
860
861 case REG_STRING:
862 case REG_STAR_GLOB_STRING:
863 case REG_DOUBLE:
864 ERR("Unary bitwise op can only be applied to numeric registers\n");
865 ret = -EINVAL;
866 goto end;
867 case REG_S64:
868 break;
869 case REG_U64:
870 break;
871 case REG_UNKNOWN:
872 break;
873 }
874 break;
875 }
876
877 case BYTECODE_OP_UNARY_PLUS_S64:
878 case BYTECODE_OP_UNARY_MINUS_S64:
879 case BYTECODE_OP_UNARY_NOT_S64:
880 {
881 if (!vstack_ax(stack)) {
882 ERR("Empty stack\n");
883 ret = -EINVAL;
884 goto end;
885 }
886 if (vstack_ax(stack)->type != REG_S64 &&
887 vstack_ax(stack)->type != REG_U64) {
888 ERR("Invalid register type\n");
889 ret = -EINVAL;
890 goto end;
891 }
892 break;
893 }
894
895 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
896 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
897 case BYTECODE_OP_UNARY_NOT_DOUBLE:
898 {
899 if (!vstack_ax(stack)) {
900 ERR("Empty stack\n");
901 ret = -EINVAL;
902 goto end;
903 }
904 if (vstack_ax(stack)->type != REG_DOUBLE) {
905 ERR("Invalid register type\n");
906 ret = -EINVAL;
907 goto end;
908 }
909 break;
910 }
911
912 /* logical */
913 case BYTECODE_OP_AND:
914 case BYTECODE_OP_OR:
915 {
916 struct logical_op *insn = (struct logical_op *) pc;
917
918 if (!vstack_ax(stack)) {
919 ERR("Empty stack\n");
920 ret = -EINVAL;
921 goto end;
922 }
923 if (vstack_ax(stack)->type != REG_S64
924 && vstack_ax(stack)->type != REG_U64
925 && vstack_ax(stack)->type != REG_UNKNOWN) {
926 ERR("Logical comparator expects S64, U64 or dynamic register\n");
927 ret = -EINVAL;
928 goto end;
929 }
930
931 dbg_printf("Validate jumping to bytecode offset %u\n",
932 (unsigned int) insn->skip_offset);
933 if (unlikely(start_pc + insn->skip_offset <= pc)) {
934 ERR("Loops are not allowed in bytecode\n");
935 ret = -EINVAL;
936 goto end;
937 }
938 break;
939 }
940
941 /* load field ref */
942 case BYTECODE_OP_LOAD_FIELD_REF:
943 {
944 ERR("Unknown field ref type\n");
945 ret = -EINVAL;
946 goto end;
947 }
948 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
949 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
950 {
951 struct load_op *insn = (struct load_op *) pc;
952 struct field_ref *ref = (struct field_ref *) insn->data;
953
954 dbg_printf("Validate load field ref offset %u type string\n",
955 ref->offset);
956 break;
957 }
958 case BYTECODE_OP_LOAD_FIELD_REF_S64:
959 {
960 struct load_op *insn = (struct load_op *) pc;
961 struct field_ref *ref = (struct field_ref *) insn->data;
962
963 dbg_printf("Validate load field ref offset %u type s64\n",
964 ref->offset);
965 break;
966 }
967 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
968 {
969 struct load_op *insn = (struct load_op *) pc;
970 struct field_ref *ref = (struct field_ref *) insn->data;
971
972 dbg_printf("Validate load field ref offset %u type double\n",
973 ref->offset);
974 break;
975 }
976
977 /* load from immediate operand */
978 case BYTECODE_OP_LOAD_STRING:
979 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
980 {
981 break;
982 }
983
984 case BYTECODE_OP_LOAD_S64:
985 {
986 break;
987 }
988
989 case BYTECODE_OP_LOAD_DOUBLE:
990 {
991 break;
992 }
993
994 case BYTECODE_OP_CAST_TO_S64:
995 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
996 {
997 struct cast_op *insn = (struct cast_op *) pc;
998
999 if (!vstack_ax(stack)) {
1000 ERR("Empty stack\n");
1001 ret = -EINVAL;
1002 goto end;
1003 }
1004 switch (vstack_ax(stack)->type) {
1005 default:
1006 ERR("unknown register type\n");
1007 ret = -EINVAL;
1008 goto end;
1009
1010 case REG_STRING:
1011 case REG_STAR_GLOB_STRING:
1012 ERR("Cast op can only be applied to numeric or floating point registers\n");
1013 ret = -EINVAL;
1014 goto end;
1015 case REG_S64:
1016 break;
1017 case REG_U64:
1018 break;
1019 case REG_DOUBLE:
1020 break;
1021 case REG_UNKNOWN:
1022 break;
1023 }
1024 if (insn->op == BYTECODE_OP_CAST_DOUBLE_TO_S64) {
1025 if (vstack_ax(stack)->type != REG_DOUBLE) {
1026 ERR("Cast expects double\n");
1027 ret = -EINVAL;
1028 goto end;
1029 }
1030 }
1031 break;
1032 }
1033 case BYTECODE_OP_CAST_NOP:
1034 {
1035 break;
1036 }
1037
1038 /* get context ref */
1039 case BYTECODE_OP_GET_CONTEXT_REF:
1040 {
1041 struct load_op *insn = (struct load_op *) pc;
1042 struct field_ref *ref = (struct field_ref *) insn->data;
1043
1044 dbg_printf("Validate get context ref offset %u type dynamic\n",
1045 ref->offset);
1046 break;
1047 }
1048 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
1049 {
1050 struct load_op *insn = (struct load_op *) pc;
1051 struct field_ref *ref = (struct field_ref *) insn->data;
1052
1053 dbg_printf("Validate get context ref offset %u type string\n",
1054 ref->offset);
1055 break;
1056 }
1057 case BYTECODE_OP_GET_CONTEXT_REF_S64:
1058 {
1059 struct load_op *insn = (struct load_op *) pc;
1060 struct field_ref *ref = (struct field_ref *) insn->data;
1061
1062 dbg_printf("Validate get context ref offset %u type s64\n",
1063 ref->offset);
1064 break;
1065 }
1066 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
1067 {
1068 struct load_op *insn = (struct load_op *) pc;
1069 struct field_ref *ref = (struct field_ref *) insn->data;
1070
1071 dbg_printf("Validate get context ref offset %u type double\n",
1072 ref->offset);
1073 break;
1074 }
1075
1076 /*
1077 * Instructions for recursive traversal through composed types.
1078 */
1079 case BYTECODE_OP_GET_CONTEXT_ROOT:
1080 {
1081 dbg_printf("Validate get context root\n");
1082 break;
1083 }
1084 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
1085 {
1086 dbg_printf("Validate get app context root\n");
1087 break;
1088 }
1089 case BYTECODE_OP_GET_PAYLOAD_ROOT:
1090 {
1091 dbg_printf("Validate get payload root\n");
1092 break;
1093 }
1094 case BYTECODE_OP_LOAD_FIELD:
1095 {
1096 /*
1097 * We tolerate that field type is unknown at validation,
1098 * because we are performing the load specialization in
1099 * a phase after validation.
1100 */
1101 dbg_printf("Validate load field\n");
1102 break;
1103 }
1104 case BYTECODE_OP_LOAD_FIELD_S8:
1105 {
1106 dbg_printf("Validate load field s8\n");
1107 break;
1108 }
1109 case BYTECODE_OP_LOAD_FIELD_S16:
1110 {
1111 dbg_printf("Validate load field s16\n");
1112 break;
1113 }
1114 case BYTECODE_OP_LOAD_FIELD_S32:
1115 {
1116 dbg_printf("Validate load field s32\n");
1117 break;
1118 }
1119 case BYTECODE_OP_LOAD_FIELD_S64:
1120 {
1121 dbg_printf("Validate load field s64\n");
1122 break;
1123 }
1124 case BYTECODE_OP_LOAD_FIELD_U8:
1125 {
1126 dbg_printf("Validate load field u8\n");
1127 break;
1128 }
1129 case BYTECODE_OP_LOAD_FIELD_U16:
1130 {
1131 dbg_printf("Validate load field u16\n");
1132 break;
1133 }
1134 case BYTECODE_OP_LOAD_FIELD_U32:
1135 {
1136 dbg_printf("Validate load field u32\n");
1137 break;
1138 }
1139 case BYTECODE_OP_LOAD_FIELD_U64:
1140 {
1141 dbg_printf("Validate load field u64\n");
1142 break;
1143 }
1144 case BYTECODE_OP_LOAD_FIELD_STRING:
1145 {
1146 dbg_printf("Validate load field string\n");
1147 break;
1148 }
1149 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
1150 {
1151 dbg_printf("Validate load field sequence\n");
1152 break;
1153 }
1154 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
1155 {
1156 dbg_printf("Validate load field double\n");
1157 break;
1158 }
1159
1160 case BYTECODE_OP_GET_SYMBOL:
1161 {
1162 struct load_op *insn = (struct load_op *) pc;
1163 struct get_symbol *sym = (struct get_symbol *) insn->data;
1164
1165 dbg_printf("Validate get symbol offset %u\n", sym->offset);
1166 break;
1167 }
1168
1169 case BYTECODE_OP_GET_SYMBOL_FIELD:
1170 {
1171 struct load_op *insn = (struct load_op *) pc;
1172 struct get_symbol *sym = (struct get_symbol *) insn->data;
1173
1174 dbg_printf("Validate get symbol field offset %u\n", sym->offset);
1175 break;
1176 }
1177
1178 case BYTECODE_OP_GET_INDEX_U16:
1179 {
1180 struct load_op *insn = (struct load_op *) pc;
1181 struct get_index_u16 *get_index = (struct get_index_u16 *) insn->data;
1182
1183 dbg_printf("Validate get index u16 index %u\n", get_index->index);
1184 break;
1185 }
1186
1187 case BYTECODE_OP_GET_INDEX_U64:
1188 {
1189 struct load_op *insn = (struct load_op *) pc;
1190 struct get_index_u64 *get_index = (struct get_index_u64 *) insn->data;
1191
1192 dbg_printf("Validate get index u64 index %" PRIu64 "\n", get_index->index);
1193 break;
1194 }
1195 }
1196 end:
1197 return ret;
1198 }
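
/*
 * Note: validate_instruction_context() only checks that the operand types
 * currently on the virtual stack are acceptable for the opcode at pc; it
 * never modifies the stack. The stack effect of the instruction (pop/push
 * and resulting register type) is applied afterwards by exec_insn().
 */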
1199
1200 /*
1201 * Return value:
1202 * 0: success
1203 * <0: error
1204 */
1205 static
1206 int validate_instruction_all_contexts(struct bytecode_runtime *bytecode,
1207 struct lttng_ust_lfht *merge_points,
1208 struct vstack *stack,
1209 char *start_pc,
1210 char *pc)
1211 {
1212 int ret;
1213 unsigned long target_pc = pc - start_pc;
1214 struct lttng_ust_lfht_iter iter;
1215 struct lttng_ust_lfht_node *node;
1216 struct lfht_mp_node *mp_node;
1217 unsigned long hash;
1218
1219 /* Validate the context resulting from the previous instruction */
1220 ret = validate_instruction_context(bytecode, stack, start_pc, pc);
1221 if (ret < 0)
1222 return ret;
1223
1224 /* Validate merge points */
1225 hash = lttng_hash_mix((const char *) target_pc, sizeof(target_pc),
1226 lttng_hash_seed);
1227 lttng_ust_lfht_lookup(merge_points, hash, lttng_hash_match,
1228 (const char *) target_pc, &iter);
1229 node = lttng_ust_lfht_iter_get_node(&iter);
1230 if (node) {
1231 mp_node = caa_container_of(node, struct lfht_mp_node, node);
1232
1233 dbg_printf("Bytecode: validate merge point at offset %lu\n",
1234 target_pc);
1235 if (merge_points_compare(stack, &mp_node->stack)) {
1236 ERR("Merge points differ for offset %lu\n",
1237 target_pc);
1238 return -EINVAL;
1239 }
1240 /* Once validated, we can remove the merge point */
1241 dbg_printf("Bytecode: remove merge point at offset %lu\n",
1242 target_pc);
1243 ret = lttng_ust_lfht_del(merge_points, node);
1244 assert(!ret);
1245 }
1246 return 0;
1247 }
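
/*
 * Note: a merge point found at target_pc was recorded earlier by the
 * logical and/or instruction whose skip_offset jumps here. The stack
 * snapshot saved at that time must be compatible with the stack state
 * reached by linear traversal; once checked, the node is removed, so any
 * merge point left over at the end of validation marks a jump target that
 * was never reached during the pass.
 */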
1248
1249 /*
1250 * Return value:
1251 * >0: going to next insn.
1252 * 0: success, stop iteration.
1253 * <0: error
1254 */
1255 static
1256 int exec_insn(struct bytecode_runtime *bytecode,
1257 struct lttng_ust_lfht *merge_points,
1258 struct vstack *stack,
1259 char **_next_pc,
1260 char *pc)
1261 {
1262 int ret = 1;
1263 char *next_pc = *_next_pc;
1264
1265 switch (*(bytecode_opcode_t *) pc) {
1266 case BYTECODE_OP_UNKNOWN:
1267 default:
1268 {
1269 ERR("unknown bytecode op %u\n",
1270 (unsigned int) *(bytecode_opcode_t *) pc);
1271 ret = -EINVAL;
1272 goto end;
1273 }
1274
1275 case BYTECODE_OP_RETURN:
1276 {
1277 if (!vstack_ax(stack)) {
1278 ERR("Empty stack\n");
1279 ret = -EINVAL;
1280 goto end;
1281 }
1282 switch (vstack_ax(stack)->type) {
1283 case REG_S64:
1284 case REG_U64:
1285 case REG_DOUBLE:
1286 case REG_STRING:
1287 case REG_PTR:
1288 case REG_UNKNOWN:
1289 break;
1290 default:
1291 ERR("Unexpected register type %d at end of bytecode\n",
1292 (int) vstack_ax(stack)->type);
1293 ret = -EINVAL;
1294 goto end;
1295 }
1296
1297 ret = 0;
1298 goto end;
1299 }
1300 case BYTECODE_OP_RETURN_S64:
1301 {
1302 if (!vstack_ax(stack)) {
1303 ERR("Empty stack\n");
1304 ret = -EINVAL;
1305 goto end;
1306 }
1307 switch (vstack_ax(stack)->type) {
1308 case REG_S64:
1309 case REG_U64:
1310 break;
1311 default:
1312 case REG_UNKNOWN:
1313 ERR("Unexpected register type %d at end of bytecode\n",
1314 (int) vstack_ax(stack)->type);
1315 ret = -EINVAL;
1316 goto end;
1317 }
1318
1319 ret = 0;
1320 goto end;
1321 }
1322
1323 /* binary */
1324 case BYTECODE_OP_MUL:
1325 case BYTECODE_OP_DIV:
1326 case BYTECODE_OP_MOD:
1327 case BYTECODE_OP_PLUS:
1328 case BYTECODE_OP_MINUS:
1329 {
1330 ERR("unsupported bytecode op %u\n",
1331 (unsigned int) *(bytecode_opcode_t *) pc);
1332 ret = -EINVAL;
1333 goto end;
1334 }
1335
1336 case BYTECODE_OP_EQ:
1337 case BYTECODE_OP_NE:
1338 case BYTECODE_OP_GT:
1339 case BYTECODE_OP_LT:
1340 case BYTECODE_OP_GE:
1341 case BYTECODE_OP_LE:
1342 case BYTECODE_OP_EQ_STRING:
1343 case BYTECODE_OP_NE_STRING:
1344 case BYTECODE_OP_GT_STRING:
1345 case BYTECODE_OP_LT_STRING:
1346 case BYTECODE_OP_GE_STRING:
1347 case BYTECODE_OP_LE_STRING:
1348 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
1349 case BYTECODE_OP_NE_STAR_GLOB_STRING:
1350 case BYTECODE_OP_EQ_S64:
1351 case BYTECODE_OP_NE_S64:
1352 case BYTECODE_OP_GT_S64:
1353 case BYTECODE_OP_LT_S64:
1354 case BYTECODE_OP_GE_S64:
1355 case BYTECODE_OP_LE_S64:
1356 case BYTECODE_OP_EQ_DOUBLE:
1357 case BYTECODE_OP_NE_DOUBLE:
1358 case BYTECODE_OP_GT_DOUBLE:
1359 case BYTECODE_OP_LT_DOUBLE:
1360 case BYTECODE_OP_GE_DOUBLE:
1361 case BYTECODE_OP_LE_DOUBLE:
1362 case BYTECODE_OP_EQ_DOUBLE_S64:
1363 case BYTECODE_OP_NE_DOUBLE_S64:
1364 case BYTECODE_OP_GT_DOUBLE_S64:
1365 case BYTECODE_OP_LT_DOUBLE_S64:
1366 case BYTECODE_OP_GE_DOUBLE_S64:
1367 case BYTECODE_OP_LE_DOUBLE_S64:
1368 case BYTECODE_OP_EQ_S64_DOUBLE:
1369 case BYTECODE_OP_NE_S64_DOUBLE:
1370 case BYTECODE_OP_GT_S64_DOUBLE:
1371 case BYTECODE_OP_LT_S64_DOUBLE:
1372 case BYTECODE_OP_GE_S64_DOUBLE:
1373 case BYTECODE_OP_LE_S64_DOUBLE:
1374 {
1375 /* Pop 2, push 1 */
1376 if (vstack_pop(stack)) {
1377 ret = -EINVAL;
1378 goto end;
1379 }
1380 if (!vstack_ax(stack)) {
1381 ERR("Empty stack\n");
1382 ret = -EINVAL;
1383 goto end;
1384 }
1385 switch (vstack_ax(stack)->type) {
1386 case REG_S64:
1387 case REG_U64:
1388 case REG_DOUBLE:
1389 case REG_STRING:
1390 case REG_STAR_GLOB_STRING:
1391 case REG_UNKNOWN:
1392 break;
1393 default:
1394 ERR("Unexpected register type %d for operation\n",
1395 (int) vstack_ax(stack)->type);
1396 ret = -EINVAL;
1397 goto end;
1398 }
1399
1400 vstack_ax(stack)->type = REG_S64;
1401 next_pc += sizeof(struct binary_op);
1402 break;
1403 }
1404
1405 case BYTECODE_OP_BIT_RSHIFT:
1406 case BYTECODE_OP_BIT_LSHIFT:
1407 case BYTECODE_OP_BIT_AND:
1408 case BYTECODE_OP_BIT_OR:
1409 case BYTECODE_OP_BIT_XOR:
1410 {
1411 /* Pop 2, push 1 */
1412 if (vstack_pop(stack)) {
1413 ret = -EINVAL;
1414 goto end;
1415 }
1416 if (!vstack_ax(stack)) {
1417 ERR("Empty stack\n");
1418 ret = -EINVAL;
1419 goto end;
1420 }
1421 switch (vstack_ax(stack)->type) {
1422 case REG_S64:
1423 case REG_U64:
1424 case REG_DOUBLE:
1425 case REG_STRING:
1426 case REG_STAR_GLOB_STRING:
1427 case REG_UNKNOWN:
1428 break;
1429 default:
1430 ERR("Unexpected register type %d for operation\n",
1431 (int) vstack_ax(stack)->type);
1432 ret = -EINVAL;
1433 goto end;
1434 }
1435
1436 vstack_ax(stack)->type = REG_U64;
1437 next_pc += sizeof(struct binary_op);
1438 break;
1439 }
1440
1441 /* unary */
1442 case BYTECODE_OP_UNARY_PLUS:
1443 case BYTECODE_OP_UNARY_MINUS:
1444 {
1445 /* Pop 1, push 1 */
1446 if (!vstack_ax(stack)) {
1447 ERR("Empty stack\n");
1448 ret = -EINVAL;
1449 goto end;
1450 }
1451 switch (vstack_ax(stack)->type) {
1452 case REG_UNKNOWN:
1453 case REG_DOUBLE:
1454 case REG_S64:
1455 case REG_U64:
1456 break;
1457 default:
1458 ERR("Unexpected register type %d for operation\n",
1459 (int) vstack_ax(stack)->type);
1460 ret = -EINVAL;
1461 goto end;
1462 }
1463 vstack_ax(stack)->type = REG_UNKNOWN;
1464 next_pc += sizeof(struct unary_op);
1465 break;
1466 }
1467
1468 case BYTECODE_OP_UNARY_PLUS_S64:
1469 case BYTECODE_OP_UNARY_MINUS_S64:
1470 case BYTECODE_OP_UNARY_NOT_S64:
1471 {
1472 /* Pop 1, push 1 */
1473 if (!vstack_ax(stack)) {
1474 ERR("Empty stack\n");
1475 ret = -EINVAL;
1476 goto end;
1477 }
1478 switch (vstack_ax(stack)->type) {
1479 case REG_S64:
1480 case REG_U64:
1481 break;
1482 default:
1483 ERR("Unexpected register type %d for operation\n",
1484 (int) vstack_ax(stack)->type);
1485 ret = -EINVAL;
1486 goto end;
1487 }
1488
1489 next_pc += sizeof(struct unary_op);
1490 break;
1491 }
1492
1493 case BYTECODE_OP_UNARY_NOT:
1494 {
1495 /* Pop 1, push 1 */
1496 if (!vstack_ax(stack)) {
1497 ERR("Empty stack\n");
1498 ret = -EINVAL;
1499 goto end;
1500 }
1501 switch (vstack_ax(stack)->type) {
1502 case REG_UNKNOWN:
1503 case REG_DOUBLE:
1504 case REG_S64:
1505 case REG_U64:
1506 break;
1507 default:
1508 ERR("Unexpected register type %d for operation\n",
1509 (int) vstack_ax(stack)->type);
1510 ret = -EINVAL;
1511 goto end;
1512 }
1513
1514 next_pc += sizeof(struct unary_op);
1515 break;
1516 }
1517
1518 case BYTECODE_OP_UNARY_BIT_NOT:
1519 {
1520 /* Pop 1, push 1 */
1521 if (!vstack_ax(stack)) {
1522 ERR("Empty stack\n");
1523 ret = -EINVAL;
1524 goto end;
1525 }
1526 switch (vstack_ax(stack)->type) {
1527 case REG_UNKNOWN:
1528 case REG_S64:
1529 case REG_U64:
1530 break;
1531 case REG_DOUBLE:
1532 default:
1533 ERR("Unexpected register type %d for operation\n",
1534 (int) vstack_ax(stack)->type);
1535 ret = -EINVAL;
1536 goto end;
1537 }
1538
1539 vstack_ax(stack)->type = REG_U64;
1540 next_pc += sizeof(struct unary_op);
1541 break;
1542 }
1543
1544 case BYTECODE_OP_UNARY_NOT_DOUBLE:
1545 {
1546 /* Pop 1, push 1 */
1547 if (!vstack_ax(stack)) {
1548 ERR("Empty stack\n");
1549 ret = -EINVAL;
1550 goto end;
1551 }
1552 switch (vstack_ax(stack)->type) {
1553 case REG_DOUBLE:
1554 break;
1555 default:
1556 ERR("Incorrect register type %d for operation\n",
1557 (int) vstack_ax(stack)->type);
1558 ret = -EINVAL;
1559 goto end;
1560 }
1561
1562 vstack_ax(stack)->type = REG_S64;
1563 next_pc += sizeof(struct unary_op);
1564 break;
1565 }
1566
1567 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
1568 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
1569 {
1570 /* Pop 1, push 1 */
1571 if (!vstack_ax(stack)) {
1572 ERR("Empty stack\n");
1573 ret = -EINVAL;
1574 goto end;
1575 }
1576 switch (vstack_ax(stack)->type) {
1577 case REG_DOUBLE:
1578 break;
1579 default:
1580 ERR("Incorrect register type %d for operation\n",
1581 (int) vstack_ax(stack)->type);
1582 ret = -EINVAL;
1583 goto end;
1584 }
1585
1586 vstack_ax(stack)->type = REG_DOUBLE;
1587 next_pc += sizeof(struct unary_op);
1588 break;
1589 }
1590
1591 /* logical */
1592 case BYTECODE_OP_AND:
1593 case BYTECODE_OP_OR:
1594 {
1595 struct logical_op *insn = (struct logical_op *) pc;
1596 int merge_ret;
1597
1598 /* Add merge point to table */
1599 merge_ret = merge_point_add_check(merge_points,
1600 insn->skip_offset, stack);
1601 if (merge_ret) {
1602 ret = merge_ret;
1603 goto end;
1604 }
1605
1606 if (!vstack_ax(stack)) {
1607 ERR("Empty stack\n");
1608 ret = -EINVAL;
1609 goto end;
1610 }
1611 		/* There is always a cast-to-s64 operation before an and/or op. */
1612 switch (vstack_ax(stack)->type) {
1613 case REG_S64:
1614 case REG_U64:
1615 break;
1616 default:
1617 ERR("Incorrect register type %d for operation\n",
1618 (int) vstack_ax(stack)->type);
1619 ret = -EINVAL;
1620 goto end;
1621 }
1622
1623 /* Continue to next instruction */
1624 /* Pop 1 when jump not taken */
1625 if (vstack_pop(stack)) {
1626 ret = -EINVAL;
1627 goto end;
1628 }
1629 next_pc += sizeof(struct logical_op);
1630 break;
1631 }
1632
1633 /* load field ref */
1634 case BYTECODE_OP_LOAD_FIELD_REF:
1635 {
1636 ERR("Unknown field ref type\n");
1637 ret = -EINVAL;
1638 goto end;
1639 }
1640 /* get context ref */
1641 case BYTECODE_OP_GET_CONTEXT_REF:
1642 {
1643 if (vstack_push(stack)) {
1644 ret = -EINVAL;
1645 goto end;
1646 }
1647 vstack_ax(stack)->type = REG_UNKNOWN;
1648 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1649 break;
1650 }
1651 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
1652 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
1653 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
1654 {
1655 if (vstack_push(stack)) {
1656 ret = -EINVAL;
1657 goto end;
1658 }
1659 vstack_ax(stack)->type = REG_STRING;
1660 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1661 break;
1662 }
1663 case BYTECODE_OP_LOAD_FIELD_REF_S64:
1664 case BYTECODE_OP_GET_CONTEXT_REF_S64:
1665 {
1666 if (vstack_push(stack)) {
1667 ret = -EINVAL;
1668 goto end;
1669 }
1670 vstack_ax(stack)->type = REG_S64;
1671 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1672 break;
1673 }
1674 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
1675 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
1676 {
1677 if (vstack_push(stack)) {
1678 ret = -EINVAL;
1679 goto end;
1680 }
1681 vstack_ax(stack)->type = REG_DOUBLE;
1682 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1683 break;
1684 }
1685
1686 /* load from immediate operand */
1687 case BYTECODE_OP_LOAD_STRING:
1688 {
1689 struct load_op *insn = (struct load_op *) pc;
1690
1691 if (vstack_push(stack)) {
1692 ret = -EINVAL;
1693 goto end;
1694 }
1695 vstack_ax(stack)->type = REG_STRING;
1696 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1697 break;
1698 }
1699
1700 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
1701 {
1702 struct load_op *insn = (struct load_op *) pc;
1703
1704 if (vstack_push(stack)) {
1705 ret = -EINVAL;
1706 goto end;
1707 }
1708 vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
1709 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1710 break;
1711 }
1712
1713 case BYTECODE_OP_LOAD_S64:
1714 {
1715 if (vstack_push(stack)) {
1716 ret = -EINVAL;
1717 goto end;
1718 }
1719 vstack_ax(stack)->type = REG_S64;
1720 next_pc += sizeof(struct load_op)
1721 + sizeof(struct literal_numeric);
1722 break;
1723 }
1724
1725 case BYTECODE_OP_LOAD_DOUBLE:
1726 {
1727 if (vstack_push(stack)) {
1728 ret = -EINVAL;
1729 goto end;
1730 }
1731 vstack_ax(stack)->type = REG_DOUBLE;
1732 next_pc += sizeof(struct load_op)
1733 + sizeof(struct literal_double);
1734 break;
1735 }
1736
1737 case BYTECODE_OP_CAST_TO_S64:
1738 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
1739 {
1740 /* Pop 1, push 1 */
1741 if (!vstack_ax(stack)) {
1742 ERR("Empty stack\n");
1743 ret = -EINVAL;
1744 goto end;
1745 }
1746 switch (vstack_ax(stack)->type) {
1747 case REG_S64:
1748 case REG_U64:
1749 case REG_DOUBLE:
1750 case REG_UNKNOWN:
1751 break;
1752 default:
1753 ERR("Incorrect register type %d for cast\n",
1754 (int) vstack_ax(stack)->type);
1755 ret = -EINVAL;
1756 goto end;
1757 }
1758 vstack_ax(stack)->type = REG_S64;
1759 next_pc += sizeof(struct cast_op);
1760 break;
1761 }
1762 case BYTECODE_OP_CAST_NOP:
1763 {
1764 next_pc += sizeof(struct cast_op);
1765 break;
1766 }
1767
1768 /*
1769 * Instructions for recursive traversal through composed types.
1770 */
1771 case BYTECODE_OP_GET_CONTEXT_ROOT:
1772 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
1773 case BYTECODE_OP_GET_PAYLOAD_ROOT:
1774 {
1775 if (vstack_push(stack)) {
1776 ret = -EINVAL;
1777 goto end;
1778 }
1779 vstack_ax(stack)->type = REG_PTR;
1780 next_pc += sizeof(struct load_op);
1781 break;
1782 }
1783
1784 case BYTECODE_OP_LOAD_FIELD:
1785 {
1786 /* Pop 1, push 1 */
1787 if (!vstack_ax(stack)) {
1788 ERR("Empty stack\n");
1789 ret = -EINVAL;
1790 goto end;
1791 }
1792 if (vstack_ax(stack)->type != REG_PTR) {
1793 ERR("Expecting pointer on top of stack\n");
1794 ret = -EINVAL;
1795 goto end;
1796 }
1797 vstack_ax(stack)->type = REG_UNKNOWN;
1798 next_pc += sizeof(struct load_op);
1799 break;
1800 }
1801
1802 case BYTECODE_OP_LOAD_FIELD_S8:
1803 case BYTECODE_OP_LOAD_FIELD_S16:
1804 case BYTECODE_OP_LOAD_FIELD_S32:
1805 case BYTECODE_OP_LOAD_FIELD_S64:
1806 {
1807 /* Pop 1, push 1 */
1808 if (!vstack_ax(stack)) {
1809 ERR("Empty stack\n");
1810 ret = -EINVAL;
1811 goto end;
1812 }
1813 if (vstack_ax(stack)->type != REG_PTR) {
1814 ERR("Expecting pointer on top of stack\n");
1815 ret = -EINVAL;
1816 goto end;
1817 }
1818 vstack_ax(stack)->type = REG_S64;
1819 next_pc += sizeof(struct load_op);
1820 break;
1821 }
1822
1823 case BYTECODE_OP_LOAD_FIELD_U8:
1824 case BYTECODE_OP_LOAD_FIELD_U16:
1825 case BYTECODE_OP_LOAD_FIELD_U32:
1826 case BYTECODE_OP_LOAD_FIELD_U64:
1827 {
1828 /* Pop 1, push 1 */
1829 if (!vstack_ax(stack)) {
1830 ERR("Empty stack\n");
1831 ret = -EINVAL;
1832 goto end;
1833 }
1834 if (vstack_ax(stack)->type != REG_PTR) {
1835 ERR("Expecting pointer on top of stack\n");
1836 ret = -EINVAL;
1837 goto end;
1838 }
1839 vstack_ax(stack)->type = REG_U64;
1840 next_pc += sizeof(struct load_op);
1841 break;
1842 }
1843
1844 case BYTECODE_OP_LOAD_FIELD_STRING:
1845 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
1846 {
1847 /* Pop 1, push 1 */
1848 if (!vstack_ax(stack)) {
1849 ERR("Empty stack\n");
1850 ret = -EINVAL;
1851 goto end;
1852 }
1853 if (vstack_ax(stack)->type != REG_PTR) {
1854 ERR("Expecting pointer on top of stack\n");
1855 ret = -EINVAL;
1856 goto end;
1857 }
1858 vstack_ax(stack)->type = REG_STRING;
1859 next_pc += sizeof(struct load_op);
1860 break;
1861 }
1862
1863 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
1864 {
1865 /* Pop 1, push 1 */
1866 if (!vstack_ax(stack)) {
1867 ERR("Empty stack\n");
1868 ret = -EINVAL;
1869 goto end;
1870 }
1871 if (vstack_ax(stack)->type != REG_PTR) {
1872 ERR("Expecting pointer on top of stack\n");
1873 ret = -EINVAL;
1874 goto end;
1875 }
1876 vstack_ax(stack)->type = REG_DOUBLE;
1877 next_pc += sizeof(struct load_op);
1878 break;
1879 }
1880
1881 case BYTECODE_OP_GET_SYMBOL:
1882 case BYTECODE_OP_GET_SYMBOL_FIELD:
1883 {
1884 /* Pop 1, push 1 */
1885 if (!vstack_ax(stack)) {
1886 ERR("Empty stack\n");
1887 ret = -EINVAL;
1888 goto end;
1889 }
1890 if (vstack_ax(stack)->type != REG_PTR) {
1891 ERR("Expecting pointer on top of stack\n");
1892 ret = -EINVAL;
1893 goto end;
1894 }
1895 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1896 break;
1897 }
1898
1899 case BYTECODE_OP_GET_INDEX_U16:
1900 {
1901 /* Pop 1, push 1 */
1902 if (!vstack_ax(stack)) {
1903 ERR("Empty stack\n");
1904 ret = -EINVAL;
1905 goto end;
1906 }
1907 if (vstack_ax(stack)->type != REG_PTR) {
1908 ERR("Expecting pointer on top of stack\n");
1909 ret = -EINVAL;
1910 goto end;
1911 }
1912 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1913 break;
1914 }
1915
1916 case BYTECODE_OP_GET_INDEX_U64:
1917 {
1918 /* Pop 1, push 1 */
1919 if (!vstack_ax(stack)) {
1920 ERR("Empty stack\n");
1921 ret = -EINVAL;
1922 goto end;
1923 }
1924 if (vstack_ax(stack)->type != REG_PTR) {
1925 ERR("Expecting pointer on top of stack\n");
1926 ret = -EINVAL;
1927 goto end;
1928 }
1929 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1930 break;
1931 }
1932
1933 }
1934 end:
1935 *_next_pc = next_pc;
1936 return ret;
1937 }
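
/*
 * Note: exec_insn() performs an abstract interpretation of the bytecode:
 * it only tracks register types on the virtual stack (vstack), never
 * values, and advances next_pc by the encoded size of each instruction.
 * Returning 1 continues iteration, 0 stops it on a return opcode, and a
 * negative value aborts validation.
 */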
1938
1939 /*
1940 * Never called concurrently (hash seed is shared).
1941 */
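
/*
 * Validation is a single forward pass over the bytecode. For each
 * instruction it: (1) checks that the instruction fits within the
 * bytecode buffer (bytecode_validate_overflow), (2) checks operand typing
 * for the current stack state and any merge point recorded for this
 * offset (validate_instruction_all_contexts), and (3) abstractly executes
 * the instruction to update the type stack and compute the next pc
 * (exec_insn). Logical and/or instructions register merge points for
 * their skip targets so that both branches are type-checked.
 */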
1942 int lttng_bytecode_validate(struct bytecode_runtime *bytecode)
1943 {
1944 struct lttng_ust_lfht *merge_points;
1945 char *pc, *next_pc, *start_pc;
1946 int ret = -EINVAL;
1947 struct vstack stack;
1948
1949 vstack_init(&stack);
1950
1951 if (!lttng_hash_seed_ready) {
1952 lttng_hash_seed = time(NULL);
1953 lttng_hash_seed_ready = 1;
1954 }
1955 /*
1956 * Note: merge_points hash table used by single thread, and
1957 * never concurrently resized. Therefore, we can use it without
1958 * holding RCU read-side lock and free nodes without using
1959 * call_rcu.
1960 */
1961 merge_points = lttng_ust_lfht_new(DEFAULT_NR_MERGE_POINTS,
1962 MIN_NR_BUCKETS, MAX_NR_BUCKETS,
1963 0, NULL);
1964 if (!merge_points) {
1965 ERR("Error allocating hash table for bytecode validation\n");
1966 return -ENOMEM;
1967 }
1968 start_pc = &bytecode->code[0];
1969 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
1970 pc = next_pc) {
1971 ret = bytecode_validate_overflow(bytecode, start_pc, pc);
1972 if (ret != 0) {
1973 if (ret == -ERANGE)
1974 ERR("Bytecode overflow\n");
1975 goto end;
1976 }
1977 dbg_printf("Validating op %s (%u)\n",
1978 print_op((unsigned int) *(bytecode_opcode_t *) pc),
1979 (unsigned int) *(bytecode_opcode_t *) pc);
1980
1981 /*
1982 * For each instruction, validate the current context
1983 * (traversal of entire execution flow), and validate
1984 * all merge points targeting this instruction.
1985 */
1986 ret = validate_instruction_all_contexts(bytecode, merge_points,
1987 &stack, start_pc, pc);
1988 if (ret)
1989 goto end;
1990 ret = exec_insn(bytecode, merge_points, &stack, &next_pc, pc);
1991 if (ret <= 0)
1992 goto end;
1993 }
1994 end:
1995 if (delete_all_nodes(merge_points)) {
1996 if (!ret) {
1997 ERR("Unexpected merge points\n");
1998 ret = -EINVAL;
1999 }
2000 }
2001 if (lttng_ust_lfht_destroy(merge_points)) {
2002 ERR("Error destroying hash table\n");
2003 }
2004 return ret;
2005 }