1 /*
2 * lttng-bytecode-validator.c
3 *
4 * LTTng UST bytecode validator.
5 *
6 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
7 *
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
14 *
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
24 * SOFTWARE.
25 */
26
27 #define _LGPL_SOURCE
28 #include <stddef.h>
29 #include <stdint.h>
30 #include <time.h>
31
32 #include "rculfhash.h"
33
34 #include "lttng-bytecode.h"
35 #include "lttng-hash-helper.h"
36 #include "string-utils.h"
37 #include "ust-events-internal.h"
38
39 /*
40 * Number of merge points used to size the hash table. The hash table is
41 * initialized to that size and never resized, because resizing would
42 * trigger RCU worker thread execution: we fall back on linear traversal
43 * if the number of merge points exceeds this value.
44 */
45 #define DEFAULT_NR_MERGE_POINTS 128
46 #define MIN_NR_BUCKETS 128
47 #define MAX_NR_BUCKETS 128
48
49 /* merge point table node */
50 struct lfht_mp_node {
51 struct lttng_ust_lfht_node node;
52
53 /* Context at merge point */
54 struct vstack stack;
55 unsigned long target_pc;
56 };
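/*
 * A merge point records the virtual stack expected at the target of a
 * forward jump (the skip_offset of a logical and/or instruction). When
 * the linear validation pass later reaches that target pc, the stack it
 * carries must be compatible with every merge point recorded for that
 * offset (see merge_point_add_check() and
 * validate_instruction_all_contexts() below).
 */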
57
58 static unsigned long lttng_hash_seed;
59 static unsigned int lttng_hash_seed_ready;
60
61 static
62 int lttng_hash_match(struct lttng_ust_lfht_node *node, const void *key)
63 {
64 struct lfht_mp_node *mp_node =
65 caa_container_of(node, struct lfht_mp_node, node);
66 unsigned long key_pc = (unsigned long) key;
67
68 if (mp_node->target_pc == key_pc)
69 return 1;
70 else
71 return 0;
72 }
73
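/*
 * Two virtual stacks are considered compatible when they have the same
 * depth and each pair of slots either holds matching types or contains
 * at least one dynamically-typed (REG_UNKNOWN) entry. Returns 0 when
 * compatible, 1 otherwise.
 */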
74 static
75 int merge_points_compare(const struct vstack *stacka,
76 const struct vstack *stackb)
77 {
78 int i, len;
79
80 if (stacka->top != stackb->top)
81 return 1;
82 len = stacka->top + 1;
83 assert(len >= 0);
84 for (i = 0; i < len; i++) {
85 if (stacka->e[i].type != REG_UNKNOWN
86 && stackb->e[i].type != REG_UNKNOWN
87 && stacka->e[i].type != stackb->e[i].type)
88 return 1;
89 }
90 return 0;
91 }
92
93 static
94 int merge_point_add_check(struct lttng_ust_lfht *ht, unsigned long target_pc,
95 const struct vstack *stack)
96 {
97 struct lfht_mp_node *node;
98 unsigned long hash = lttng_hash_mix((const char *) target_pc,
99 sizeof(target_pc),
100 lttng_hash_seed);
101 struct lttng_ust_lfht_node *ret;
102
103 dbg_printf("Bytecode: adding merge point at offset %lu, hash %lu\n",
104 target_pc, hash);
105 node = zmalloc(sizeof(struct lfht_mp_node));
106 if (!node)
107 return -ENOMEM;
108 node->target_pc = target_pc;
109 memcpy(&node->stack, stack, sizeof(node->stack));
110 ret = lttng_ust_lfht_add_unique(ht, hash, lttng_hash_match,
111 (const char *) target_pc, &node->node);
112 if (ret != &node->node) {
113 struct lfht_mp_node *ret_mp =
114 caa_container_of(ret, struct lfht_mp_node, node);
115
116 /* Key already present */
117 dbg_printf("Bytecode: compare merge points for offset %lu, hash %lu\n",
118 target_pc, hash);
119 free(node);
120 if (merge_points_compare(stack, &ret_mp->stack)) {
121 ERR("Merge points differ for offset %lu\n",
122 target_pc);
123 return -EINVAL;
124 }
125 }
126 return 0;
127 }
128
129 /*
130 * Binary comparators use top of stack and top of stack -1.
131 * Return 0 if typing is known to match, 1 if typing is dynamic
132 * (unknown), negative error value on error.
133 */
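/*
 * Note: a star-globbing pattern may only be compared against a string
 * with the == or != operators; ordering comparators are rejected.
 */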
134 static
135 int bin_op_compare_check(struct vstack *stack, bytecode_opcode_t opcode,
136 const char *str)
137 {
138 if (unlikely(!vstack_ax(stack) || !vstack_bx(stack)))
139 goto error_empty;
140
141 switch (vstack_ax(stack)->type) {
142 default:
143 goto error_type;
144
145 case REG_UNKNOWN:
146 goto unknown;
147 case REG_STRING:
148 switch (vstack_bx(stack)->type) {
149 default:
150 goto error_type;
151
152 case REG_UNKNOWN:
153 goto unknown;
154 case REG_STRING:
155 break;
156 case REG_STAR_GLOB_STRING:
157 if (opcode != BYTECODE_OP_EQ && opcode != BYTECODE_OP_NE) {
158 goto error_mismatch;
159 }
160 break;
161 case REG_S64:
162 case REG_U64:
163 case REG_DOUBLE:
164 goto error_mismatch;
165 }
166 break;
167 case REG_STAR_GLOB_STRING:
168 switch (vstack_bx(stack)->type) {
169 default:
170 goto error_type;
171
172 case REG_UNKNOWN:
173 goto unknown;
174 case REG_STRING:
175 if (opcode != BYTECODE_OP_EQ && opcode != BYTECODE_OP_NE) {
176 goto error_mismatch;
177 }
178 break;
179 case REG_STAR_GLOB_STRING:
180 case REG_S64:
181 case REG_U64:
182 case REG_DOUBLE:
183 goto error_mismatch;
184 }
185 break;
186 case REG_S64:
187 case REG_U64:
188 case REG_DOUBLE:
189 switch (vstack_bx(stack)->type) {
190 default:
191 goto error_type;
192
193 case REG_UNKNOWN:
194 goto unknown;
195 case REG_STRING:
196 case REG_STAR_GLOB_STRING:
197 goto error_mismatch;
198 case REG_S64:
199 case REG_U64:
200 case REG_DOUBLE:
201 break;
202 }
203 break;
204 }
205 return 0;
206
207 unknown:
208 return 1;
209
210 error_mismatch:
211 ERR("type mismatch for '%s' binary operator\n", str);
212 return -EINVAL;
213
214 error_empty:
215 ERR("empty stack for '%s' binary operator\n", str);
216 return -EINVAL;
217
218 error_type:
219 ERR("unknown type for '%s' binary operator\n", str);
220 return -EINVAL;
221 }
222
223 /*
224 * Binary bitwise operators use top of stack and top of stack -1.
225 * Return 0 if typing is known to match, 1 if typing is dynamic
226 * (unknown), negative error value on error.
227 */
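/*
 * Bitwise binary operators only accept integer (S64/U64) or dynamic
 * (REG_UNKNOWN) operands.
 */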
228 static
229 int bin_op_bitwise_check(struct vstack *stack, bytecode_opcode_t opcode,
230 const char *str)
231 {
232 if (unlikely(!vstack_ax(stack) || !vstack_bx(stack)))
233 goto error_empty;
234
235 switch (vstack_ax(stack)->type) {
236 default:
237 goto error_type;
238
239 case REG_UNKNOWN:
240 goto unknown;
241 case REG_S64:
242 case REG_U64:
243 switch (vstack_bx(stack)->type) {
244 default:
245 goto error_type;
246
247 case REG_UNKNOWN:
248 goto unknown;
249 case REG_S64:
250 case REG_U64:
251 break;
252 }
253 break;
254 }
255 return 0;
256
257 unknown:
258 return 1;
259
260 error_empty:
261 ERR("empty stack for '%s' binary operator\n", str);
262 return -EINVAL;
263
264 error_type:
265 ERR("unknown type for '%s' binary operator\n", str);
266 return -EINVAL;
267 }
268
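/*
 * Check that a get_symbol immediate refers to a NUL-terminated string
 * located entirely within the bytecode's relocation table (the region
 * starting at reloc_offset and ending at bc.len).
 */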
269 static
270 int validate_get_symbol(struct bytecode_runtime *bytecode,
271 const struct get_symbol *sym)
272 {
273 const char *str, *str_limit;
274 size_t len_limit;
275
276 if (sym->offset >= bytecode->p.bc->bc.len - bytecode->p.bc->bc.reloc_offset)
277 return -EINVAL;
278
279 str = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + sym->offset;
280 str_limit = bytecode->p.bc->bc.data + bytecode->p.bc->bc.len;
281 len_limit = str_limit - str;
282 if (strnlen(str, len_limit) == len_limit)
283 return -EINVAL;
284 return 0;
285 }
286
287 /*
288 * Validate bytecode range overflow within the validation pass.
289 * Called for each instruction encountered.
290 */
291 static
292 int bytecode_validate_overflow(struct bytecode_runtime *bytecode,
293 char *start_pc, char *pc)
294 {
295 int ret = 0;
296
297 switch (*(bytecode_opcode_t *) pc) {
298 case BYTECODE_OP_UNKNOWN:
299 default:
300 {
301 ERR("unknown bytecode op %u\n",
302 (unsigned int) *(bytecode_opcode_t *) pc);
303 ret = -EINVAL;
304 break;
305 }
306
307 case BYTECODE_OP_RETURN:
308 case BYTECODE_OP_RETURN_S64:
309 {
310 if (unlikely(pc + sizeof(struct return_op)
311 > start_pc + bytecode->len)) {
312 ret = -ERANGE;
313 }
314 break;
315 }
316
317 /* binary */
318 case BYTECODE_OP_MUL:
319 case BYTECODE_OP_DIV:
320 case BYTECODE_OP_MOD:
321 case BYTECODE_OP_PLUS:
322 case BYTECODE_OP_MINUS:
323 {
324 ERR("unsupported bytecode op %u\n",
325 (unsigned int) *(bytecode_opcode_t *) pc);
326 ret = -EINVAL;
327 break;
328 }
329
330 case BYTECODE_OP_EQ:
331 case BYTECODE_OP_NE:
332 case BYTECODE_OP_GT:
333 case BYTECODE_OP_LT:
334 case BYTECODE_OP_GE:
335 case BYTECODE_OP_LE:
336 case BYTECODE_OP_EQ_STRING:
337 case BYTECODE_OP_NE_STRING:
338 case BYTECODE_OP_GT_STRING:
339 case BYTECODE_OP_LT_STRING:
340 case BYTECODE_OP_GE_STRING:
341 case BYTECODE_OP_LE_STRING:
342 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
343 case BYTECODE_OP_NE_STAR_GLOB_STRING:
344 case BYTECODE_OP_EQ_S64:
345 case BYTECODE_OP_NE_S64:
346 case BYTECODE_OP_GT_S64:
347 case BYTECODE_OP_LT_S64:
348 case BYTECODE_OP_GE_S64:
349 case BYTECODE_OP_LE_S64:
350 case BYTECODE_OP_EQ_DOUBLE:
351 case BYTECODE_OP_NE_DOUBLE:
352 case BYTECODE_OP_GT_DOUBLE:
353 case BYTECODE_OP_LT_DOUBLE:
354 case BYTECODE_OP_GE_DOUBLE:
355 case BYTECODE_OP_LE_DOUBLE:
356 case BYTECODE_OP_EQ_DOUBLE_S64:
357 case BYTECODE_OP_NE_DOUBLE_S64:
358 case BYTECODE_OP_GT_DOUBLE_S64:
359 case BYTECODE_OP_LT_DOUBLE_S64:
360 case BYTECODE_OP_GE_DOUBLE_S64:
361 case BYTECODE_OP_LE_DOUBLE_S64:
362 case BYTECODE_OP_EQ_S64_DOUBLE:
363 case BYTECODE_OP_NE_S64_DOUBLE:
364 case BYTECODE_OP_GT_S64_DOUBLE:
365 case BYTECODE_OP_LT_S64_DOUBLE:
366 case BYTECODE_OP_GE_S64_DOUBLE:
367 case BYTECODE_OP_LE_S64_DOUBLE:
368 case BYTECODE_OP_BIT_RSHIFT:
369 case BYTECODE_OP_BIT_LSHIFT:
370 case BYTECODE_OP_BIT_AND:
371 case BYTECODE_OP_BIT_OR:
372 case BYTECODE_OP_BIT_XOR:
373 {
374 if (unlikely(pc + sizeof(struct binary_op)
375 > start_pc + bytecode->len)) {
376 ret = -ERANGE;
377 }
378 break;
379 }
380
381 /* unary */
382 case BYTECODE_OP_UNARY_PLUS:
383 case BYTECODE_OP_UNARY_MINUS:
384 case BYTECODE_OP_UNARY_NOT:
385 case BYTECODE_OP_UNARY_PLUS_S64:
386 case BYTECODE_OP_UNARY_MINUS_S64:
387 case BYTECODE_OP_UNARY_NOT_S64:
388 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
389 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
390 case BYTECODE_OP_UNARY_NOT_DOUBLE:
391 case BYTECODE_OP_UNARY_BIT_NOT:
392 {
393 if (unlikely(pc + sizeof(struct unary_op)
394 > start_pc + bytecode->len)) {
395 ret = -ERANGE;
396 }
397 break;
398 }
399
400 /* logical */
401 case BYTECODE_OP_AND:
402 case BYTECODE_OP_OR:
403 {
404 if (unlikely(pc + sizeof(struct logical_op)
405 > start_pc + bytecode->len)) {
406 ret = -ERANGE;
407 }
408 break;
409 }
410
411 /* load field ref */
412 case BYTECODE_OP_LOAD_FIELD_REF:
413 {
414 ERR("Unknown field ref type\n");
415 ret = -EINVAL;
416 break;
417 }
418
419 /* get context ref */
420 case BYTECODE_OP_GET_CONTEXT_REF:
421 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
422 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
423 case BYTECODE_OP_LOAD_FIELD_REF_S64:
424 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
425 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
426 case BYTECODE_OP_GET_CONTEXT_REF_S64:
427 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
428 {
429 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct field_ref)
430 > start_pc + bytecode->len)) {
431 ret = -ERANGE;
432 }
433 break;
434 }
435
436 /* load from immediate operand */
437 case BYTECODE_OP_LOAD_STRING:
438 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
439 {
440 struct load_op *insn = (struct load_op *) pc;
441 uint32_t str_len, maxlen;
442
443 if (unlikely(pc + sizeof(struct load_op)
444 > start_pc + bytecode->len)) {
445 ret = -ERANGE;
446 break;
447 }
448
449 maxlen = start_pc + bytecode->len - pc - sizeof(struct load_op);
450 str_len = strnlen(insn->data, maxlen);
451 if (unlikely(str_len >= maxlen)) {
452 /* Final '\0' not found within range */
453 ret = -ERANGE;
454 }
455 break;
456 }
457
458 case BYTECODE_OP_LOAD_S64:
459 {
460 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct literal_numeric)
461 > start_pc + bytecode->len)) {
462 ret = -ERANGE;
463 }
464 break;
465 }
466
467 case BYTECODE_OP_LOAD_DOUBLE:
468 {
469 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct literal_double)
470 > start_pc + bytecode->len)) {
471 ret = -ERANGE;
472 }
473 break;
474 }
475
476 case BYTECODE_OP_CAST_TO_S64:
477 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
478 case BYTECODE_OP_CAST_NOP:
479 {
480 if (unlikely(pc + sizeof(struct cast_op)
481 > start_pc + bytecode->len)) {
482 ret = -ERANGE;
483 }
484 break;
485 }
486
487 /*
488 * Instructions for recursive traversal through composed types.
489 */
490 case BYTECODE_OP_GET_CONTEXT_ROOT:
491 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
492 case BYTECODE_OP_GET_PAYLOAD_ROOT:
493 case BYTECODE_OP_LOAD_FIELD:
494 case BYTECODE_OP_LOAD_FIELD_S8:
495 case BYTECODE_OP_LOAD_FIELD_S16:
496 case BYTECODE_OP_LOAD_FIELD_S32:
497 case BYTECODE_OP_LOAD_FIELD_S64:
498 case BYTECODE_OP_LOAD_FIELD_U8:
499 case BYTECODE_OP_LOAD_FIELD_U16:
500 case BYTECODE_OP_LOAD_FIELD_U32:
501 case BYTECODE_OP_LOAD_FIELD_U64:
502 case BYTECODE_OP_LOAD_FIELD_STRING:
503 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
504 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
505 if (unlikely(pc + sizeof(struct load_op)
506 > start_pc + bytecode->len)) {
507 ret = -ERANGE;
508 }
509 break;
510
511 case BYTECODE_OP_GET_SYMBOL:
512 {
513 struct load_op *insn = (struct load_op *) pc;
514 struct get_symbol *sym = (struct get_symbol *) insn->data;
515
516 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_symbol)
517 > start_pc + bytecode->len)) {
518 ret = -ERANGE;
519 break;
520 }
521 ret = validate_get_symbol(bytecode, sym);
522 break;
523 }
524
525 case BYTECODE_OP_GET_SYMBOL_FIELD:
526 ERR("Unexpected get symbol field");
527 ret = -EINVAL;
528 break;
529
530 case BYTECODE_OP_GET_INDEX_U16:
531 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_index_u16)
532 > start_pc + bytecode->len)) {
533 ret = -ERANGE;
534 }
535 break;
536
537 case BYTECODE_OP_GET_INDEX_U64:
538 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_index_u64)
539 > start_pc + bytecode->len)) {
540 ret = -ERANGE;
541 }
542 break;
543 }
544
545 return ret;
546 }
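/*
 * Note that the overflow pass above only checks that each instruction
 * and its immediate operands fit within bytecode->len (and that
 * immediate strings are NUL-terminated); type checking of operands is
 * left to validate_instruction_context() below.
 */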
547
548 static
549 unsigned long delete_all_nodes(struct lttng_ust_lfht *ht)
550 {
551 struct lttng_ust_lfht_iter iter;
552 struct lfht_mp_node *node;
553 unsigned long nr_nodes = 0;
554
555 lttng_ust_lfht_for_each_entry(ht, &iter, node, node) {
556 int ret;
557
558 ret = lttng_ust_lfht_del(ht, lttng_ust_lfht_iter_get_node(&iter));
559 assert(!ret);
560 /* note: this hash table is never used concurrently */
561 free(node);
562 nr_nodes++;
563 }
564 return nr_nodes;
565 }
566
567 /*
568 * Return value:
569 * >=0: success
570 * <0: error
571 */
572 static
573 int validate_instruction_context(struct bytecode_runtime *bytecode,
574 struct vstack *stack,
575 char *start_pc,
576 char *pc)
577 {
578 int ret = 0;
579 const bytecode_opcode_t opcode = *(bytecode_opcode_t *) pc;
580
581 switch (opcode) {
582 case BYTECODE_OP_UNKNOWN:
583 default:
584 {
585 ERR("unknown bytecode op %u\n",
586 (unsigned int) *(bytecode_opcode_t *) pc);
587 ret = -EINVAL;
588 goto end;
589 }
590
591 case BYTECODE_OP_RETURN:
592 case BYTECODE_OP_RETURN_S64:
593 {
594 goto end;
595 }
596
597 /* binary */
598 case BYTECODE_OP_MUL:
599 case BYTECODE_OP_DIV:
600 case BYTECODE_OP_MOD:
601 case BYTECODE_OP_PLUS:
602 case BYTECODE_OP_MINUS:
603 {
604 ERR("unsupported bytecode op %u\n",
605 (unsigned int) opcode);
606 ret = -EINVAL;
607 goto end;
608 }
609
610 case BYTECODE_OP_EQ:
611 {
612 ret = bin_op_compare_check(stack, opcode, "==");
613 if (ret < 0)
614 goto end;
615 break;
616 }
617 case BYTECODE_OP_NE:
618 {
619 ret = bin_op_compare_check(stack, opcode, "!=");
620 if (ret < 0)
621 goto end;
622 break;
623 }
624 case BYTECODE_OP_GT:
625 {
626 ret = bin_op_compare_check(stack, opcode, ">");
627 if (ret < 0)
628 goto end;
629 break;
630 }
631 case BYTECODE_OP_LT:
632 {
633 ret = bin_op_compare_check(stack, opcode, "<");
634 if (ret < 0)
635 goto end;
636 break;
637 }
638 case BYTECODE_OP_GE:
639 {
640 ret = bin_op_compare_check(stack, opcode, ">=");
641 if (ret < 0)
642 goto end;
643 break;
644 }
645 case BYTECODE_OP_LE:
646 {
647 ret = bin_op_compare_check(stack, opcode, "<=");
648 if (ret < 0)
649 goto end;
650 break;
651 }
652
653 case BYTECODE_OP_EQ_STRING:
654 case BYTECODE_OP_NE_STRING:
655 case BYTECODE_OP_GT_STRING:
656 case BYTECODE_OP_LT_STRING:
657 case BYTECODE_OP_GE_STRING:
658 case BYTECODE_OP_LE_STRING:
659 {
660 if (!vstack_ax(stack) || !vstack_bx(stack)) {
661 ERR("Empty stack\n");
662 ret = -EINVAL;
663 goto end;
664 }
665 if (vstack_ax(stack)->type != REG_STRING
666 || vstack_bx(stack)->type != REG_STRING) {
667 ERR("Unexpected register type for string comparator\n");
668 ret = -EINVAL;
669 goto end;
670 }
671 break;
672 }
673
674 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
675 case BYTECODE_OP_NE_STAR_GLOB_STRING:
676 {
677 if (!vstack_ax(stack) || !vstack_bx(stack)) {
678 ERR("Empty stack\n");
679 ret = -EINVAL;
680 goto end;
681 }
682 if (vstack_ax(stack)->type != REG_STAR_GLOB_STRING
683 && vstack_bx(stack)->type != REG_STAR_GLOB_STRING) {
684 ERR("Unexpected register type for globbing pattern comparator\n");
685 ret = -EINVAL;
686 goto end;
687 }
688 break;
689 }
690
691 case BYTECODE_OP_EQ_S64:
692 case BYTECODE_OP_NE_S64:
693 case BYTECODE_OP_GT_S64:
694 case BYTECODE_OP_LT_S64:
695 case BYTECODE_OP_GE_S64:
696 case BYTECODE_OP_LE_S64:
697 {
698 if (!vstack_ax(stack) || !vstack_bx(stack)) {
699 ERR("Empty stack\n");
700 ret = -EINVAL;
701 goto end;
702 }
703 switch (vstack_ax(stack)->type) {
704 case REG_S64:
705 case REG_U64:
706 break;
707 default:
708 ERR("Unexpected register type for s64 comparator\n");
709 ret = -EINVAL;
710 goto end;
711 }
712 switch (vstack_bx(stack)->type) {
713 case REG_S64:
714 case REG_U64:
715 break;
716 default:
717 ERR("Unexpected register type for s64 comparator\n");
718 ret = -EINVAL;
719 goto end;
720 }
721 break;
722 }
723
724 case BYTECODE_OP_EQ_DOUBLE:
725 case BYTECODE_OP_NE_DOUBLE:
726 case BYTECODE_OP_GT_DOUBLE:
727 case BYTECODE_OP_LT_DOUBLE:
728 case BYTECODE_OP_GE_DOUBLE:
729 case BYTECODE_OP_LE_DOUBLE:
730 {
731 if (!vstack_ax(stack) || !vstack_bx(stack)) {
732 ERR("Empty stack\n");
733 ret = -EINVAL;
734 goto end;
735 }
736 if (vstack_ax(stack)->type != REG_DOUBLE && vstack_bx(stack)->type != REG_DOUBLE) {
737 ERR("Double operator should have two double registers\n");
738 ret = -EINVAL;
739 goto end;
740 }
741 break;
742 }
743
744 case BYTECODE_OP_EQ_DOUBLE_S64:
745 case BYTECODE_OP_NE_DOUBLE_S64:
746 case BYTECODE_OP_GT_DOUBLE_S64:
747 case BYTECODE_OP_LT_DOUBLE_S64:
748 case BYTECODE_OP_GE_DOUBLE_S64:
749 case BYTECODE_OP_LE_DOUBLE_S64:
750 {
751 if (!vstack_ax(stack) || !vstack_bx(stack)) {
752 ERR("Empty stack\n");
753 ret = -EINVAL;
754 goto end;
755 }
756 switch (vstack_ax(stack)->type) {
757 case REG_S64:
758 case REG_U64:
759 break;
760 default:
761 ERR("Double-S64 operator has unexpected register types\n");
762 ret = -EINVAL;
763 goto end;
764 }
765 switch (vstack_bx(stack)->type) {
766 case REG_DOUBLE:
767 break;
768 default:
769 ERR("Double-S64 operator has unexpected register types\n");
770 ret = -EINVAL;
771 goto end;
772 }
773 break;
774 }
775
776 case BYTECODE_OP_EQ_S64_DOUBLE:
777 case BYTECODE_OP_NE_S64_DOUBLE:
778 case BYTECODE_OP_GT_S64_DOUBLE:
779 case BYTECODE_OP_LT_S64_DOUBLE:
780 case BYTECODE_OP_GE_S64_DOUBLE:
781 case BYTECODE_OP_LE_S64_DOUBLE:
782 {
783 if (!vstack_ax(stack) || !vstack_bx(stack)) {
784 ERR("Empty stack\n");
785 ret = -EINVAL;
786 goto end;
787 }
788 switch (vstack_ax(stack)->type) {
789 case REG_DOUBLE:
790 break;
791 default:
792 ERR("S64-Double operator has unexpected register types\n");
793 ret = -EINVAL;
794 goto end;
795 }
796 switch (vstack_bx(stack)->type) {
797 case REG_S64:
798 case REG_U64:
799 break;
800 default:
801 ERR("S64-Double operator has unexpected register types\n");
802 ret = -EINVAL;
803 goto end;
804 }
805 break;
806 }
807
808 case BYTECODE_OP_BIT_RSHIFT:
809 ret = bin_op_bitwise_check(stack, opcode, ">>");
810 if (ret < 0)
811 goto end;
812 break;
813 case BYTECODE_OP_BIT_LSHIFT:
814 ret = bin_op_bitwise_check(stack, opcode, "<<");
815 if (ret < 0)
816 goto end;
817 break;
818 case BYTECODE_OP_BIT_AND:
819 ret = bin_op_bitwise_check(stack, opcode, "&");
820 if (ret < 0)
821 goto end;
822 break;
823 case BYTECODE_OP_BIT_OR:
824 ret = bin_op_bitwise_check(stack, opcode, "|");
825 if (ret < 0)
826 goto end;
827 break;
828 case BYTECODE_OP_BIT_XOR:
829 ret = bin_op_bitwise_check(stack, opcode, "^");
830 if (ret < 0)
831 goto end;
832 break;
833
834 /* unary */
835 case BYTECODE_OP_UNARY_PLUS:
836 case BYTECODE_OP_UNARY_MINUS:
837 case BYTECODE_OP_UNARY_NOT:
838 {
839 if (!vstack_ax(stack)) {
840 ERR("Empty stack\n");
841 ret = -EINVAL;
842 goto end;
843 }
844 switch (vstack_ax(stack)->type) {
845 default:
846 ERR("unknown register type\n");
847 ret = -EINVAL;
848 goto end;
849
850 case REG_STRING:
851 case REG_STAR_GLOB_STRING:
852 ERR("Unary op can only be applied to numeric or floating point registers\n");
853 ret = -EINVAL;
854 goto end;
855 case REG_S64:
856 break;
857 case REG_U64:
858 break;
859 case REG_DOUBLE:
860 break;
861 case REG_UNKNOWN:
862 break;
863 }
864 break;
865 }
866 case BYTECODE_OP_UNARY_BIT_NOT:
867 {
868 if (!vstack_ax(stack)) {
869 ERR("Empty stack\n");
870 ret = -EINVAL;
871 goto end;
872 }
873 switch (vstack_ax(stack)->type) {
874 default:
875 ERR("unknown register type\n");
876 ret = -EINVAL;
877 goto end;
878
879 case REG_STRING:
880 case REG_STAR_GLOB_STRING:
881 case REG_DOUBLE:
882 ERR("Unary bitwise op can only be applied to numeric registers\n");
883 ret = -EINVAL;
884 goto end;
885 case REG_S64:
886 break;
887 case REG_U64:
888 break;
889 case REG_UNKNOWN:
890 break;
891 }
892 break;
893 }
894
895 case BYTECODE_OP_UNARY_PLUS_S64:
896 case BYTECODE_OP_UNARY_MINUS_S64:
897 case BYTECODE_OP_UNARY_NOT_S64:
898 {
899 if (!vstack_ax(stack)) {
900 ERR("Empty stack\n");
901 ret = -EINVAL;
902 goto end;
903 }
904 if (vstack_ax(stack)->type != REG_S64 &&
905 vstack_ax(stack)->type != REG_U64) {
906 ERR("Invalid register type\n");
907 ret = -EINVAL;
908 goto end;
909 }
910 break;
911 }
912
913 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
914 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
915 case BYTECODE_OP_UNARY_NOT_DOUBLE:
916 {
917 if (!vstack_ax(stack)) {
918 ERR("Empty stack\n");
919 ret = -EINVAL;
920 goto end;
921 }
922 if (vstack_ax(stack)->type != REG_DOUBLE) {
923 ERR("Invalid register type\n");
924 ret = -EINVAL;
925 goto end;
926 }
927 break;
928 }
929
930 /* logical */
931 case BYTECODE_OP_AND:
932 case BYTECODE_OP_OR:
933 {
934 struct logical_op *insn = (struct logical_op *) pc;
935
936 if (!vstack_ax(stack)) {
937 ERR("Empty stack\n");
938 ret = -EINVAL;
939 goto end;
940 }
941 if (vstack_ax(stack)->type != REG_S64
942 && vstack_ax(stack)->type != REG_U64
943 && vstack_ax(stack)->type != REG_UNKNOWN) {
944 ERR("Logical comparator expects S64, U64 or dynamic register\n");
945 ret = -EINVAL;
946 goto end;
947 }
948
949 dbg_printf("Validate jumping to bytecode offset %u\n",
950 (unsigned int) insn->skip_offset);
951 if (unlikely(start_pc + insn->skip_offset <= pc)) {
952 ERR("Loops are not allowed in bytecode\n");
953 ret = -EINVAL;
954 goto end;
955 }
956 break;
957 }
958
959 /* load field ref */
960 case BYTECODE_OP_LOAD_FIELD_REF:
961 {
962 ERR("Unknown field ref type\n");
963 ret = -EINVAL;
964 goto end;
965 }
966 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
967 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
968 {
969 struct load_op *insn = (struct load_op *) pc;
970 struct field_ref *ref = (struct field_ref *) insn->data;
971
972 dbg_printf("Validate load field ref offset %u type string\n",
973 ref->offset);
974 break;
975 }
976 case BYTECODE_OP_LOAD_FIELD_REF_S64:
977 {
978 struct load_op *insn = (struct load_op *) pc;
979 struct field_ref *ref = (struct field_ref *) insn->data;
980
981 dbg_printf("Validate load field ref offset %u type s64\n",
982 ref->offset);
983 break;
984 }
985 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
986 {
987 struct load_op *insn = (struct load_op *) pc;
988 struct field_ref *ref = (struct field_ref *) insn->data;
989
990 dbg_printf("Validate load field ref offset %u type double\n",
991 ref->offset);
992 break;
993 }
994
995 /* load from immediate operand */
996 case BYTECODE_OP_LOAD_STRING:
997 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
998 {
999 break;
1000 }
1001
1002 case BYTECODE_OP_LOAD_S64:
1003 {
1004 break;
1005 }
1006
1007 case BYTECODE_OP_LOAD_DOUBLE:
1008 {
1009 break;
1010 }
1011
1012 case BYTECODE_OP_CAST_TO_S64:
1013 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
1014 {
1015 struct cast_op *insn = (struct cast_op *) pc;
1016
1017 if (!vstack_ax(stack)) {
1018 ERR("Empty stack\n");
1019 ret = -EINVAL;
1020 goto end;
1021 }
1022 switch (vstack_ax(stack)->type) {
1023 default:
1024 ERR("unknown register type\n");
1025 ret = -EINVAL;
1026 goto end;
1027
1028 case REG_STRING:
1029 case REG_STAR_GLOB_STRING:
1030 ERR("Cast op can only be applied to numeric or floating point registers\n");
1031 ret = -EINVAL;
1032 goto end;
1033 case REG_S64:
1034 break;
1035 case REG_U64:
1036 break;
1037 case REG_DOUBLE:
1038 break;
1039 case REG_UNKNOWN:
1040 break;
1041 }
1042 if (insn->op == BYTECODE_OP_CAST_DOUBLE_TO_S64) {
1043 if (vstack_ax(stack)->type != REG_DOUBLE) {
1044 ERR("Cast expects double\n");
1045 ret = -EINVAL;
1046 goto end;
1047 }
1048 }
1049 break;
1050 }
1051 case BYTECODE_OP_CAST_NOP:
1052 {
1053 break;
1054 }
1055
1056 /* get context ref */
1057 case BYTECODE_OP_GET_CONTEXT_REF:
1058 {
1059 struct load_op *insn = (struct load_op *) pc;
1060 struct field_ref *ref = (struct field_ref *) insn->data;
1061
1062 dbg_printf("Validate get context ref offset %u type dynamic\n",
1063 ref->offset);
1064 break;
1065 }
1066 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
1067 {
1068 struct load_op *insn = (struct load_op *) pc;
1069 struct field_ref *ref = (struct field_ref *) insn->data;
1070
1071 dbg_printf("Validate get context ref offset %u type string\n",
1072 ref->offset);
1073 break;
1074 }
1075 case BYTECODE_OP_GET_CONTEXT_REF_S64:
1076 {
1077 struct load_op *insn = (struct load_op *) pc;
1078 struct field_ref *ref = (struct field_ref *) insn->data;
1079
1080 dbg_printf("Validate get context ref offset %u type s64\n",
1081 ref->offset);
1082 break;
1083 }
1084 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
1085 {
1086 struct load_op *insn = (struct load_op *) pc;
1087 struct field_ref *ref = (struct field_ref *) insn->data;
1088
1089 dbg_printf("Validate get context ref offset %u type double\n",
1090 ref->offset);
1091 break;
1092 }
1093
1094 /*
1095 * Instructions for recursive traversal through composed types.
1096 */
1097 case BYTECODE_OP_GET_CONTEXT_ROOT:
1098 {
1099 dbg_printf("Validate get context root\n");
1100 break;
1101 }
1102 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
1103 {
1104 dbg_printf("Validate get app context root\n");
1105 break;
1106 }
1107 case BYTECODE_OP_GET_PAYLOAD_ROOT:
1108 {
1109 dbg_printf("Validate get payload root\n");
1110 break;
1111 }
1112 case BYTECODE_OP_LOAD_FIELD:
1113 {
1114 /*
1115 * We tolerate an unknown field type at validation time,
1116 * because load specialization is performed in a phase
1117 * that follows validation.
1118 */
1119 dbg_printf("Validate load field\n");
1120 break;
1121 }
1122 case BYTECODE_OP_LOAD_FIELD_S8:
1123 {
1124 dbg_printf("Validate load field s8\n");
1125 break;
1126 }
1127 case BYTECODE_OP_LOAD_FIELD_S16:
1128 {
1129 dbg_printf("Validate load field s16\n");
1130 break;
1131 }
1132 case BYTECODE_OP_LOAD_FIELD_S32:
1133 {
1134 dbg_printf("Validate load field s32\n");
1135 break;
1136 }
1137 case BYTECODE_OP_LOAD_FIELD_S64:
1138 {
1139 dbg_printf("Validate load field s64\n");
1140 break;
1141 }
1142 case BYTECODE_OP_LOAD_FIELD_U8:
1143 {
1144 dbg_printf("Validate load field u8\n");
1145 break;
1146 }
1147 case BYTECODE_OP_LOAD_FIELD_U16:
1148 {
1149 dbg_printf("Validate load field u16\n");
1150 break;
1151 }
1152 case BYTECODE_OP_LOAD_FIELD_U32:
1153 {
1154 dbg_printf("Validate load field u32\n");
1155 break;
1156 }
1157 case BYTECODE_OP_LOAD_FIELD_U64:
1158 {
1159 dbg_printf("Validate load field u64\n");
1160 break;
1161 }
1162 case BYTECODE_OP_LOAD_FIELD_STRING:
1163 {
1164 dbg_printf("Validate load field string\n");
1165 break;
1166 }
1167 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
1168 {
1169 dbg_printf("Validate load field sequence\n");
1170 break;
1171 }
1172 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
1173 {
1174 dbg_printf("Validate load field double\n");
1175 break;
1176 }
1177
1178 case BYTECODE_OP_GET_SYMBOL:
1179 {
1180 struct load_op *insn = (struct load_op *) pc;
1181 struct get_symbol *sym = (struct get_symbol *) insn->data;
1182
1183 dbg_printf("Validate get symbol offset %u\n", sym->offset);
1184 break;
1185 }
1186
1187 case BYTECODE_OP_GET_SYMBOL_FIELD:
1188 {
1189 struct load_op *insn = (struct load_op *) pc;
1190 struct get_symbol *sym = (struct get_symbol *) insn->data;
1191
1192 dbg_printf("Validate get symbol field offset %u\n", sym->offset);
1193 break;
1194 }
1195
1196 case BYTECODE_OP_GET_INDEX_U16:
1197 {
1198 struct load_op *insn = (struct load_op *) pc;
1199 struct get_index_u16 *get_index = (struct get_index_u16 *) insn->data;
1200
1201 dbg_printf("Validate get index u16 index %u\n", get_index->index);
1202 break;
1203 }
1204
1205 case BYTECODE_OP_GET_INDEX_U64:
1206 {
1207 struct load_op *insn = (struct load_op *) pc;
1208 struct get_index_u64 *get_index = (struct get_index_u64 *) insn->data;
1209
1210 dbg_printf("Validate get index u64 index %" PRIu64 "\n", get_index->index);
1211 break;
1212 }
1213 }
1214 end:
1215 return ret;
1216 }
1217
1218 /*
1219 * Return value:
1220 * 0: success
1221 * <0: error
1222 */
1223 static
1224 int validate_instruction_all_contexts(struct bytecode_runtime *bytecode,
1225 struct lttng_ust_lfht *merge_points,
1226 struct vstack *stack,
1227 char *start_pc,
1228 char *pc)
1229 {
1230 int ret;
1231 unsigned long target_pc = pc - start_pc;
1232 struct lttng_ust_lfht_iter iter;
1233 struct lttng_ust_lfht_node *node;
1234 struct lfht_mp_node *mp_node;
1235 unsigned long hash;
1236
1237 /* Validate the context resulting from the previous instruction */
1238 ret = validate_instruction_context(bytecode, stack, start_pc, pc);
1239 if (ret < 0)
1240 return ret;
1241
1242 /* Validate merge points */
1243 hash = lttng_hash_mix((const char *) target_pc, sizeof(target_pc),
1244 lttng_hash_seed);
1245 lttng_ust_lfht_lookup(merge_points, hash, lttng_hash_match,
1246 (const char *) target_pc, &iter);
1247 node = lttng_ust_lfht_iter_get_node(&iter);
1248 if (node) {
1249 mp_node = caa_container_of(node, struct lfht_mp_node, node);
1250
1251 dbg_printf("Bytecode: validate merge point at offset %lu\n",
1252 target_pc);
1253 if (merge_points_compare(stack, &mp_node->stack)) {
1254 ERR("Merge points differ for offset %lu\n",
1255 target_pc);
1256 return -EINVAL;
1257 }
1258 /* Once validated, we can remove the merge point */
1259 dbg_printf("Bytecode: remove merge point at offset %lu\n",
1260 target_pc);
1261 ret = lttng_ust_lfht_del(merge_points, node);
1262 assert(!ret);
1263 }
1264 return 0;
1265 }
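/*
 * Merge points are deleted as soon as they have been validated; any
 * node still present in the hash table once the main validation loop
 * completes is reported as an unexpected merge point by
 * lttng_bytecode_validate().
 */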
1266
1267 /*
1268 * Return value:
1269 * >0: going to next insn.
1270 * 0: success, stop iteration.
1271 * <0: error
1272 */
1273 static
1274 int exec_insn(struct bytecode_runtime *bytecode,
1275 struct lttng_ust_lfht *merge_points,
1276 struct vstack *stack,
1277 char **_next_pc,
1278 char *pc)
1279 {
1280 int ret = 1;
1281 char *next_pc = *_next_pc;
1282
1283 switch (*(bytecode_opcode_t *) pc) {
1284 case BYTECODE_OP_UNKNOWN:
1285 default:
1286 {
1287 ERR("unknown bytecode op %u\n",
1288 (unsigned int) *(bytecode_opcode_t *) pc);
1289 ret = -EINVAL;
1290 goto end;
1291 }
1292
1293 case BYTECODE_OP_RETURN:
1294 {
1295 if (!vstack_ax(stack)) {
1296 ERR("Empty stack\n");
1297 ret = -EINVAL;
1298 goto end;
1299 }
1300 switch (vstack_ax(stack)->type) {
1301 case REG_S64:
1302 case REG_U64:
1303 case REG_DOUBLE:
1304 case REG_STRING:
1305 case REG_PTR:
1306 case REG_UNKNOWN:
1307 break;
1308 default:
1309 ERR("Unexpected register type %d at end of bytecode\n",
1310 (int) vstack_ax(stack)->type);
1311 ret = -EINVAL;
1312 goto end;
1313 }
1314
1315 ret = 0;
1316 goto end;
1317 }
1318 case BYTECODE_OP_RETURN_S64:
1319 {
1320 if (!vstack_ax(stack)) {
1321 ERR("Empty stack\n");
1322 ret = -EINVAL;
1323 goto end;
1324 }
1325 switch (vstack_ax(stack)->type) {
1326 case REG_S64:
1327 case REG_U64:
1328 break;
1329 default:
1330 case REG_UNKNOWN:
1331 ERR("Unexpected register type %d at end of bytecode\n",
1332 (int) vstack_ax(stack)->type);
1333 ret = -EINVAL;
1334 goto end;
1335 }
1336
1337 ret = 0;
1338 goto end;
1339 }
1340
1341 /* binary */
1342 case BYTECODE_OP_MUL:
1343 case BYTECODE_OP_DIV:
1344 case BYTECODE_OP_MOD:
1345 case BYTECODE_OP_PLUS:
1346 case BYTECODE_OP_MINUS:
1347 {
1348 ERR("unsupported bytecode op %u\n",
1349 (unsigned int) *(bytecode_opcode_t *) pc);
1350 ret = -EINVAL;
1351 goto end;
1352 }
1353
1354 case BYTECODE_OP_EQ:
1355 case BYTECODE_OP_NE:
1356 case BYTECODE_OP_GT:
1357 case BYTECODE_OP_LT:
1358 case BYTECODE_OP_GE:
1359 case BYTECODE_OP_LE:
1360 case BYTECODE_OP_EQ_STRING:
1361 case BYTECODE_OP_NE_STRING:
1362 case BYTECODE_OP_GT_STRING:
1363 case BYTECODE_OP_LT_STRING:
1364 case BYTECODE_OP_GE_STRING:
1365 case BYTECODE_OP_LE_STRING:
1366 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
1367 case BYTECODE_OP_NE_STAR_GLOB_STRING:
1368 case BYTECODE_OP_EQ_S64:
1369 case BYTECODE_OP_NE_S64:
1370 case BYTECODE_OP_GT_S64:
1371 case BYTECODE_OP_LT_S64:
1372 case BYTECODE_OP_GE_S64:
1373 case BYTECODE_OP_LE_S64:
1374 case BYTECODE_OP_EQ_DOUBLE:
1375 case BYTECODE_OP_NE_DOUBLE:
1376 case BYTECODE_OP_GT_DOUBLE:
1377 case BYTECODE_OP_LT_DOUBLE:
1378 case BYTECODE_OP_GE_DOUBLE:
1379 case BYTECODE_OP_LE_DOUBLE:
1380 case BYTECODE_OP_EQ_DOUBLE_S64:
1381 case BYTECODE_OP_NE_DOUBLE_S64:
1382 case BYTECODE_OP_GT_DOUBLE_S64:
1383 case BYTECODE_OP_LT_DOUBLE_S64:
1384 case BYTECODE_OP_GE_DOUBLE_S64:
1385 case BYTECODE_OP_LE_DOUBLE_S64:
1386 case BYTECODE_OP_EQ_S64_DOUBLE:
1387 case BYTECODE_OP_NE_S64_DOUBLE:
1388 case BYTECODE_OP_GT_S64_DOUBLE:
1389 case BYTECODE_OP_LT_S64_DOUBLE:
1390 case BYTECODE_OP_GE_S64_DOUBLE:
1391 case BYTECODE_OP_LE_S64_DOUBLE:
1392 {
1393 /* Pop 2, push 1 */
1394 if (vstack_pop(stack)) {
1395 ret = -EINVAL;
1396 goto end;
1397 }
1398 if (!vstack_ax(stack)) {
1399 ERR("Empty stack\n");
1400 ret = -EINVAL;
1401 goto end;
1402 }
1403 switch (vstack_ax(stack)->type) {
1404 case REG_S64:
1405 case REG_U64:
1406 case REG_DOUBLE:
1407 case REG_STRING:
1408 case REG_STAR_GLOB_STRING:
1409 case REG_UNKNOWN:
1410 break;
1411 default:
1412 ERR("Unexpected register type %d for operation\n",
1413 (int) vstack_ax(stack)->type);
1414 ret = -EINVAL;
1415 goto end;
1416 }
1417
1418 vstack_ax(stack)->type = REG_S64;
1419 next_pc += sizeof(struct binary_op);
1420 break;
1421 }
1422
1423 case BYTECODE_OP_BIT_RSHIFT:
1424 case BYTECODE_OP_BIT_LSHIFT:
1425 case BYTECODE_OP_BIT_AND:
1426 case BYTECODE_OP_BIT_OR:
1427 case BYTECODE_OP_BIT_XOR:
1428 {
1429 /* Pop 2, push 1 */
1430 if (vstack_pop(stack)) {
1431 ret = -EINVAL;
1432 goto end;
1433 }
1434 if (!vstack_ax(stack)) {
1435 ERR("Empty stack\n");
1436 ret = -EINVAL;
1437 goto end;
1438 }
1439 switch (vstack_ax(stack)->type) {
1440 case REG_S64:
1441 case REG_U64:
1442 case REG_DOUBLE:
1443 case REG_STRING:
1444 case REG_STAR_GLOB_STRING:
1445 case REG_UNKNOWN:
1446 break;
1447 default:
1448 ERR("Unexpected register type %d for operation\n",
1449 (int) vstack_ax(stack)->type);
1450 ret = -EINVAL;
1451 goto end;
1452 }
1453
1454 vstack_ax(stack)->type = REG_U64;
1455 next_pc += sizeof(struct binary_op);
1456 break;
1457 }
1458
1459 /* unary */
1460 case BYTECODE_OP_UNARY_PLUS:
1461 case BYTECODE_OP_UNARY_MINUS:
1462 {
1463 /* Pop 1, push 1 */
1464 if (!vstack_ax(stack)) {
1465 ERR("Empty stack\n");
1466 ret = -EINVAL;
1467 goto end;
1468 }
1469 switch (vstack_ax(stack)->type) {
1470 case REG_UNKNOWN:
1471 case REG_DOUBLE:
1472 case REG_S64:
1473 case REG_U64:
1474 break;
1475 default:
1476 ERR("Unexpected register type %d for operation\n",
1477 (int) vstack_ax(stack)->type);
1478 ret = -EINVAL;
1479 goto end;
1480 }
1481 vstack_ax(stack)->type = REG_UNKNOWN;
1482 next_pc += sizeof(struct unary_op);
1483 break;
1484 }
1485
1486 case BYTECODE_OP_UNARY_PLUS_S64:
1487 case BYTECODE_OP_UNARY_MINUS_S64:
1488 case BYTECODE_OP_UNARY_NOT_S64:
1489 {
1490 /* Pop 1, push 1 */
1491 if (!vstack_ax(stack)) {
1492 ERR("Empty stack\n");
1493 ret = -EINVAL;
1494 goto end;
1495 }
1496 switch (vstack_ax(stack)->type) {
1497 case REG_S64:
1498 case REG_U64:
1499 break;
1500 default:
1501 ERR("Unexpected register type %d for operation\n",
1502 (int) vstack_ax(stack)->type);
1503 ret = -EINVAL;
1504 goto end;
1505 }
1506
1507 next_pc += sizeof(struct unary_op);
1508 break;
1509 }
1510
1511 case BYTECODE_OP_UNARY_NOT:
1512 {
1513 /* Pop 1, push 1 */
1514 if (!vstack_ax(stack)) {
1515 ERR("Empty stack\n");
1516 ret = -EINVAL;
1517 goto end;
1518 }
1519 switch (vstack_ax(stack)->type) {
1520 case REG_UNKNOWN:
1521 case REG_DOUBLE:
1522 case REG_S64:
1523 case REG_U64:
1524 break;
1525 default:
1526 ERR("Unexpected register type %d for operation\n",
1527 (int) vstack_ax(stack)->type);
1528 ret = -EINVAL;
1529 goto end;
1530 }
1531
1532 next_pc += sizeof(struct unary_op);
1533 break;
1534 }
1535
1536 case BYTECODE_OP_UNARY_BIT_NOT:
1537 {
1538 /* Pop 1, push 1 */
1539 if (!vstack_ax(stack)) {
1540 ERR("Empty stack\n");
1541 ret = -EINVAL;
1542 goto end;
1543 }
1544 switch (vstack_ax(stack)->type) {
1545 case REG_UNKNOWN:
1546 case REG_S64:
1547 case REG_U64:
1548 break;
1549 case REG_DOUBLE:
1550 default:
1551 ERR("Unexpected register type %d for operation\n",
1552 (int) vstack_ax(stack)->type);
1553 ret = -EINVAL;
1554 goto end;
1555 }
1556
1557 vstack_ax(stack)->type = REG_U64;
1558 next_pc += sizeof(struct unary_op);
1559 break;
1560 }
1561
1562 case BYTECODE_OP_UNARY_NOT_DOUBLE:
1563 {
1564 /* Pop 1, push 1 */
1565 if (!vstack_ax(stack)) {
1566 ERR("Empty stack\n");
1567 ret = -EINVAL;
1568 goto end;
1569 }
1570 switch (vstack_ax(stack)->type) {
1571 case REG_DOUBLE:
1572 break;
1573 default:
1574 ERR("Incorrect register type %d for operation\n",
1575 (int) vstack_ax(stack)->type);
1576 ret = -EINVAL;
1577 goto end;
1578 }
1579
1580 vstack_ax(stack)->type = REG_S64;
1581 next_pc += sizeof(struct unary_op);
1582 break;
1583 }
1584
1585 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
1586 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
1587 {
1588 /* Pop 1, push 1 */
1589 if (!vstack_ax(stack)) {
1590 ERR("Empty stack\n");
1591 ret = -EINVAL;
1592 goto end;
1593 }
1594 switch (vstack_ax(stack)->type) {
1595 case REG_DOUBLE:
1596 break;
1597 default:
1598 ERR("Incorrect register type %d for operation\n",
1599 (int) vstack_ax(stack)->type);
1600 ret = -EINVAL;
1601 goto end;
1602 }
1603
1604 vstack_ax(stack)->type = REG_DOUBLE;
1605 next_pc += sizeof(struct unary_op);
1606 break;
1607 }
1608
1609 /* logical */
1610 case BYTECODE_OP_AND:
1611 case BYTECODE_OP_OR:
1612 {
1613 struct logical_op *insn = (struct logical_op *) pc;
1614 int merge_ret;
1615
1616 /* Add merge point to table */
1617 merge_ret = merge_point_add_check(merge_points,
1618 insn->skip_offset, stack);
1619 if (merge_ret) {
1620 ret = merge_ret;
1621 goto end;
1622 }
1623
1624 if (!vstack_ax(stack)) {
1625 ERR("Empty stack\n");
1626 ret = -EINVAL;
1627 goto end;
1628 }
1629 /* There is always a cast-to-s64 operation before an or/and op. */
1630 switch (vstack_ax(stack)->type) {
1631 case REG_S64:
1632 case REG_U64:
1633 break;
1634 default:
1635 ERR("Incorrect register type %d for operation\n",
1636 (int) vstack_ax(stack)->type);
1637 ret = -EINVAL;
1638 goto end;
1639 }
1640
1641 /* Continue to next instruction */
1642 /* Pop 1 when jump not taken */
1643 if (vstack_pop(stack)) {
1644 ret = -EINVAL;
1645 goto end;
1646 }
1647 next_pc += sizeof(struct logical_op);
1648 break;
1649 }
1650
1651 /* load field ref */
1652 case BYTECODE_OP_LOAD_FIELD_REF:
1653 {
1654 ERR("Unknown field ref type\n");
1655 ret = -EINVAL;
1656 goto end;
1657 }
1658 /* get context ref */
1659 case BYTECODE_OP_GET_CONTEXT_REF:
1660 {
1661 if (vstack_push(stack)) {
1662 ret = -EINVAL;
1663 goto end;
1664 }
1665 vstack_ax(stack)->type = REG_UNKNOWN;
1666 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1667 break;
1668 }
1669 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
1670 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
1671 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
1672 {
1673 if (vstack_push(stack)) {
1674 ret = -EINVAL;
1675 goto end;
1676 }
1677 vstack_ax(stack)->type = REG_STRING;
1678 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1679 break;
1680 }
1681 case BYTECODE_OP_LOAD_FIELD_REF_S64:
1682 case BYTECODE_OP_GET_CONTEXT_REF_S64:
1683 {
1684 if (vstack_push(stack)) {
1685 ret = -EINVAL;
1686 goto end;
1687 }
1688 vstack_ax(stack)->type = REG_S64;
1689 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1690 break;
1691 }
1692 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
1693 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
1694 {
1695 if (vstack_push(stack)) {
1696 ret = -EINVAL;
1697 goto end;
1698 }
1699 vstack_ax(stack)->type = REG_DOUBLE;
1700 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1701 break;
1702 }
1703
1704 /* load from immediate operand */
1705 case BYTECODE_OP_LOAD_STRING:
1706 {
1707 struct load_op *insn = (struct load_op *) pc;
1708
1709 if (vstack_push(stack)) {
1710 ret = -EINVAL;
1711 goto end;
1712 }
1713 vstack_ax(stack)->type = REG_STRING;
1714 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1715 break;
1716 }
1717
1718 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
1719 {
1720 struct load_op *insn = (struct load_op *) pc;
1721
1722 if (vstack_push(stack)) {
1723 ret = -EINVAL;
1724 goto end;
1725 }
1726 vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
1727 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1728 break;
1729 }
1730
1731 case BYTECODE_OP_LOAD_S64:
1732 {
1733 if (vstack_push(stack)) {
1734 ret = -EINVAL;
1735 goto end;
1736 }
1737 vstack_ax(stack)->type = REG_S64;
1738 next_pc += sizeof(struct load_op)
1739 + sizeof(struct literal_numeric);
1740 break;
1741 }
1742
1743 case BYTECODE_OP_LOAD_DOUBLE:
1744 {
1745 if (vstack_push(stack)) {
1746 ret = -EINVAL;
1747 goto end;
1748 }
1749 vstack_ax(stack)->type = REG_DOUBLE;
1750 next_pc += sizeof(struct load_op)
1751 + sizeof(struct literal_double);
1752 break;
1753 }
1754
1755 case BYTECODE_OP_CAST_TO_S64:
1756 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
1757 {
1758 /* Pop 1, push 1 */
1759 if (!vstack_ax(stack)) {
1760 ERR("Empty stack\n");
1761 ret = -EINVAL;
1762 goto end;
1763 }
1764 switch (vstack_ax(stack)->type) {
1765 case REG_S64:
1766 case REG_U64:
1767 case REG_DOUBLE:
1768 case REG_UNKNOWN:
1769 break;
1770 default:
1771 ERR("Incorrect register type %d for cast\n",
1772 (int) vstack_ax(stack)->type);
1773 ret = -EINVAL;
1774 goto end;
1775 }
1776 vstack_ax(stack)->type = REG_S64;
1777 next_pc += sizeof(struct cast_op);
1778 break;
1779 }
1780 case BYTECODE_OP_CAST_NOP:
1781 {
1782 next_pc += sizeof(struct cast_op);
1783 break;
1784 }
1785
1786 /*
1787 * Instructions for recursive traversal through composed types.
1788 */
1789 case BYTECODE_OP_GET_CONTEXT_ROOT:
1790 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
1791 case BYTECODE_OP_GET_PAYLOAD_ROOT:
1792 {
1793 if (vstack_push(stack)) {
1794 ret = -EINVAL;
1795 goto end;
1796 }
1797 vstack_ax(stack)->type = REG_PTR;
1798 next_pc += sizeof(struct load_op);
1799 break;
1800 }
1801
1802 case BYTECODE_OP_LOAD_FIELD:
1803 {
1804 /* Pop 1, push 1 */
1805 if (!vstack_ax(stack)) {
1806 ERR("Empty stack\n");
1807 ret = -EINVAL;
1808 goto end;
1809 }
1810 if (vstack_ax(stack)->type != REG_PTR) {
1811 ERR("Expecting pointer on top of stack\n");
1812 ret = -EINVAL;
1813 goto end;
1814 }
1815 vstack_ax(stack)->type = REG_UNKNOWN;
1816 next_pc += sizeof(struct load_op);
1817 break;
1818 }
1819
1820 case BYTECODE_OP_LOAD_FIELD_S8:
1821 case BYTECODE_OP_LOAD_FIELD_S16:
1822 case BYTECODE_OP_LOAD_FIELD_S32:
1823 case BYTECODE_OP_LOAD_FIELD_S64:
1824 {
1825 /* Pop 1, push 1 */
1826 if (!vstack_ax(stack)) {
1827 ERR("Empty stack\n");
1828 ret = -EINVAL;
1829 goto end;
1830 }
1831 if (vstack_ax(stack)->type != REG_PTR) {
1832 ERR("Expecting pointer on top of stack\n");
1833 ret = -EINVAL;
1834 goto end;
1835 }
1836 vstack_ax(stack)->type = REG_S64;
1837 next_pc += sizeof(struct load_op);
1838 break;
1839 }
1840
1841 case BYTECODE_OP_LOAD_FIELD_U8:
1842 case BYTECODE_OP_LOAD_FIELD_U16:
1843 case BYTECODE_OP_LOAD_FIELD_U32:
1844 case BYTECODE_OP_LOAD_FIELD_U64:
1845 {
1846 /* Pop 1, push 1 */
1847 if (!vstack_ax(stack)) {
1848 ERR("Empty stack\n");
1849 ret = -EINVAL;
1850 goto end;
1851 }
1852 if (vstack_ax(stack)->type != REG_PTR) {
1853 ERR("Expecting pointer on top of stack\n");
1854 ret = -EINVAL;
1855 goto end;
1856 }
1857 vstack_ax(stack)->type = REG_U64;
1858 next_pc += sizeof(struct load_op);
1859 break;
1860 }
1861
1862 case BYTECODE_OP_LOAD_FIELD_STRING:
1863 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
1864 {
1865 /* Pop 1, push 1 */
1866 if (!vstack_ax(stack)) {
1867 ERR("Empty stack\n");
1868 ret = -EINVAL;
1869 goto end;
1870 }
1871 if (vstack_ax(stack)->type != REG_PTR) {
1872 ERR("Expecting pointer on top of stack\n");
1873 ret = -EINVAL;
1874 goto end;
1875 }
1876 vstack_ax(stack)->type = REG_STRING;
1877 next_pc += sizeof(struct load_op);
1878 break;
1879 }
1880
1881 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
1882 {
1883 /* Pop 1, push 1 */
1884 if (!vstack_ax(stack)) {
1885 ERR("Empty stack\n");
1886 ret = -EINVAL;
1887 goto end;
1888 }
1889 if (vstack_ax(stack)->type != REG_PTR) {
1890 ERR("Expecting pointer on top of stack\n");
1891 ret = -EINVAL;
1892 goto end;
1893 }
1894 vstack_ax(stack)->type = REG_DOUBLE;
1895 next_pc += sizeof(struct load_op);
1896 break;
1897 }
1898
1899 case BYTECODE_OP_GET_SYMBOL:
1900 case BYTECODE_OP_GET_SYMBOL_FIELD:
1901 {
1902 /* Pop 1, push 1 */
1903 if (!vstack_ax(stack)) {
1904 ERR("Empty stack\n");
1905 ret = -EINVAL;
1906 goto end;
1907 }
1908 if (vstack_ax(stack)->type != REG_PTR) {
1909 ERR("Expecting pointer on top of stack\n");
1910 ret = -EINVAL;
1911 goto end;
1912 }
1913 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1914 break;
1915 }
1916
1917 case BYTECODE_OP_GET_INDEX_U16:
1918 {
1919 /* Pop 1, push 1 */
1920 if (!vstack_ax(stack)) {
1921 ERR("Empty stack\n");
1922 ret = -EINVAL;
1923 goto end;
1924 }
1925 if (vstack_ax(stack)->type != REG_PTR) {
1926 ERR("Expecting pointer on top of stack\n");
1927 ret = -EINVAL;
1928 goto end;
1929 }
1930 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1931 break;
1932 }
1933
1934 case BYTECODE_OP_GET_INDEX_U64:
1935 {
1936 /* Pop 1, push 1 */
1937 if (!vstack_ax(stack)) {
1938 ERR("Empty stack\n");
1939 ret = -EINVAL;
1940 goto end;
1941 }
1942 if (vstack_ax(stack)->type != REG_PTR) {
1943 ERR("Expecting pointer on top of stack\n");
1944 ret = -EINVAL;
1945 goto end;
1946 }
1947 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1948 break;
1949 }
1950
1951 }
1952 end:
1953 *_next_pc = next_pc;
1954 return ret;
1955 }
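/*
 * Illustrative walk-through (assumed encoding, for documentation only):
 * a filter such as "intfield > 42" would typically be emitted as a
 * sequence whose stack effects, as modelled by exec_insn() above, are:
 *
 *   BYTECODE_OP_GET_PAYLOAD_ROOT     push REG_PTR
 *   BYTECODE_OP_GET_SYMBOL           top of stack stays REG_PTR
 *   BYTECODE_OP_LOAD_FIELD           REG_PTR becomes REG_UNKNOWN
 *   BYTECODE_OP_LOAD_S64 (42)        push REG_S64
 *   BYTECODE_OP_GT                   pop 2, push REG_S64
 *   BYTECODE_OP_RETURN               accept result
 *
 * bytecode_validate_overflow() bounds-checks each of these instructions
 * before exec_insn() applies the corresponding stack effects.
 */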
1956
1957 /*
1958 * Never called concurrently (hash seed is shared).
1959 */
1960 int lttng_bytecode_validate(struct bytecode_runtime *bytecode)
1961 {
1962 struct lttng_ust_lfht *merge_points;
1963 char *pc, *next_pc, *start_pc;
1964 int ret = -EINVAL;
1965 struct vstack stack;
1966
1967 vstack_init(&stack);
1968
1969 if (!lttng_hash_seed_ready) {
1970 lttng_hash_seed = time(NULL);
1971 lttng_hash_seed_ready = 1;
1972 }
1973 /*
1974 * Note: merge_points hash table used by single thread, and
1975 * never concurrently resized. Therefore, we can use it without
1976 * holding RCU read-side lock and free nodes without using
1977 * call_rcu.
1978 */
1979 merge_points = lttng_ust_lfht_new(DEFAULT_NR_MERGE_POINTS,
1980 MIN_NR_BUCKETS, MAX_NR_BUCKETS,
1981 0, NULL);
1982 if (!merge_points) {
1983 ERR("Error allocating hash table for bytecode validation\n");
1984 return -ENOMEM;
1985 }
1986 start_pc = &bytecode->code[0];
1987 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
1988 pc = next_pc) {
1989 ret = bytecode_validate_overflow(bytecode, start_pc, pc);
1990 if (ret != 0) {
1991 if (ret == -ERANGE)
1992 ERR("Bytecode overflow\n");
1993 goto end;
1994 }
1995 dbg_printf("Validating op %s (%u)\n",
1996 print_op((unsigned int) *(bytecode_opcode_t *) pc),
1997 (unsigned int) *(bytecode_opcode_t *) pc);
1998
1999 /*
2000 * For each instruction, validate the current context
2001 * (traversal of entire execution flow), and validate
2002 * all merge points targeting this instruction.
2003 */
2004 ret = validate_instruction_all_contexts(bytecode, merge_points,
2005 &stack, start_pc, pc);
2006 if (ret)
2007 goto end;
2008 ret = exec_insn(bytecode, merge_points, &stack, &next_pc, pc);
2009 if (ret <= 0)
2010 goto end;
2011 }
2012 end:
2013 if (delete_all_nodes(merge_points)) {
2014 if (!ret) {
2015 ERR("Unexpected merge points\n");
2016 ret = -EINVAL;
2017 }
2018 }
2019 if (lttng_ust_lfht_destroy(merge_points)) {
2020 ERR("Error destroying hash table\n");
2021 }
2022 return ret;
2023 }
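/*
 * Example call site (a minimal sketch, not the actual caller;
 * link_one_bytecode() is a hypothetical name): a bytecode_runtime that
 * fails validation is expected to be rejected before it is ever
 * specialized or interpreted.
 *
 *	int link_one_bytecode(struct bytecode_runtime *runtime)
 *	{
 *		int ret = lttng_bytecode_validate(runtime);
 *
 *		if (ret)
 *			return ret;	// reject invalid bytecode
 *		// ... proceed with specialization/interpretation setup ...
 *		return 0;
 *	}
 */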