/*
 * lttng-bytecode-validator.c
 *
 * LTTng UST bytecode validator.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include <assert.h>
#include <errno.h>
#include <inttypes.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

#include <urcu/rculfhash.h>

#include "lttng-bytecode.h"
#include "lttng-hash-helper.h"
#include "string-utils.h"
#include "ust-events-internal.h"
/*
 * Number of merge points for hash table size. Hash table initialized to
 * that size, and we do not resize, because we do not want to trigger
 * RCU worker thread execution: fall-back on linear traversal if number
 * of merge points exceeds this value.
 */
/* Fixed-size merge point hash table: never resized (see comment above). */
#define DEFAULT_NR_MERGE_POINTS		128
#define MIN_NR_BUCKETS			128
#define MAX_NR_BUCKETS			128
50 /* merge point table node */
52 struct cds_lfht_node node
;
54 /* Context at merge point */
56 unsigned long target_pc
;
/*
 * Hash seed for merge-point lookups. Lazily initialized on first
 * validation (see lttng_bytecode_validate); validation is never called
 * concurrently, so no synchronization is needed.
 */
static unsigned long lttng_hash_seed;
static unsigned int lttng_hash_seed_ready;
63 int lttng_hash_match(struct cds_lfht_node
*node
, const void *key
)
65 struct lfht_mp_node
*mp_node
=
66 caa_container_of(node
, struct lfht_mp_node
, node
);
67 unsigned long key_pc
= (unsigned long) key
;
69 if (mp_node
->target_pc
== key_pc
)
76 int merge_points_compare(const struct vstack
*stacka
,
77 const struct vstack
*stackb
)
81 if (stacka
->top
!= stackb
->top
)
83 len
= stacka
->top
+ 1;
85 for (i
= 0; i
< len
; i
++) {
86 if (stacka
->e
[i
].type
!= REG_UNKNOWN
87 && stackb
->e
[i
].type
!= REG_UNKNOWN
88 && stacka
->e
[i
].type
!= stackb
->e
[i
].type
)
95 int merge_point_add_check(struct cds_lfht
*ht
, unsigned long target_pc
,
96 const struct vstack
*stack
)
98 struct lfht_mp_node
*node
;
99 unsigned long hash
= lttng_hash_mix((const char *) target_pc
,
102 struct cds_lfht_node
*ret
;
104 dbg_printf("Bytecode: adding merge point at offset %lu, hash %lu\n",
106 node
= zmalloc(sizeof(struct lfht_mp_node
));
109 node
->target_pc
= target_pc
;
110 memcpy(&node
->stack
, stack
, sizeof(node
->stack
));
111 ret
= cds_lfht_add_unique(ht
, hash
, lttng_hash_match
,
112 (const char *) target_pc
, &node
->node
);
113 if (ret
!= &node
->node
) {
114 struct lfht_mp_node
*ret_mp
=
115 caa_container_of(ret
, struct lfht_mp_node
, node
);
117 /* Key already present */
118 dbg_printf("Bytecode: compare merge points for offset %lu, hash %lu\n",
121 if (merge_points_compare(stack
, &ret_mp
->stack
)) {
122 ERR("Merge points differ for offset %lu\n",
/*
 * Binary comparators use top of stack and top of stack -1.
 * Return 0 if typing is known to match, 1 if typing is dynamic
 * (unknown), negative error value on error.
 */
136 int bin_op_compare_check(struct vstack
*stack
, bytecode_opcode_t opcode
,
139 if (unlikely(!vstack_ax(stack
) || !vstack_bx(stack
)))
142 switch (vstack_ax(stack
)->type
) {
149 switch (vstack_bx(stack
)->type
) {
157 case REG_STAR_GLOB_STRING
:
158 if (opcode
!= BYTECODE_OP_EQ
&& opcode
!= BYTECODE_OP_NE
) {
168 case REG_STAR_GLOB_STRING
:
169 switch (vstack_bx(stack
)->type
) {
176 if (opcode
!= BYTECODE_OP_EQ
&& opcode
!= BYTECODE_OP_NE
) {
180 case REG_STAR_GLOB_STRING
:
190 switch (vstack_bx(stack
)->type
) {
197 case REG_STAR_GLOB_STRING
:
212 ERR("type mismatch for '%s' binary operator\n", str
);
216 ERR("empty stack for '%s' binary operator\n", str
);
220 ERR("unknown type for '%s' binary operator\n", str
);
/*
 * Binary bitwise operators use top of stack and top of stack -1.
 * Return 0 if typing is known to match, 1 if typing is dynamic
 * (unknown), negative error value on error.
 */
230 int bin_op_bitwise_check(struct vstack
*stack
, bytecode_opcode_t opcode
,
233 if (unlikely(!vstack_ax(stack
) || !vstack_bx(stack
)))
236 switch (vstack_ax(stack
)->type
) {
244 switch (vstack_bx(stack
)->type
) {
262 ERR("empty stack for '%s' binary operator\n", str
);
266 ERR("unknown type for '%s' binary operator\n", str
);
271 int validate_get_symbol(struct bytecode_runtime
*bytecode
,
272 const struct get_symbol
*sym
)
274 const char *str
, *str_limit
;
277 if (sym
->offset
>= bytecode
->p
.bc
->bc
.len
- bytecode
->p
.bc
->bc
.reloc_offset
)
280 str
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.reloc_offset
+ sym
->offset
;
281 str_limit
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.len
;
282 len_limit
= str_limit
- str
;
283 if (strnlen(str
, len_limit
) == len_limit
)
/*
 * Validate bytecode range overflow within the validation pass.
 * Called for each instruction encountered.
 */
293 int bytecode_validate_overflow(struct bytecode_runtime
*bytecode
,
294 char *start_pc
, char *pc
)
298 switch (*(bytecode_opcode_t
*) pc
) {
299 case BYTECODE_OP_UNKNOWN
:
302 ERR("unknown bytecode op %u\n",
303 (unsigned int) *(bytecode_opcode_t
*) pc
);
308 case BYTECODE_OP_RETURN
:
309 case BYTECODE_OP_RETURN_S64
:
311 if (unlikely(pc
+ sizeof(struct return_op
)
312 > start_pc
+ bytecode
->len
)) {
319 case BYTECODE_OP_MUL
:
320 case BYTECODE_OP_DIV
:
321 case BYTECODE_OP_MOD
:
322 case BYTECODE_OP_PLUS
:
323 case BYTECODE_OP_MINUS
:
325 ERR("unsupported bytecode op %u\n",
326 (unsigned int) *(bytecode_opcode_t
*) pc
);
337 case BYTECODE_OP_EQ_STRING
:
338 case BYTECODE_OP_NE_STRING
:
339 case BYTECODE_OP_GT_STRING
:
340 case BYTECODE_OP_LT_STRING
:
341 case BYTECODE_OP_GE_STRING
:
342 case BYTECODE_OP_LE_STRING
:
343 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
344 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
345 case BYTECODE_OP_EQ_S64
:
346 case BYTECODE_OP_NE_S64
:
347 case BYTECODE_OP_GT_S64
:
348 case BYTECODE_OP_LT_S64
:
349 case BYTECODE_OP_GE_S64
:
350 case BYTECODE_OP_LE_S64
:
351 case BYTECODE_OP_EQ_DOUBLE
:
352 case BYTECODE_OP_NE_DOUBLE
:
353 case BYTECODE_OP_GT_DOUBLE
:
354 case BYTECODE_OP_LT_DOUBLE
:
355 case BYTECODE_OP_GE_DOUBLE
:
356 case BYTECODE_OP_LE_DOUBLE
:
357 case BYTECODE_OP_EQ_DOUBLE_S64
:
358 case BYTECODE_OP_NE_DOUBLE_S64
:
359 case BYTECODE_OP_GT_DOUBLE_S64
:
360 case BYTECODE_OP_LT_DOUBLE_S64
:
361 case BYTECODE_OP_GE_DOUBLE_S64
:
362 case BYTECODE_OP_LE_DOUBLE_S64
:
363 case BYTECODE_OP_EQ_S64_DOUBLE
:
364 case BYTECODE_OP_NE_S64_DOUBLE
:
365 case BYTECODE_OP_GT_S64_DOUBLE
:
366 case BYTECODE_OP_LT_S64_DOUBLE
:
367 case BYTECODE_OP_GE_S64_DOUBLE
:
368 case BYTECODE_OP_LE_S64_DOUBLE
:
369 case BYTECODE_OP_BIT_RSHIFT
:
370 case BYTECODE_OP_BIT_LSHIFT
:
371 case BYTECODE_OP_BIT_AND
:
372 case BYTECODE_OP_BIT_OR
:
373 case BYTECODE_OP_BIT_XOR
:
375 if (unlikely(pc
+ sizeof(struct binary_op
)
376 > start_pc
+ bytecode
->len
)) {
383 case BYTECODE_OP_UNARY_PLUS
:
384 case BYTECODE_OP_UNARY_MINUS
:
385 case BYTECODE_OP_UNARY_NOT
:
386 case BYTECODE_OP_UNARY_PLUS_S64
:
387 case BYTECODE_OP_UNARY_MINUS_S64
:
388 case BYTECODE_OP_UNARY_NOT_S64
:
389 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
390 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
391 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
392 case BYTECODE_OP_UNARY_BIT_NOT
:
394 if (unlikely(pc
+ sizeof(struct unary_op
)
395 > start_pc
+ bytecode
->len
)) {
402 case BYTECODE_OP_AND
:
405 if (unlikely(pc
+ sizeof(struct logical_op
)
406 > start_pc
+ bytecode
->len
)) {
413 case BYTECODE_OP_LOAD_FIELD_REF
:
415 ERR("Unknown field ref type\n");
420 /* get context ref */
421 case BYTECODE_OP_GET_CONTEXT_REF
:
422 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
423 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
424 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
425 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
426 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
427 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
428 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
430 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct field_ref
)
431 > start_pc
+ bytecode
->len
)) {
437 /* load from immediate operand */
438 case BYTECODE_OP_LOAD_STRING
:
439 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
441 struct load_op
*insn
= (struct load_op
*) pc
;
442 uint32_t str_len
, maxlen
;
444 if (unlikely(pc
+ sizeof(struct load_op
)
445 > start_pc
+ bytecode
->len
)) {
450 maxlen
= start_pc
+ bytecode
->len
- pc
- sizeof(struct load_op
);
451 str_len
= strnlen(insn
->data
, maxlen
);
452 if (unlikely(str_len
>= maxlen
)) {
453 /* Final '\0' not found within range */
459 case BYTECODE_OP_LOAD_S64
:
461 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct literal_numeric
)
462 > start_pc
+ bytecode
->len
)) {
468 case BYTECODE_OP_LOAD_DOUBLE
:
470 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct literal_double
)
471 > start_pc
+ bytecode
->len
)) {
477 case BYTECODE_OP_CAST_TO_S64
:
478 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
479 case BYTECODE_OP_CAST_NOP
:
481 if (unlikely(pc
+ sizeof(struct cast_op
)
482 > start_pc
+ bytecode
->len
)) {
489 * Instructions for recursive traversal through composed types.
491 case BYTECODE_OP_GET_CONTEXT_ROOT
:
492 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
493 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
494 case BYTECODE_OP_LOAD_FIELD
:
495 case BYTECODE_OP_LOAD_FIELD_S8
:
496 case BYTECODE_OP_LOAD_FIELD_S16
:
497 case BYTECODE_OP_LOAD_FIELD_S32
:
498 case BYTECODE_OP_LOAD_FIELD_S64
:
499 case BYTECODE_OP_LOAD_FIELD_U8
:
500 case BYTECODE_OP_LOAD_FIELD_U16
:
501 case BYTECODE_OP_LOAD_FIELD_U32
:
502 case BYTECODE_OP_LOAD_FIELD_U64
:
503 case BYTECODE_OP_LOAD_FIELD_STRING
:
504 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
505 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
506 if (unlikely(pc
+ sizeof(struct load_op
)
507 > start_pc
+ bytecode
->len
)) {
512 case BYTECODE_OP_GET_SYMBOL
:
514 struct load_op
*insn
= (struct load_op
*) pc
;
515 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
517 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_symbol
)
518 > start_pc
+ bytecode
->len
)) {
522 ret
= validate_get_symbol(bytecode
, sym
);
526 case BYTECODE_OP_GET_SYMBOL_FIELD
:
527 ERR("Unexpected get symbol field");
531 case BYTECODE_OP_GET_INDEX_U16
:
532 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_index_u16
)
533 > start_pc
+ bytecode
->len
)) {
538 case BYTECODE_OP_GET_INDEX_U64
:
539 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_index_u64
)
540 > start_pc
+ bytecode
->len
)) {
550 unsigned long delete_all_nodes(struct cds_lfht
*ht
)
552 struct cds_lfht_iter iter
;
553 struct lfht_mp_node
*node
;
554 unsigned long nr_nodes
= 0;
556 cds_lfht_for_each_entry(ht
, &iter
, node
, node
) {
559 ret
= cds_lfht_del(ht
, cds_lfht_iter_get_node(&iter
));
561 /* note: this hash table is never used concurrently */
574 int validate_instruction_context(struct bytecode_runtime
*bytecode
,
575 struct vstack
*stack
,
580 const bytecode_opcode_t opcode
= *(bytecode_opcode_t
*) pc
;
583 case BYTECODE_OP_UNKNOWN
:
586 ERR("unknown bytecode op %u\n",
587 (unsigned int) *(bytecode_opcode_t
*) pc
);
592 case BYTECODE_OP_RETURN
:
593 case BYTECODE_OP_RETURN_S64
:
599 case BYTECODE_OP_MUL
:
600 case BYTECODE_OP_DIV
:
601 case BYTECODE_OP_MOD
:
602 case BYTECODE_OP_PLUS
:
603 case BYTECODE_OP_MINUS
:
605 ERR("unsupported bytecode op %u\n",
606 (unsigned int) opcode
);
613 ret
= bin_op_compare_check(stack
, opcode
, "==");
620 ret
= bin_op_compare_check(stack
, opcode
, "!=");
627 ret
= bin_op_compare_check(stack
, opcode
, ">");
634 ret
= bin_op_compare_check(stack
, opcode
, "<");
641 ret
= bin_op_compare_check(stack
, opcode
, ">=");
648 ret
= bin_op_compare_check(stack
, opcode
, "<=");
654 case BYTECODE_OP_EQ_STRING
:
655 case BYTECODE_OP_NE_STRING
:
656 case BYTECODE_OP_GT_STRING
:
657 case BYTECODE_OP_LT_STRING
:
658 case BYTECODE_OP_GE_STRING
:
659 case BYTECODE_OP_LE_STRING
:
661 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
662 ERR("Empty stack\n");
666 if (vstack_ax(stack
)->type
!= REG_STRING
667 || vstack_bx(stack
)->type
!= REG_STRING
) {
668 ERR("Unexpected register type for string comparator\n");
675 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
676 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
678 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
679 ERR("Empty stack\n");
683 if (vstack_ax(stack
)->type
!= REG_STAR_GLOB_STRING
684 && vstack_bx(stack
)->type
!= REG_STAR_GLOB_STRING
) {
685 ERR("Unexpected register type for globbing pattern comparator\n");
692 case BYTECODE_OP_EQ_S64
:
693 case BYTECODE_OP_NE_S64
:
694 case BYTECODE_OP_GT_S64
:
695 case BYTECODE_OP_LT_S64
:
696 case BYTECODE_OP_GE_S64
:
697 case BYTECODE_OP_LE_S64
:
699 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
700 ERR("Empty stack\n");
704 switch (vstack_ax(stack
)->type
) {
709 ERR("Unexpected register type for s64 comparator\n");
713 switch (vstack_bx(stack
)->type
) {
718 ERR("Unexpected register type for s64 comparator\n");
725 case BYTECODE_OP_EQ_DOUBLE
:
726 case BYTECODE_OP_NE_DOUBLE
:
727 case BYTECODE_OP_GT_DOUBLE
:
728 case BYTECODE_OP_LT_DOUBLE
:
729 case BYTECODE_OP_GE_DOUBLE
:
730 case BYTECODE_OP_LE_DOUBLE
:
732 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
733 ERR("Empty stack\n");
737 if (vstack_ax(stack
)->type
!= REG_DOUBLE
&& vstack_bx(stack
)->type
!= REG_DOUBLE
) {
738 ERR("Double operator should have two double registers\n");
745 case BYTECODE_OP_EQ_DOUBLE_S64
:
746 case BYTECODE_OP_NE_DOUBLE_S64
:
747 case BYTECODE_OP_GT_DOUBLE_S64
:
748 case BYTECODE_OP_LT_DOUBLE_S64
:
749 case BYTECODE_OP_GE_DOUBLE_S64
:
750 case BYTECODE_OP_LE_DOUBLE_S64
:
752 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
753 ERR("Empty stack\n");
757 switch (vstack_ax(stack
)->type
) {
762 ERR("Double-S64 operator has unexpected register types\n");
766 switch (vstack_bx(stack
)->type
) {
770 ERR("Double-S64 operator has unexpected register types\n");
777 case BYTECODE_OP_EQ_S64_DOUBLE
:
778 case BYTECODE_OP_NE_S64_DOUBLE
:
779 case BYTECODE_OP_GT_S64_DOUBLE
:
780 case BYTECODE_OP_LT_S64_DOUBLE
:
781 case BYTECODE_OP_GE_S64_DOUBLE
:
782 case BYTECODE_OP_LE_S64_DOUBLE
:
784 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
785 ERR("Empty stack\n");
789 switch (vstack_ax(stack
)->type
) {
793 ERR("S64-Double operator has unexpected register types\n");
797 switch (vstack_bx(stack
)->type
) {
802 ERR("S64-Double operator has unexpected register types\n");
809 case BYTECODE_OP_BIT_RSHIFT
:
810 ret
= bin_op_bitwise_check(stack
, opcode
, ">>");
814 case BYTECODE_OP_BIT_LSHIFT
:
815 ret
= bin_op_bitwise_check(stack
, opcode
, "<<");
819 case BYTECODE_OP_BIT_AND
:
820 ret
= bin_op_bitwise_check(stack
, opcode
, "&");
824 case BYTECODE_OP_BIT_OR
:
825 ret
= bin_op_bitwise_check(stack
, opcode
, "|");
829 case BYTECODE_OP_BIT_XOR
:
830 ret
= bin_op_bitwise_check(stack
, opcode
, "^");
836 case BYTECODE_OP_UNARY_PLUS
:
837 case BYTECODE_OP_UNARY_MINUS
:
838 case BYTECODE_OP_UNARY_NOT
:
840 if (!vstack_ax(stack
)) {
841 ERR("Empty stack\n");
845 switch (vstack_ax(stack
)->type
) {
847 ERR("unknown register type\n");
852 case REG_STAR_GLOB_STRING
:
853 ERR("Unary op can only be applied to numeric or floating point registers\n");
867 case BYTECODE_OP_UNARY_BIT_NOT
:
869 if (!vstack_ax(stack
)) {
870 ERR("Empty stack\n");
874 switch (vstack_ax(stack
)->type
) {
876 ERR("unknown register type\n");
881 case REG_STAR_GLOB_STRING
:
883 ERR("Unary bitwise op can only be applied to numeric registers\n");
896 case BYTECODE_OP_UNARY_PLUS_S64
:
897 case BYTECODE_OP_UNARY_MINUS_S64
:
898 case BYTECODE_OP_UNARY_NOT_S64
:
900 if (!vstack_ax(stack
)) {
901 ERR("Empty stack\n");
905 if (vstack_ax(stack
)->type
!= REG_S64
&&
906 vstack_ax(stack
)->type
!= REG_U64
) {
907 ERR("Invalid register type\n");
914 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
915 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
916 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
918 if (!vstack_ax(stack
)) {
919 ERR("Empty stack\n");
923 if (vstack_ax(stack
)->type
!= REG_DOUBLE
) {
924 ERR("Invalid register type\n");
932 case BYTECODE_OP_AND
:
935 struct logical_op
*insn
= (struct logical_op
*) pc
;
937 if (!vstack_ax(stack
)) {
938 ERR("Empty stack\n");
942 if (vstack_ax(stack
)->type
!= REG_S64
943 && vstack_ax(stack
)->type
!= REG_U64
944 && vstack_ax(stack
)->type
!= REG_UNKNOWN
) {
945 ERR("Logical comparator expects S64, U64 or dynamic register\n");
950 dbg_printf("Validate jumping to bytecode offset %u\n",
951 (unsigned int) insn
->skip_offset
);
952 if (unlikely(start_pc
+ insn
->skip_offset
<= pc
)) {
953 ERR("Loops are not allowed in bytecode\n");
961 case BYTECODE_OP_LOAD_FIELD_REF
:
963 ERR("Unknown field ref type\n");
967 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
968 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
970 struct load_op
*insn
= (struct load_op
*) pc
;
971 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
973 dbg_printf("Validate load field ref offset %u type string\n",
977 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
979 struct load_op
*insn
= (struct load_op
*) pc
;
980 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
982 dbg_printf("Validate load field ref offset %u type s64\n",
986 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
988 struct load_op
*insn
= (struct load_op
*) pc
;
989 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
991 dbg_printf("Validate load field ref offset %u type double\n",
996 /* load from immediate operand */
997 case BYTECODE_OP_LOAD_STRING
:
998 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1003 case BYTECODE_OP_LOAD_S64
:
1008 case BYTECODE_OP_LOAD_DOUBLE
:
1013 case BYTECODE_OP_CAST_TO_S64
:
1014 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1016 struct cast_op
*insn
= (struct cast_op
*) pc
;
1018 if (!vstack_ax(stack
)) {
1019 ERR("Empty stack\n");
1023 switch (vstack_ax(stack
)->type
) {
1025 ERR("unknown register type\n");
1030 case REG_STAR_GLOB_STRING
:
1031 ERR("Cast op can only be applied to numeric or floating point registers\n");
1043 if (insn
->op
== BYTECODE_OP_CAST_DOUBLE_TO_S64
) {
1044 if (vstack_ax(stack
)->type
!= REG_DOUBLE
) {
1045 ERR("Cast expects double\n");
1052 case BYTECODE_OP_CAST_NOP
:
1057 /* get context ref */
1058 case BYTECODE_OP_GET_CONTEXT_REF
:
1060 struct load_op
*insn
= (struct load_op
*) pc
;
1061 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1063 dbg_printf("Validate get context ref offset %u type dynamic\n",
1067 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1069 struct load_op
*insn
= (struct load_op
*) pc
;
1070 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1072 dbg_printf("Validate get context ref offset %u type string\n",
1076 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1078 struct load_op
*insn
= (struct load_op
*) pc
;
1079 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1081 dbg_printf("Validate get context ref offset %u type s64\n",
1085 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1087 struct load_op
*insn
= (struct load_op
*) pc
;
1088 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1090 dbg_printf("Validate get context ref offset %u type double\n",
1096 * Instructions for recursive traversal through composed types.
1098 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1100 dbg_printf("Validate get context root\n");
1103 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1105 dbg_printf("Validate get app context root\n");
1108 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1110 dbg_printf("Validate get payload root\n");
1113 case BYTECODE_OP_LOAD_FIELD
:
1116 * We tolerate that field type is unknown at validation,
1117 * because we are performing the load specialization in
1118 * a phase after validation.
1120 dbg_printf("Validate load field\n");
1123 case BYTECODE_OP_LOAD_FIELD_S8
:
1125 dbg_printf("Validate load field s8\n");
1128 case BYTECODE_OP_LOAD_FIELD_S16
:
1130 dbg_printf("Validate load field s16\n");
1133 case BYTECODE_OP_LOAD_FIELD_S32
:
1135 dbg_printf("Validate load field s32\n");
1138 case BYTECODE_OP_LOAD_FIELD_S64
:
1140 dbg_printf("Validate load field s64\n");
1143 case BYTECODE_OP_LOAD_FIELD_U8
:
1145 dbg_printf("Validate load field u8\n");
1148 case BYTECODE_OP_LOAD_FIELD_U16
:
1150 dbg_printf("Validate load field u16\n");
1153 case BYTECODE_OP_LOAD_FIELD_U32
:
1155 dbg_printf("Validate load field u32\n");
1158 case BYTECODE_OP_LOAD_FIELD_U64
:
1160 dbg_printf("Validate load field u64\n");
1163 case BYTECODE_OP_LOAD_FIELD_STRING
:
1165 dbg_printf("Validate load field string\n");
1168 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1170 dbg_printf("Validate load field sequence\n");
1173 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1175 dbg_printf("Validate load field double\n");
1179 case BYTECODE_OP_GET_SYMBOL
:
1181 struct load_op
*insn
= (struct load_op
*) pc
;
1182 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
1184 dbg_printf("Validate get symbol offset %u\n", sym
->offset
);
1188 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1190 struct load_op
*insn
= (struct load_op
*) pc
;
1191 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
1193 dbg_printf("Validate get symbol field offset %u\n", sym
->offset
);
1197 case BYTECODE_OP_GET_INDEX_U16
:
1199 struct load_op
*insn
= (struct load_op
*) pc
;
1200 struct get_index_u16
*get_index
= (struct get_index_u16
*) insn
->data
;
1202 dbg_printf("Validate get index u16 index %u\n", get_index
->index
);
1206 case BYTECODE_OP_GET_INDEX_U64
:
1208 struct load_op
*insn
= (struct load_op
*) pc
;
1209 struct get_index_u64
*get_index
= (struct get_index_u64
*) insn
->data
;
1211 dbg_printf("Validate get index u64 index %" PRIu64
"\n", get_index
->index
);
1225 int validate_instruction_all_contexts(struct bytecode_runtime
*bytecode
,
1226 struct cds_lfht
*merge_points
,
1227 struct vstack
*stack
,
1232 unsigned long target_pc
= pc
- start_pc
;
1233 struct cds_lfht_iter iter
;
1234 struct cds_lfht_node
*node
;
1235 struct lfht_mp_node
*mp_node
;
1238 /* Validate the context resulting from the previous instruction */
1239 ret
= validate_instruction_context(bytecode
, stack
, start_pc
, pc
);
1243 /* Validate merge points */
1244 hash
= lttng_hash_mix((const char *) target_pc
, sizeof(target_pc
),
1246 cds_lfht_lookup(merge_points
, hash
, lttng_hash_match
,
1247 (const char *) target_pc
, &iter
);
1248 node
= cds_lfht_iter_get_node(&iter
);
1250 mp_node
= caa_container_of(node
, struct lfht_mp_node
, node
);
1252 dbg_printf("Bytecode: validate merge point at offset %lu\n",
1254 if (merge_points_compare(stack
, &mp_node
->stack
)) {
1255 ERR("Merge points differ for offset %lu\n",
1259 /* Once validated, we can remove the merge point */
1260 dbg_printf("Bytecode: remove merge point at offset %lu\n",
1262 ret
= cds_lfht_del(merge_points
, node
);
/*
 * Return value:
 * >0: going to next insn.
 * 0: success, stop iteration.
 * <0: error.
 */
1275 int exec_insn(struct bytecode_runtime
*bytecode
,
1276 struct cds_lfht
*merge_points
,
1277 struct vstack
*stack
,
1282 char *next_pc
= *_next_pc
;
1284 switch (*(bytecode_opcode_t
*) pc
) {
1285 case BYTECODE_OP_UNKNOWN
:
1288 ERR("unknown bytecode op %u\n",
1289 (unsigned int) *(bytecode_opcode_t
*) pc
);
1294 case BYTECODE_OP_RETURN
:
1296 if (!vstack_ax(stack
)) {
1297 ERR("Empty stack\n");
1301 switch (vstack_ax(stack
)->type
) {
1310 ERR("Unexpected register type %d at end of bytecode\n",
1311 (int) vstack_ax(stack
)->type
);
1319 case BYTECODE_OP_RETURN_S64
:
1321 if (!vstack_ax(stack
)) {
1322 ERR("Empty stack\n");
1326 switch (vstack_ax(stack
)->type
) {
1332 ERR("Unexpected register type %d at end of bytecode\n",
1333 (int) vstack_ax(stack
)->type
);
1343 case BYTECODE_OP_MUL
:
1344 case BYTECODE_OP_DIV
:
1345 case BYTECODE_OP_MOD
:
1346 case BYTECODE_OP_PLUS
:
1347 case BYTECODE_OP_MINUS
:
1349 ERR("unsupported bytecode op %u\n",
1350 (unsigned int) *(bytecode_opcode_t
*) pc
);
1355 case BYTECODE_OP_EQ
:
1356 case BYTECODE_OP_NE
:
1357 case BYTECODE_OP_GT
:
1358 case BYTECODE_OP_LT
:
1359 case BYTECODE_OP_GE
:
1360 case BYTECODE_OP_LE
:
1361 case BYTECODE_OP_EQ_STRING
:
1362 case BYTECODE_OP_NE_STRING
:
1363 case BYTECODE_OP_GT_STRING
:
1364 case BYTECODE_OP_LT_STRING
:
1365 case BYTECODE_OP_GE_STRING
:
1366 case BYTECODE_OP_LE_STRING
:
1367 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
1368 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
1369 case BYTECODE_OP_EQ_S64
:
1370 case BYTECODE_OP_NE_S64
:
1371 case BYTECODE_OP_GT_S64
:
1372 case BYTECODE_OP_LT_S64
:
1373 case BYTECODE_OP_GE_S64
:
1374 case BYTECODE_OP_LE_S64
:
1375 case BYTECODE_OP_EQ_DOUBLE
:
1376 case BYTECODE_OP_NE_DOUBLE
:
1377 case BYTECODE_OP_GT_DOUBLE
:
1378 case BYTECODE_OP_LT_DOUBLE
:
1379 case BYTECODE_OP_GE_DOUBLE
:
1380 case BYTECODE_OP_LE_DOUBLE
:
1381 case BYTECODE_OP_EQ_DOUBLE_S64
:
1382 case BYTECODE_OP_NE_DOUBLE_S64
:
1383 case BYTECODE_OP_GT_DOUBLE_S64
:
1384 case BYTECODE_OP_LT_DOUBLE_S64
:
1385 case BYTECODE_OP_GE_DOUBLE_S64
:
1386 case BYTECODE_OP_LE_DOUBLE_S64
:
1387 case BYTECODE_OP_EQ_S64_DOUBLE
:
1388 case BYTECODE_OP_NE_S64_DOUBLE
:
1389 case BYTECODE_OP_GT_S64_DOUBLE
:
1390 case BYTECODE_OP_LT_S64_DOUBLE
:
1391 case BYTECODE_OP_GE_S64_DOUBLE
:
1392 case BYTECODE_OP_LE_S64_DOUBLE
:
1395 if (vstack_pop(stack
)) {
1399 if (!vstack_ax(stack
)) {
1400 ERR("Empty stack\n");
1404 switch (vstack_ax(stack
)->type
) {
1409 case REG_STAR_GLOB_STRING
:
1413 ERR("Unexpected register type %d for operation\n",
1414 (int) vstack_ax(stack
)->type
);
1419 vstack_ax(stack
)->type
= REG_S64
;
1420 next_pc
+= sizeof(struct binary_op
);
1424 case BYTECODE_OP_BIT_RSHIFT
:
1425 case BYTECODE_OP_BIT_LSHIFT
:
1426 case BYTECODE_OP_BIT_AND
:
1427 case BYTECODE_OP_BIT_OR
:
1428 case BYTECODE_OP_BIT_XOR
:
1431 if (vstack_pop(stack
)) {
1435 if (!vstack_ax(stack
)) {
1436 ERR("Empty stack\n");
1440 switch (vstack_ax(stack
)->type
) {
1445 case REG_STAR_GLOB_STRING
:
1449 ERR("Unexpected register type %d for operation\n",
1450 (int) vstack_ax(stack
)->type
);
1455 vstack_ax(stack
)->type
= REG_U64
;
1456 next_pc
+= sizeof(struct binary_op
);
1461 case BYTECODE_OP_UNARY_PLUS
:
1462 case BYTECODE_OP_UNARY_MINUS
:
1465 if (!vstack_ax(stack
)) {
1466 ERR("Empty stack\n");
1470 switch (vstack_ax(stack
)->type
) {
1477 ERR("Unexpected register type %d for operation\n",
1478 (int) vstack_ax(stack
)->type
);
1482 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1483 next_pc
+= sizeof(struct unary_op
);
1487 case BYTECODE_OP_UNARY_PLUS_S64
:
1488 case BYTECODE_OP_UNARY_MINUS_S64
:
1489 case BYTECODE_OP_UNARY_NOT_S64
:
1492 if (!vstack_ax(stack
)) {
1493 ERR("Empty stack\n");
1497 switch (vstack_ax(stack
)->type
) {
1502 ERR("Unexpected register type %d for operation\n",
1503 (int) vstack_ax(stack
)->type
);
1508 next_pc
+= sizeof(struct unary_op
);
1512 case BYTECODE_OP_UNARY_NOT
:
1515 if (!vstack_ax(stack
)) {
1516 ERR("Empty stack\n");
1520 switch (vstack_ax(stack
)->type
) {
1527 ERR("Unexpected register type %d for operation\n",
1528 (int) vstack_ax(stack
)->type
);
1533 next_pc
+= sizeof(struct unary_op
);
1537 case BYTECODE_OP_UNARY_BIT_NOT
:
1540 if (!vstack_ax(stack
)) {
1541 ERR("Empty stack\n");
1545 switch (vstack_ax(stack
)->type
) {
1552 ERR("Unexpected register type %d for operation\n",
1553 (int) vstack_ax(stack
)->type
);
1558 vstack_ax(stack
)->type
= REG_U64
;
1559 next_pc
+= sizeof(struct unary_op
);
1563 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
1566 if (!vstack_ax(stack
)) {
1567 ERR("Empty stack\n");
1571 switch (vstack_ax(stack
)->type
) {
1575 ERR("Incorrect register type %d for operation\n",
1576 (int) vstack_ax(stack
)->type
);
1581 vstack_ax(stack
)->type
= REG_S64
;
1582 next_pc
+= sizeof(struct unary_op
);
1586 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
1587 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
1590 if (!vstack_ax(stack
)) {
1591 ERR("Empty stack\n");
1595 switch (vstack_ax(stack
)->type
) {
1599 ERR("Incorrect register type %d for operation\n",
1600 (int) vstack_ax(stack
)->type
);
1605 vstack_ax(stack
)->type
= REG_DOUBLE
;
1606 next_pc
+= sizeof(struct unary_op
);
1611 case BYTECODE_OP_AND
:
1612 case BYTECODE_OP_OR
:
1614 struct logical_op
*insn
= (struct logical_op
*) pc
;
1617 /* Add merge point to table */
1618 merge_ret
= merge_point_add_check(merge_points
,
1619 insn
->skip_offset
, stack
);
1625 if (!vstack_ax(stack
)) {
1626 ERR("Empty stack\n");
1630 /* There is always a cast-to-s64 operation before a or/and op. */
1631 switch (vstack_ax(stack
)->type
) {
1636 ERR("Incorrect register type %d for operation\n",
1637 (int) vstack_ax(stack
)->type
);
1642 /* Continue to next instruction */
1643 /* Pop 1 when jump not taken */
1644 if (vstack_pop(stack
)) {
1648 next_pc
+= sizeof(struct logical_op
);
1652 /* load field ref */
1653 case BYTECODE_OP_LOAD_FIELD_REF
:
1655 ERR("Unknown field ref type\n");
1659 /* get context ref */
1660 case BYTECODE_OP_GET_CONTEXT_REF
:
1662 if (vstack_push(stack
)) {
1666 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1667 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1670 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
1671 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
1672 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1674 if (vstack_push(stack
)) {
1678 vstack_ax(stack
)->type
= REG_STRING
;
1679 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1682 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
1683 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1685 if (vstack_push(stack
)) {
1689 vstack_ax(stack
)->type
= REG_S64
;
1690 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1693 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
1694 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1696 if (vstack_push(stack
)) {
1700 vstack_ax(stack
)->type
= REG_DOUBLE
;
1701 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1705 /* load from immediate operand */
1706 case BYTECODE_OP_LOAD_STRING
:
1708 struct load_op
*insn
= (struct load_op
*) pc
;
1710 if (vstack_push(stack
)) {
1714 vstack_ax(stack
)->type
= REG_STRING
;
1715 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1719 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1721 struct load_op
*insn
= (struct load_op
*) pc
;
1723 if (vstack_push(stack
)) {
1727 vstack_ax(stack
)->type
= REG_STAR_GLOB_STRING
;
1728 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1732 case BYTECODE_OP_LOAD_S64
:
1734 if (vstack_push(stack
)) {
1738 vstack_ax(stack
)->type
= REG_S64
;
1739 next_pc
+= sizeof(struct load_op
)
1740 + sizeof(struct literal_numeric
);
1744 case BYTECODE_OP_LOAD_DOUBLE
:
1746 if (vstack_push(stack
)) {
1750 vstack_ax(stack
)->type
= REG_DOUBLE
;
1751 next_pc
+= sizeof(struct load_op
)
1752 + sizeof(struct literal_double
);
1756 case BYTECODE_OP_CAST_TO_S64
:
1757 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1760 if (!vstack_ax(stack
)) {
1761 ERR("Empty stack\n");
1765 switch (vstack_ax(stack
)->type
) {
1772 ERR("Incorrect register type %d for cast\n",
1773 (int) vstack_ax(stack
)->type
);
1777 vstack_ax(stack
)->type
= REG_S64
;
1778 next_pc
+= sizeof(struct cast_op
);
1781 case BYTECODE_OP_CAST_NOP
:
1783 next_pc
+= sizeof(struct cast_op
);
1788 * Instructions for recursive traversal through composed types.
1790 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1791 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1792 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1794 if (vstack_push(stack
)) {
1798 vstack_ax(stack
)->type
= REG_PTR
;
1799 next_pc
+= sizeof(struct load_op
);
1803 case BYTECODE_OP_LOAD_FIELD
:
1806 if (!vstack_ax(stack
)) {
1807 ERR("Empty stack\n");
1811 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1812 ERR("Expecting pointer on top of stack\n");
1816 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1817 next_pc
+= sizeof(struct load_op
);
1821 case BYTECODE_OP_LOAD_FIELD_S8
:
1822 case BYTECODE_OP_LOAD_FIELD_S16
:
1823 case BYTECODE_OP_LOAD_FIELD_S32
:
1824 case BYTECODE_OP_LOAD_FIELD_S64
:
1827 if (!vstack_ax(stack
)) {
1828 ERR("Empty stack\n");
1832 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1833 ERR("Expecting pointer on top of stack\n");
1837 vstack_ax(stack
)->type
= REG_S64
;
1838 next_pc
+= sizeof(struct load_op
);
1842 case BYTECODE_OP_LOAD_FIELD_U8
:
1843 case BYTECODE_OP_LOAD_FIELD_U16
:
1844 case BYTECODE_OP_LOAD_FIELD_U32
:
1845 case BYTECODE_OP_LOAD_FIELD_U64
:
1848 if (!vstack_ax(stack
)) {
1849 ERR("Empty stack\n");
1853 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1854 ERR("Expecting pointer on top of stack\n");
1858 vstack_ax(stack
)->type
= REG_U64
;
1859 next_pc
+= sizeof(struct load_op
);
1863 case BYTECODE_OP_LOAD_FIELD_STRING
:
1864 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1867 if (!vstack_ax(stack
)) {
1868 ERR("Empty stack\n");
1872 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1873 ERR("Expecting pointer on top of stack\n");
1877 vstack_ax(stack
)->type
= REG_STRING
;
1878 next_pc
+= sizeof(struct load_op
);
1882 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1885 if (!vstack_ax(stack
)) {
1886 ERR("Empty stack\n");
1890 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1891 ERR("Expecting pointer on top of stack\n");
1895 vstack_ax(stack
)->type
= REG_DOUBLE
;
1896 next_pc
+= sizeof(struct load_op
);
1900 case BYTECODE_OP_GET_SYMBOL
:
1901 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1904 if (!vstack_ax(stack
)) {
1905 ERR("Empty stack\n");
1909 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1910 ERR("Expecting pointer on top of stack\n");
1914 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1918 case BYTECODE_OP_GET_INDEX_U16
:
1921 if (!vstack_ax(stack
)) {
1922 ERR("Empty stack\n");
1926 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1927 ERR("Expecting pointer on top of stack\n");
1931 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1935 case BYTECODE_OP_GET_INDEX_U64
:
1938 if (!vstack_ax(stack
)) {
1939 ERR("Empty stack\n");
1943 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1944 ERR("Expecting pointer on top of stack\n");
1948 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1954 *_next_pc
= next_pc
;
/*
 * Never called concurrently (hash seed is shared).
 */
1961 int lttng_bytecode_validate(struct bytecode_runtime
*bytecode
)
1963 struct cds_lfht
*merge_points
;
1964 char *pc
, *next_pc
, *start_pc
;
1966 struct vstack stack
;
1968 vstack_init(&stack
);
1970 if (!lttng_hash_seed_ready
) {
1971 lttng_hash_seed
= time(NULL
);
1972 lttng_hash_seed_ready
= 1;
1975 * Note: merge_points hash table used by single thread, and
1976 * never concurrently resized. Therefore, we can use it without
1977 * holding RCU read-side lock and free nodes without using
1980 merge_points
= cds_lfht_new(DEFAULT_NR_MERGE_POINTS
,
1981 MIN_NR_BUCKETS
, MAX_NR_BUCKETS
,
1983 if (!merge_points
) {
1984 ERR("Error allocating hash table for bytecode validation\n");
1987 start_pc
= &bytecode
->code
[0];
1988 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
1990 ret
= bytecode_validate_overflow(bytecode
, start_pc
, pc
);
1993 ERR("Bytecode overflow\n");
1996 dbg_printf("Validating op %s (%u)\n",
1997 print_op((unsigned int) *(bytecode_opcode_t
*) pc
),
1998 (unsigned int) *(bytecode_opcode_t
*) pc
);
2001 * For each instruction, validate the current context
2002 * (traversal of entire execution flow), and validate
2003 * all merge points targeting this instruction.
2005 ret
= validate_instruction_all_contexts(bytecode
, merge_points
,
2006 &stack
, start_pc
, pc
);
2009 ret
= exec_insn(bytecode
, merge_points
, &stack
, &next_pc
, pc
);
2014 if (delete_all_nodes(merge_points
)) {
2016 ERR("Unexpected merge points\n");
2020 if (cds_lfht_destroy(merge_points
, NULL
)) {
2021 ERR("Error destroying hash table\n");