/*
 * lttng-bytecode-validator.c
 *
 * LTTng UST bytecode validator.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
32 #include "rculfhash.h"
34 #include "lttng-bytecode.h"
35 #include "lttng-hash-helper.h"
36 #include "string-utils.h"
37 #include "ust-events-internal.h"
40 * Number of merge points for hash table size. Hash table initialized to
41 * that size, and we do not resize, because we do not want to trigger
42 * RCU worker thread execution: fall-back on linear traversal if number
43 * of merge points exceeds this value.
45 #define DEFAULT_NR_MERGE_POINTS 128
46 #define MIN_NR_BUCKETS 128
47 #define MAX_NR_BUCKETS 128
49 /* merge point table node */
51 struct lttng_ust_lfht_node node
;
53 /* Context at merge point */
55 unsigned long target_pc
;
58 static unsigned long lttng_hash_seed
;
59 static unsigned int lttng_hash_seed_ready
;
62 int lttng_hash_match(struct lttng_ust_lfht_node
*node
, const void *key
)
64 struct lfht_mp_node
*mp_node
=
65 caa_container_of(node
, struct lfht_mp_node
, node
);
66 unsigned long key_pc
= (unsigned long) key
;
68 if (mp_node
->target_pc
== key_pc
)
75 int merge_points_compare(const struct vstack
*stacka
,
76 const struct vstack
*stackb
)
80 if (stacka
->top
!= stackb
->top
)
82 len
= stacka
->top
+ 1;
84 for (i
= 0; i
< len
; i
++) {
85 if (stacka
->e
[i
].type
!= REG_UNKNOWN
86 && stackb
->e
[i
].type
!= REG_UNKNOWN
87 && stacka
->e
[i
].type
!= stackb
->e
[i
].type
)
94 int merge_point_add_check(struct lttng_ust_lfht
*ht
, unsigned long target_pc
,
95 const struct vstack
*stack
)
97 struct lfht_mp_node
*node
;
98 unsigned long hash
= lttng_hash_mix((const char *) target_pc
,
101 struct lttng_ust_lfht_node
*ret
;
103 dbg_printf("Bytecode: adding merge point at offset %lu, hash %lu\n",
105 node
= zmalloc(sizeof(struct lfht_mp_node
));
108 node
->target_pc
= target_pc
;
109 memcpy(&node
->stack
, stack
, sizeof(node
->stack
));
110 ret
= lttng_ust_lfht_add_unique(ht
, hash
, lttng_hash_match
,
111 (const char *) target_pc
, &node
->node
);
112 if (ret
!= &node
->node
) {
113 struct lfht_mp_node
*ret_mp
=
114 caa_container_of(ret
, struct lfht_mp_node
, node
);
116 /* Key already present */
117 dbg_printf("Bytecode: compare merge points for offset %lu, hash %lu\n",
120 if (merge_points_compare(stack
, &ret_mp
->stack
)) {
121 ERR("Merge points differ for offset %lu\n",
130 * Binary comparators use top of stack and top of stack -1.
131 * Return 0 if typing is known to match, 1 if typing is dynamic
132 * (unknown), negative error value on error.
135 int bin_op_compare_check(struct vstack
*stack
, bytecode_opcode_t opcode
,
138 if (unlikely(!vstack_ax(stack
) || !vstack_bx(stack
)))
141 switch (vstack_ax(stack
)->type
) {
148 switch (vstack_bx(stack
)->type
) {
156 case REG_STAR_GLOB_STRING
:
157 if (opcode
!= BYTECODE_OP_EQ
&& opcode
!= BYTECODE_OP_NE
) {
167 case REG_STAR_GLOB_STRING
:
168 switch (vstack_bx(stack
)->type
) {
175 if (opcode
!= BYTECODE_OP_EQ
&& opcode
!= BYTECODE_OP_NE
) {
179 case REG_STAR_GLOB_STRING
:
189 switch (vstack_bx(stack
)->type
) {
196 case REG_STAR_GLOB_STRING
:
211 ERR("type mismatch for '%s' binary operator\n", str
);
215 ERR("empty stack for '%s' binary operator\n", str
);
219 ERR("unknown type for '%s' binary operator\n", str
);
224 * Binary bitwise operators use top of stack and top of stack -1.
225 * Return 0 if typing is known to match, 1 if typing is dynamic
226 * (unknown), negative error value on error.
229 int bin_op_bitwise_check(struct vstack
*stack
, bytecode_opcode_t opcode
,
232 if (unlikely(!vstack_ax(stack
) || !vstack_bx(stack
)))
235 switch (vstack_ax(stack
)->type
) {
243 switch (vstack_bx(stack
)->type
) {
261 ERR("empty stack for '%s' binary operator\n", str
);
265 ERR("unknown type for '%s' binary operator\n", str
);
270 int validate_get_symbol(struct bytecode_runtime
*bytecode
,
271 const struct get_symbol
*sym
)
273 const char *str
, *str_limit
;
276 if (sym
->offset
>= bytecode
->p
.bc
->bc
.len
- bytecode
->p
.bc
->bc
.reloc_offset
)
279 str
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.reloc_offset
+ sym
->offset
;
280 str_limit
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.len
;
281 len_limit
= str_limit
- str
;
282 if (strnlen(str
, len_limit
) == len_limit
)
288 * Validate bytecode range overflow within the validation pass.
289 * Called for each instruction encountered.
292 int bytecode_validate_overflow(struct bytecode_runtime
*bytecode
,
293 char *start_pc
, char *pc
)
297 switch (*(bytecode_opcode_t
*) pc
) {
298 case BYTECODE_OP_UNKNOWN
:
301 ERR("unknown bytecode op %u\n",
302 (unsigned int) *(bytecode_opcode_t
*) pc
);
307 case BYTECODE_OP_RETURN
:
308 case BYTECODE_OP_RETURN_S64
:
310 if (unlikely(pc
+ sizeof(struct return_op
)
311 > start_pc
+ bytecode
->len
)) {
318 case BYTECODE_OP_MUL
:
319 case BYTECODE_OP_DIV
:
320 case BYTECODE_OP_MOD
:
321 case BYTECODE_OP_PLUS
:
322 case BYTECODE_OP_MINUS
:
324 ERR("unsupported bytecode op %u\n",
325 (unsigned int) *(bytecode_opcode_t
*) pc
);
336 case BYTECODE_OP_EQ_STRING
:
337 case BYTECODE_OP_NE_STRING
:
338 case BYTECODE_OP_GT_STRING
:
339 case BYTECODE_OP_LT_STRING
:
340 case BYTECODE_OP_GE_STRING
:
341 case BYTECODE_OP_LE_STRING
:
342 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
343 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
344 case BYTECODE_OP_EQ_S64
:
345 case BYTECODE_OP_NE_S64
:
346 case BYTECODE_OP_GT_S64
:
347 case BYTECODE_OP_LT_S64
:
348 case BYTECODE_OP_GE_S64
:
349 case BYTECODE_OP_LE_S64
:
350 case BYTECODE_OP_EQ_DOUBLE
:
351 case BYTECODE_OP_NE_DOUBLE
:
352 case BYTECODE_OP_GT_DOUBLE
:
353 case BYTECODE_OP_LT_DOUBLE
:
354 case BYTECODE_OP_GE_DOUBLE
:
355 case BYTECODE_OP_LE_DOUBLE
:
356 case BYTECODE_OP_EQ_DOUBLE_S64
:
357 case BYTECODE_OP_NE_DOUBLE_S64
:
358 case BYTECODE_OP_GT_DOUBLE_S64
:
359 case BYTECODE_OP_LT_DOUBLE_S64
:
360 case BYTECODE_OP_GE_DOUBLE_S64
:
361 case BYTECODE_OP_LE_DOUBLE_S64
:
362 case BYTECODE_OP_EQ_S64_DOUBLE
:
363 case BYTECODE_OP_NE_S64_DOUBLE
:
364 case BYTECODE_OP_GT_S64_DOUBLE
:
365 case BYTECODE_OP_LT_S64_DOUBLE
:
366 case BYTECODE_OP_GE_S64_DOUBLE
:
367 case BYTECODE_OP_LE_S64_DOUBLE
:
368 case BYTECODE_OP_BIT_RSHIFT
:
369 case BYTECODE_OP_BIT_LSHIFT
:
370 case BYTECODE_OP_BIT_AND
:
371 case BYTECODE_OP_BIT_OR
:
372 case BYTECODE_OP_BIT_XOR
:
374 if (unlikely(pc
+ sizeof(struct binary_op
)
375 > start_pc
+ bytecode
->len
)) {
382 case BYTECODE_OP_UNARY_PLUS
:
383 case BYTECODE_OP_UNARY_MINUS
:
384 case BYTECODE_OP_UNARY_NOT
:
385 case BYTECODE_OP_UNARY_PLUS_S64
:
386 case BYTECODE_OP_UNARY_MINUS_S64
:
387 case BYTECODE_OP_UNARY_NOT_S64
:
388 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
389 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
390 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
391 case BYTECODE_OP_UNARY_BIT_NOT
:
393 if (unlikely(pc
+ sizeof(struct unary_op
)
394 > start_pc
+ bytecode
->len
)) {
401 case BYTECODE_OP_AND
:
404 if (unlikely(pc
+ sizeof(struct logical_op
)
405 > start_pc
+ bytecode
->len
)) {
412 case BYTECODE_OP_LOAD_FIELD_REF
:
414 ERR("Unknown field ref type\n");
419 /* get context ref */
420 case BYTECODE_OP_GET_CONTEXT_REF
:
421 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
422 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
423 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
424 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
425 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
426 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
427 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
429 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct field_ref
)
430 > start_pc
+ bytecode
->len
)) {
436 /* load from immediate operand */
437 case BYTECODE_OP_LOAD_STRING
:
438 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
440 struct load_op
*insn
= (struct load_op
*) pc
;
441 uint32_t str_len
, maxlen
;
443 if (unlikely(pc
+ sizeof(struct load_op
)
444 > start_pc
+ bytecode
->len
)) {
449 maxlen
= start_pc
+ bytecode
->len
- pc
- sizeof(struct load_op
);
450 str_len
= strnlen(insn
->data
, maxlen
);
451 if (unlikely(str_len
>= maxlen
)) {
452 /* Final '\0' not found within range */
458 case BYTECODE_OP_LOAD_S64
:
460 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct literal_numeric
)
461 > start_pc
+ bytecode
->len
)) {
467 case BYTECODE_OP_LOAD_DOUBLE
:
469 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct literal_double
)
470 > start_pc
+ bytecode
->len
)) {
476 case BYTECODE_OP_CAST_TO_S64
:
477 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
478 case BYTECODE_OP_CAST_NOP
:
480 if (unlikely(pc
+ sizeof(struct cast_op
)
481 > start_pc
+ bytecode
->len
)) {
488 * Instructions for recursive traversal through composed types.
490 case BYTECODE_OP_GET_CONTEXT_ROOT
:
491 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
492 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
493 case BYTECODE_OP_LOAD_FIELD
:
494 case BYTECODE_OP_LOAD_FIELD_S8
:
495 case BYTECODE_OP_LOAD_FIELD_S16
:
496 case BYTECODE_OP_LOAD_FIELD_S32
:
497 case BYTECODE_OP_LOAD_FIELD_S64
:
498 case BYTECODE_OP_LOAD_FIELD_U8
:
499 case BYTECODE_OP_LOAD_FIELD_U16
:
500 case BYTECODE_OP_LOAD_FIELD_U32
:
501 case BYTECODE_OP_LOAD_FIELD_U64
:
502 case BYTECODE_OP_LOAD_FIELD_STRING
:
503 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
504 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
505 if (unlikely(pc
+ sizeof(struct load_op
)
506 > start_pc
+ bytecode
->len
)) {
511 case BYTECODE_OP_GET_SYMBOL
:
513 struct load_op
*insn
= (struct load_op
*) pc
;
514 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
516 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_symbol
)
517 > start_pc
+ bytecode
->len
)) {
521 ret
= validate_get_symbol(bytecode
, sym
);
525 case BYTECODE_OP_GET_SYMBOL_FIELD
:
526 ERR("Unexpected get symbol field");
530 case BYTECODE_OP_GET_INDEX_U16
:
531 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_index_u16
)
532 > start_pc
+ bytecode
->len
)) {
537 case BYTECODE_OP_GET_INDEX_U64
:
538 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_index_u64
)
539 > start_pc
+ bytecode
->len
)) {
549 unsigned long delete_all_nodes(struct lttng_ust_lfht
*ht
)
551 struct lttng_ust_lfht_iter iter
;
552 struct lfht_mp_node
*node
;
553 unsigned long nr_nodes
= 0;
555 lttng_ust_lfht_for_each_entry(ht
, &iter
, node
, node
) {
558 ret
= lttng_ust_lfht_del(ht
, lttng_ust_lfht_iter_get_node(&iter
));
560 /* note: this hash table is never used concurrently */
573 int validate_instruction_context(struct bytecode_runtime
*bytecode
,
574 struct vstack
*stack
,
579 const bytecode_opcode_t opcode
= *(bytecode_opcode_t
*) pc
;
582 case BYTECODE_OP_UNKNOWN
:
585 ERR("unknown bytecode op %u\n",
586 (unsigned int) *(bytecode_opcode_t
*) pc
);
591 case BYTECODE_OP_RETURN
:
592 case BYTECODE_OP_RETURN_S64
:
598 case BYTECODE_OP_MUL
:
599 case BYTECODE_OP_DIV
:
600 case BYTECODE_OP_MOD
:
601 case BYTECODE_OP_PLUS
:
602 case BYTECODE_OP_MINUS
:
604 ERR("unsupported bytecode op %u\n",
605 (unsigned int) opcode
);
612 ret
= bin_op_compare_check(stack
, opcode
, "==");
619 ret
= bin_op_compare_check(stack
, opcode
, "!=");
626 ret
= bin_op_compare_check(stack
, opcode
, ">");
633 ret
= bin_op_compare_check(stack
, opcode
, "<");
640 ret
= bin_op_compare_check(stack
, opcode
, ">=");
647 ret
= bin_op_compare_check(stack
, opcode
, "<=");
653 case BYTECODE_OP_EQ_STRING
:
654 case BYTECODE_OP_NE_STRING
:
655 case BYTECODE_OP_GT_STRING
:
656 case BYTECODE_OP_LT_STRING
:
657 case BYTECODE_OP_GE_STRING
:
658 case BYTECODE_OP_LE_STRING
:
660 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
661 ERR("Empty stack\n");
665 if (vstack_ax(stack
)->type
!= REG_STRING
666 || vstack_bx(stack
)->type
!= REG_STRING
) {
667 ERR("Unexpected register type for string comparator\n");
674 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
675 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
677 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
678 ERR("Empty stack\n");
682 if (vstack_ax(stack
)->type
!= REG_STAR_GLOB_STRING
683 && vstack_bx(stack
)->type
!= REG_STAR_GLOB_STRING
) {
684 ERR("Unexpected register type for globbing pattern comparator\n");
691 case BYTECODE_OP_EQ_S64
:
692 case BYTECODE_OP_NE_S64
:
693 case BYTECODE_OP_GT_S64
:
694 case BYTECODE_OP_LT_S64
:
695 case BYTECODE_OP_GE_S64
:
696 case BYTECODE_OP_LE_S64
:
698 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
699 ERR("Empty stack\n");
703 switch (vstack_ax(stack
)->type
) {
708 ERR("Unexpected register type for s64 comparator\n");
712 switch (vstack_bx(stack
)->type
) {
717 ERR("Unexpected register type for s64 comparator\n");
724 case BYTECODE_OP_EQ_DOUBLE
:
725 case BYTECODE_OP_NE_DOUBLE
:
726 case BYTECODE_OP_GT_DOUBLE
:
727 case BYTECODE_OP_LT_DOUBLE
:
728 case BYTECODE_OP_GE_DOUBLE
:
729 case BYTECODE_OP_LE_DOUBLE
:
731 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
732 ERR("Empty stack\n");
736 if (vstack_ax(stack
)->type
!= REG_DOUBLE
&& vstack_bx(stack
)->type
!= REG_DOUBLE
) {
737 ERR("Double operator should have two double registers\n");
744 case BYTECODE_OP_EQ_DOUBLE_S64
:
745 case BYTECODE_OP_NE_DOUBLE_S64
:
746 case BYTECODE_OP_GT_DOUBLE_S64
:
747 case BYTECODE_OP_LT_DOUBLE_S64
:
748 case BYTECODE_OP_GE_DOUBLE_S64
:
749 case BYTECODE_OP_LE_DOUBLE_S64
:
751 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
752 ERR("Empty stack\n");
756 switch (vstack_ax(stack
)->type
) {
761 ERR("Double-S64 operator has unexpected register types\n");
765 switch (vstack_bx(stack
)->type
) {
769 ERR("Double-S64 operator has unexpected register types\n");
776 case BYTECODE_OP_EQ_S64_DOUBLE
:
777 case BYTECODE_OP_NE_S64_DOUBLE
:
778 case BYTECODE_OP_GT_S64_DOUBLE
:
779 case BYTECODE_OP_LT_S64_DOUBLE
:
780 case BYTECODE_OP_GE_S64_DOUBLE
:
781 case BYTECODE_OP_LE_S64_DOUBLE
:
783 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
784 ERR("Empty stack\n");
788 switch (vstack_ax(stack
)->type
) {
792 ERR("S64-Double operator has unexpected register types\n");
796 switch (vstack_bx(stack
)->type
) {
801 ERR("S64-Double operator has unexpected register types\n");
808 case BYTECODE_OP_BIT_RSHIFT
:
809 ret
= bin_op_bitwise_check(stack
, opcode
, ">>");
813 case BYTECODE_OP_BIT_LSHIFT
:
814 ret
= bin_op_bitwise_check(stack
, opcode
, "<<");
818 case BYTECODE_OP_BIT_AND
:
819 ret
= bin_op_bitwise_check(stack
, opcode
, "&");
823 case BYTECODE_OP_BIT_OR
:
824 ret
= bin_op_bitwise_check(stack
, opcode
, "|");
828 case BYTECODE_OP_BIT_XOR
:
829 ret
= bin_op_bitwise_check(stack
, opcode
, "^");
835 case BYTECODE_OP_UNARY_PLUS
:
836 case BYTECODE_OP_UNARY_MINUS
:
837 case BYTECODE_OP_UNARY_NOT
:
839 if (!vstack_ax(stack
)) {
840 ERR("Empty stack\n");
844 switch (vstack_ax(stack
)->type
) {
846 ERR("unknown register type\n");
851 case REG_STAR_GLOB_STRING
:
852 ERR("Unary op can only be applied to numeric or floating point registers\n");
866 case BYTECODE_OP_UNARY_BIT_NOT
:
868 if (!vstack_ax(stack
)) {
869 ERR("Empty stack\n");
873 switch (vstack_ax(stack
)->type
) {
875 ERR("unknown register type\n");
880 case REG_STAR_GLOB_STRING
:
882 ERR("Unary bitwise op can only be applied to numeric registers\n");
895 case BYTECODE_OP_UNARY_PLUS_S64
:
896 case BYTECODE_OP_UNARY_MINUS_S64
:
897 case BYTECODE_OP_UNARY_NOT_S64
:
899 if (!vstack_ax(stack
)) {
900 ERR("Empty stack\n");
904 if (vstack_ax(stack
)->type
!= REG_S64
&&
905 vstack_ax(stack
)->type
!= REG_U64
) {
906 ERR("Invalid register type\n");
913 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
914 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
915 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
917 if (!vstack_ax(stack
)) {
918 ERR("Empty stack\n");
922 if (vstack_ax(stack
)->type
!= REG_DOUBLE
) {
923 ERR("Invalid register type\n");
931 case BYTECODE_OP_AND
:
934 struct logical_op
*insn
= (struct logical_op
*) pc
;
936 if (!vstack_ax(stack
)) {
937 ERR("Empty stack\n");
941 if (vstack_ax(stack
)->type
!= REG_S64
942 && vstack_ax(stack
)->type
!= REG_U64
943 && vstack_ax(stack
)->type
!= REG_UNKNOWN
) {
944 ERR("Logical comparator expects S64, U64 or dynamic register\n");
949 dbg_printf("Validate jumping to bytecode offset %u\n",
950 (unsigned int) insn
->skip_offset
);
951 if (unlikely(start_pc
+ insn
->skip_offset
<= pc
)) {
952 ERR("Loops are not allowed in bytecode\n");
960 case BYTECODE_OP_LOAD_FIELD_REF
:
962 ERR("Unknown field ref type\n");
966 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
967 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
969 struct load_op
*insn
= (struct load_op
*) pc
;
970 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
972 dbg_printf("Validate load field ref offset %u type string\n",
976 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
978 struct load_op
*insn
= (struct load_op
*) pc
;
979 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
981 dbg_printf("Validate load field ref offset %u type s64\n",
985 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
987 struct load_op
*insn
= (struct load_op
*) pc
;
988 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
990 dbg_printf("Validate load field ref offset %u type double\n",
995 /* load from immediate operand */
996 case BYTECODE_OP_LOAD_STRING
:
997 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1002 case BYTECODE_OP_LOAD_S64
:
1007 case BYTECODE_OP_LOAD_DOUBLE
:
1012 case BYTECODE_OP_CAST_TO_S64
:
1013 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1015 struct cast_op
*insn
= (struct cast_op
*) pc
;
1017 if (!vstack_ax(stack
)) {
1018 ERR("Empty stack\n");
1022 switch (vstack_ax(stack
)->type
) {
1024 ERR("unknown register type\n");
1029 case REG_STAR_GLOB_STRING
:
1030 ERR("Cast op can only be applied to numeric or floating point registers\n");
1042 if (insn
->op
== BYTECODE_OP_CAST_DOUBLE_TO_S64
) {
1043 if (vstack_ax(stack
)->type
!= REG_DOUBLE
) {
1044 ERR("Cast expects double\n");
1051 case BYTECODE_OP_CAST_NOP
:
1056 /* get context ref */
1057 case BYTECODE_OP_GET_CONTEXT_REF
:
1059 struct load_op
*insn
= (struct load_op
*) pc
;
1060 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1062 dbg_printf("Validate get context ref offset %u type dynamic\n",
1066 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1068 struct load_op
*insn
= (struct load_op
*) pc
;
1069 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1071 dbg_printf("Validate get context ref offset %u type string\n",
1075 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1077 struct load_op
*insn
= (struct load_op
*) pc
;
1078 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1080 dbg_printf("Validate get context ref offset %u type s64\n",
1084 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1086 struct load_op
*insn
= (struct load_op
*) pc
;
1087 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1089 dbg_printf("Validate get context ref offset %u type double\n",
1095 * Instructions for recursive traversal through composed types.
1097 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1099 dbg_printf("Validate get context root\n");
1102 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1104 dbg_printf("Validate get app context root\n");
1107 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1109 dbg_printf("Validate get payload root\n");
1112 case BYTECODE_OP_LOAD_FIELD
:
1115 * We tolerate that field type is unknown at validation,
1116 * because we are performing the load specialization in
1117 * a phase after validation.
1119 dbg_printf("Validate load field\n");
1122 case BYTECODE_OP_LOAD_FIELD_S8
:
1124 dbg_printf("Validate load field s8\n");
1127 case BYTECODE_OP_LOAD_FIELD_S16
:
1129 dbg_printf("Validate load field s16\n");
1132 case BYTECODE_OP_LOAD_FIELD_S32
:
1134 dbg_printf("Validate load field s32\n");
1137 case BYTECODE_OP_LOAD_FIELD_S64
:
1139 dbg_printf("Validate load field s64\n");
1142 case BYTECODE_OP_LOAD_FIELD_U8
:
1144 dbg_printf("Validate load field u8\n");
1147 case BYTECODE_OP_LOAD_FIELD_U16
:
1149 dbg_printf("Validate load field u16\n");
1152 case BYTECODE_OP_LOAD_FIELD_U32
:
1154 dbg_printf("Validate load field u32\n");
1157 case BYTECODE_OP_LOAD_FIELD_U64
:
1159 dbg_printf("Validate load field u64\n");
1162 case BYTECODE_OP_LOAD_FIELD_STRING
:
1164 dbg_printf("Validate load field string\n");
1167 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1169 dbg_printf("Validate load field sequence\n");
1172 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1174 dbg_printf("Validate load field double\n");
1178 case BYTECODE_OP_GET_SYMBOL
:
1180 struct load_op
*insn
= (struct load_op
*) pc
;
1181 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
1183 dbg_printf("Validate get symbol offset %u\n", sym
->offset
);
1187 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1189 struct load_op
*insn
= (struct load_op
*) pc
;
1190 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
1192 dbg_printf("Validate get symbol field offset %u\n", sym
->offset
);
1196 case BYTECODE_OP_GET_INDEX_U16
:
1198 struct load_op
*insn
= (struct load_op
*) pc
;
1199 struct get_index_u16
*get_index
= (struct get_index_u16
*) insn
->data
;
1201 dbg_printf("Validate get index u16 index %u\n", get_index
->index
);
1205 case BYTECODE_OP_GET_INDEX_U64
:
1207 struct load_op
*insn
= (struct load_op
*) pc
;
1208 struct get_index_u64
*get_index
= (struct get_index_u64
*) insn
->data
;
1210 dbg_printf("Validate get index u64 index %" PRIu64
"\n", get_index
->index
);
1224 int validate_instruction_all_contexts(struct bytecode_runtime
*bytecode
,
1225 struct lttng_ust_lfht
*merge_points
,
1226 struct vstack
*stack
,
1231 unsigned long target_pc
= pc
- start_pc
;
1232 struct lttng_ust_lfht_iter iter
;
1233 struct lttng_ust_lfht_node
*node
;
1234 struct lfht_mp_node
*mp_node
;
1237 /* Validate the context resulting from the previous instruction */
1238 ret
= validate_instruction_context(bytecode
, stack
, start_pc
, pc
);
1242 /* Validate merge points */
1243 hash
= lttng_hash_mix((const char *) target_pc
, sizeof(target_pc
),
1245 lttng_ust_lfht_lookup(merge_points
, hash
, lttng_hash_match
,
1246 (const char *) target_pc
, &iter
);
1247 node
= lttng_ust_lfht_iter_get_node(&iter
);
1249 mp_node
= caa_container_of(node
, struct lfht_mp_node
, node
);
1251 dbg_printf("Bytecode: validate merge point at offset %lu\n",
1253 if (merge_points_compare(stack
, &mp_node
->stack
)) {
1254 ERR("Merge points differ for offset %lu\n",
1258 /* Once validated, we can remove the merge point */
1259 dbg_printf("Bytecode: remove merge point at offset %lu\n",
1261 ret
= lttng_ust_lfht_del(merge_points
, node
);
1269 * >0: going to next insn.
1270 * 0: success, stop iteration.
1274 int exec_insn(struct bytecode_runtime
*bytecode
,
1275 struct lttng_ust_lfht
*merge_points
,
1276 struct vstack
*stack
,
1281 char *next_pc
= *_next_pc
;
1283 switch (*(bytecode_opcode_t
*) pc
) {
1284 case BYTECODE_OP_UNKNOWN
:
1287 ERR("unknown bytecode op %u\n",
1288 (unsigned int) *(bytecode_opcode_t
*) pc
);
1293 case BYTECODE_OP_RETURN
:
1295 if (!vstack_ax(stack
)) {
1296 ERR("Empty stack\n");
1300 switch (vstack_ax(stack
)->type
) {
1309 ERR("Unexpected register type %d at end of bytecode\n",
1310 (int) vstack_ax(stack
)->type
);
1318 case BYTECODE_OP_RETURN_S64
:
1320 if (!vstack_ax(stack
)) {
1321 ERR("Empty stack\n");
1325 switch (vstack_ax(stack
)->type
) {
1331 ERR("Unexpected register type %d at end of bytecode\n",
1332 (int) vstack_ax(stack
)->type
);
1342 case BYTECODE_OP_MUL
:
1343 case BYTECODE_OP_DIV
:
1344 case BYTECODE_OP_MOD
:
1345 case BYTECODE_OP_PLUS
:
1346 case BYTECODE_OP_MINUS
:
1348 ERR("unsupported bytecode op %u\n",
1349 (unsigned int) *(bytecode_opcode_t
*) pc
);
1354 case BYTECODE_OP_EQ
:
1355 case BYTECODE_OP_NE
:
1356 case BYTECODE_OP_GT
:
1357 case BYTECODE_OP_LT
:
1358 case BYTECODE_OP_GE
:
1359 case BYTECODE_OP_LE
:
1360 case BYTECODE_OP_EQ_STRING
:
1361 case BYTECODE_OP_NE_STRING
:
1362 case BYTECODE_OP_GT_STRING
:
1363 case BYTECODE_OP_LT_STRING
:
1364 case BYTECODE_OP_GE_STRING
:
1365 case BYTECODE_OP_LE_STRING
:
1366 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
1367 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
1368 case BYTECODE_OP_EQ_S64
:
1369 case BYTECODE_OP_NE_S64
:
1370 case BYTECODE_OP_GT_S64
:
1371 case BYTECODE_OP_LT_S64
:
1372 case BYTECODE_OP_GE_S64
:
1373 case BYTECODE_OP_LE_S64
:
1374 case BYTECODE_OP_EQ_DOUBLE
:
1375 case BYTECODE_OP_NE_DOUBLE
:
1376 case BYTECODE_OP_GT_DOUBLE
:
1377 case BYTECODE_OP_LT_DOUBLE
:
1378 case BYTECODE_OP_GE_DOUBLE
:
1379 case BYTECODE_OP_LE_DOUBLE
:
1380 case BYTECODE_OP_EQ_DOUBLE_S64
:
1381 case BYTECODE_OP_NE_DOUBLE_S64
:
1382 case BYTECODE_OP_GT_DOUBLE_S64
:
1383 case BYTECODE_OP_LT_DOUBLE_S64
:
1384 case BYTECODE_OP_GE_DOUBLE_S64
:
1385 case BYTECODE_OP_LE_DOUBLE_S64
:
1386 case BYTECODE_OP_EQ_S64_DOUBLE
:
1387 case BYTECODE_OP_NE_S64_DOUBLE
:
1388 case BYTECODE_OP_GT_S64_DOUBLE
:
1389 case BYTECODE_OP_LT_S64_DOUBLE
:
1390 case BYTECODE_OP_GE_S64_DOUBLE
:
1391 case BYTECODE_OP_LE_S64_DOUBLE
:
1394 if (vstack_pop(stack
)) {
1398 if (!vstack_ax(stack
)) {
1399 ERR("Empty stack\n");
1403 switch (vstack_ax(stack
)->type
) {
1408 case REG_STAR_GLOB_STRING
:
1412 ERR("Unexpected register type %d for operation\n",
1413 (int) vstack_ax(stack
)->type
);
1418 vstack_ax(stack
)->type
= REG_S64
;
1419 next_pc
+= sizeof(struct binary_op
);
1423 case BYTECODE_OP_BIT_RSHIFT
:
1424 case BYTECODE_OP_BIT_LSHIFT
:
1425 case BYTECODE_OP_BIT_AND
:
1426 case BYTECODE_OP_BIT_OR
:
1427 case BYTECODE_OP_BIT_XOR
:
1430 if (vstack_pop(stack
)) {
1434 if (!vstack_ax(stack
)) {
1435 ERR("Empty stack\n");
1439 switch (vstack_ax(stack
)->type
) {
1444 case REG_STAR_GLOB_STRING
:
1448 ERR("Unexpected register type %d for operation\n",
1449 (int) vstack_ax(stack
)->type
);
1454 vstack_ax(stack
)->type
= REG_U64
;
1455 next_pc
+= sizeof(struct binary_op
);
1460 case BYTECODE_OP_UNARY_PLUS
:
1461 case BYTECODE_OP_UNARY_MINUS
:
1464 if (!vstack_ax(stack
)) {
1465 ERR("Empty stack\n");
1469 switch (vstack_ax(stack
)->type
) {
1476 ERR("Unexpected register type %d for operation\n",
1477 (int) vstack_ax(stack
)->type
);
1481 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1482 next_pc
+= sizeof(struct unary_op
);
1486 case BYTECODE_OP_UNARY_PLUS_S64
:
1487 case BYTECODE_OP_UNARY_MINUS_S64
:
1488 case BYTECODE_OP_UNARY_NOT_S64
:
1491 if (!vstack_ax(stack
)) {
1492 ERR("Empty stack\n");
1496 switch (vstack_ax(stack
)->type
) {
1501 ERR("Unexpected register type %d for operation\n",
1502 (int) vstack_ax(stack
)->type
);
1507 next_pc
+= sizeof(struct unary_op
);
1511 case BYTECODE_OP_UNARY_NOT
:
1514 if (!vstack_ax(stack
)) {
1515 ERR("Empty stack\n");
1519 switch (vstack_ax(stack
)->type
) {
1526 ERR("Unexpected register type %d for operation\n",
1527 (int) vstack_ax(stack
)->type
);
1532 next_pc
+= sizeof(struct unary_op
);
1536 case BYTECODE_OP_UNARY_BIT_NOT
:
1539 if (!vstack_ax(stack
)) {
1540 ERR("Empty stack\n");
1544 switch (vstack_ax(stack
)->type
) {
1551 ERR("Unexpected register type %d for operation\n",
1552 (int) vstack_ax(stack
)->type
);
1557 vstack_ax(stack
)->type
= REG_U64
;
1558 next_pc
+= sizeof(struct unary_op
);
1562 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
1565 if (!vstack_ax(stack
)) {
1566 ERR("Empty stack\n");
1570 switch (vstack_ax(stack
)->type
) {
1574 ERR("Incorrect register type %d for operation\n",
1575 (int) vstack_ax(stack
)->type
);
1580 vstack_ax(stack
)->type
= REG_S64
;
1581 next_pc
+= sizeof(struct unary_op
);
1585 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
1586 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
1589 if (!vstack_ax(stack
)) {
1590 ERR("Empty stack\n");
1594 switch (vstack_ax(stack
)->type
) {
1598 ERR("Incorrect register type %d for operation\n",
1599 (int) vstack_ax(stack
)->type
);
1604 vstack_ax(stack
)->type
= REG_DOUBLE
;
1605 next_pc
+= sizeof(struct unary_op
);
1610 case BYTECODE_OP_AND
:
1611 case BYTECODE_OP_OR
:
1613 struct logical_op
*insn
= (struct logical_op
*) pc
;
1616 /* Add merge point to table */
1617 merge_ret
= merge_point_add_check(merge_points
,
1618 insn
->skip_offset
, stack
);
1624 if (!vstack_ax(stack
)) {
1625 ERR("Empty stack\n");
1629 /* There is always a cast-to-s64 operation before a or/and op. */
1630 switch (vstack_ax(stack
)->type
) {
1635 ERR("Incorrect register type %d for operation\n",
1636 (int) vstack_ax(stack
)->type
);
1641 /* Continue to next instruction */
1642 /* Pop 1 when jump not taken */
1643 if (vstack_pop(stack
)) {
1647 next_pc
+= sizeof(struct logical_op
);
1651 /* load field ref */
1652 case BYTECODE_OP_LOAD_FIELD_REF
:
1654 ERR("Unknown field ref type\n");
1658 /* get context ref */
1659 case BYTECODE_OP_GET_CONTEXT_REF
:
1661 if (vstack_push(stack
)) {
1665 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1666 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1669 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
1670 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
1671 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1673 if (vstack_push(stack
)) {
1677 vstack_ax(stack
)->type
= REG_STRING
;
1678 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1681 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
1682 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1684 if (vstack_push(stack
)) {
1688 vstack_ax(stack
)->type
= REG_S64
;
1689 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1692 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
1693 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1695 if (vstack_push(stack
)) {
1699 vstack_ax(stack
)->type
= REG_DOUBLE
;
1700 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1704 /* load from immediate operand */
1705 case BYTECODE_OP_LOAD_STRING
:
1707 struct load_op
*insn
= (struct load_op
*) pc
;
1709 if (vstack_push(stack
)) {
1713 vstack_ax(stack
)->type
= REG_STRING
;
1714 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1718 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1720 struct load_op
*insn
= (struct load_op
*) pc
;
1722 if (vstack_push(stack
)) {
1726 vstack_ax(stack
)->type
= REG_STAR_GLOB_STRING
;
1727 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1731 case BYTECODE_OP_LOAD_S64
:
1733 if (vstack_push(stack
)) {
1737 vstack_ax(stack
)->type
= REG_S64
;
1738 next_pc
+= sizeof(struct load_op
)
1739 + sizeof(struct literal_numeric
);
1743 case BYTECODE_OP_LOAD_DOUBLE
:
1745 if (vstack_push(stack
)) {
1749 vstack_ax(stack
)->type
= REG_DOUBLE
;
1750 next_pc
+= sizeof(struct load_op
)
1751 + sizeof(struct literal_double
);
1755 case BYTECODE_OP_CAST_TO_S64
:
1756 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1759 if (!vstack_ax(stack
)) {
1760 ERR("Empty stack\n");
1764 switch (vstack_ax(stack
)->type
) {
1771 ERR("Incorrect register type %d for cast\n",
1772 (int) vstack_ax(stack
)->type
);
1776 vstack_ax(stack
)->type
= REG_S64
;
1777 next_pc
+= sizeof(struct cast_op
);
1780 case BYTECODE_OP_CAST_NOP
:
1782 next_pc
+= sizeof(struct cast_op
);
1787 * Instructions for recursive traversal through composed types.
1789 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1790 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1791 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1793 if (vstack_push(stack
)) {
1797 vstack_ax(stack
)->type
= REG_PTR
;
1798 next_pc
+= sizeof(struct load_op
);
1802 case BYTECODE_OP_LOAD_FIELD
:
1805 if (!vstack_ax(stack
)) {
1806 ERR("Empty stack\n");
1810 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1811 ERR("Expecting pointer on top of stack\n");
1815 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1816 next_pc
+= sizeof(struct load_op
);
1820 case BYTECODE_OP_LOAD_FIELD_S8
:
1821 case BYTECODE_OP_LOAD_FIELD_S16
:
1822 case BYTECODE_OP_LOAD_FIELD_S32
:
1823 case BYTECODE_OP_LOAD_FIELD_S64
:
1826 if (!vstack_ax(stack
)) {
1827 ERR("Empty stack\n");
1831 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1832 ERR("Expecting pointer on top of stack\n");
1836 vstack_ax(stack
)->type
= REG_S64
;
1837 next_pc
+= sizeof(struct load_op
);
1841 case BYTECODE_OP_LOAD_FIELD_U8
:
1842 case BYTECODE_OP_LOAD_FIELD_U16
:
1843 case BYTECODE_OP_LOAD_FIELD_U32
:
1844 case BYTECODE_OP_LOAD_FIELD_U64
:
1847 if (!vstack_ax(stack
)) {
1848 ERR("Empty stack\n");
1852 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1853 ERR("Expecting pointer on top of stack\n");
1857 vstack_ax(stack
)->type
= REG_U64
;
1858 next_pc
+= sizeof(struct load_op
);
1862 case BYTECODE_OP_LOAD_FIELD_STRING
:
1863 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1866 if (!vstack_ax(stack
)) {
1867 ERR("Empty stack\n");
1871 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1872 ERR("Expecting pointer on top of stack\n");
1876 vstack_ax(stack
)->type
= REG_STRING
;
1877 next_pc
+= sizeof(struct load_op
);
1881 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1884 if (!vstack_ax(stack
)) {
1885 ERR("Empty stack\n");
1889 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1890 ERR("Expecting pointer on top of stack\n");
1894 vstack_ax(stack
)->type
= REG_DOUBLE
;
1895 next_pc
+= sizeof(struct load_op
);
1899 case BYTECODE_OP_GET_SYMBOL
:
1900 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1903 if (!vstack_ax(stack
)) {
1904 ERR("Empty stack\n");
1908 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1909 ERR("Expecting pointer on top of stack\n");
1913 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1917 case BYTECODE_OP_GET_INDEX_U16
:
1920 if (!vstack_ax(stack
)) {
1921 ERR("Empty stack\n");
1925 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1926 ERR("Expecting pointer on top of stack\n");
1930 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1934 case BYTECODE_OP_GET_INDEX_U64
:
1937 if (!vstack_ax(stack
)) {
1938 ERR("Empty stack\n");
1942 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1943 ERR("Expecting pointer on top of stack\n");
1947 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1953 *_next_pc
= next_pc
;
1958 * Never called concurrently (hash seed is shared).
1960 int lttng_bytecode_validate(struct bytecode_runtime
*bytecode
)
1962 struct lttng_ust_lfht
*merge_points
;
1963 char *pc
, *next_pc
, *start_pc
;
1965 struct vstack stack
;
1967 vstack_init(&stack
);
1969 if (!lttng_hash_seed_ready
) {
1970 lttng_hash_seed
= time(NULL
);
1971 lttng_hash_seed_ready
= 1;
1974 * Note: merge_points hash table used by single thread, and
1975 * never concurrently resized. Therefore, we can use it without
1976 * holding RCU read-side lock and free nodes without using
1979 merge_points
= lttng_ust_lfht_new(DEFAULT_NR_MERGE_POINTS
,
1980 MIN_NR_BUCKETS
, MAX_NR_BUCKETS
,
1982 if (!merge_points
) {
1983 ERR("Error allocating hash table for bytecode validation\n");
1986 start_pc
= &bytecode
->code
[0];
1987 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
1989 ret
= bytecode_validate_overflow(bytecode
, start_pc
, pc
);
1992 ERR("Bytecode overflow\n");
1995 dbg_printf("Validating op %s (%u)\n",
1996 print_op((unsigned int) *(bytecode_opcode_t
*) pc
),
1997 (unsigned int) *(bytecode_opcode_t
*) pc
);
2000 * For each instruction, validate the current context
2001 * (traversal of entire execution flow), and validate
2002 * all merge points targeting this instruction.
2004 ret
= validate_instruction_all_contexts(bytecode
, merge_points
,
2005 &stack
, start_pc
, pc
);
2008 ret
= exec_insn(bytecode
, merge_points
, &stack
, &next_pc
, pc
);
2013 if (delete_all_nodes(merge_points
)) {
2015 ERR("Unexpected merge points\n");
2019 if (lttng_ust_lfht_destroy(merge_points
)) {
2020 ERR("Error destroying hash table\n");