/* SPDX-License-Identifier: MIT
 *
 * lttng-bytecode-validator.c
 *
 * LTTng modules bytecode validator.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 */
10 #include <linux/types.h>
11 #include <linux/jhash.h>
12 #include <linux/slab.h>
14 #include <wrapper/list.h>
15 #include <lttng/lttng-bytecode.h>
17 #define MERGE_POINT_TABLE_BITS 7
18 #define MERGE_POINT_TABLE_SIZE (1U << MERGE_POINT_TABLE_BITS)
20 /* merge point table node */
22 struct hlist_node node
;
24 /* Context at merge point */
26 unsigned long target_pc
;
30 struct hlist_head mp_head
[MERGE_POINT_TABLE_SIZE
];
34 int lttng_hash_match(struct mp_node
*mp_node
, unsigned long key_pc
)
36 if (mp_node
->target_pc
== key_pc
)
43 int merge_points_compare(const struct vstack
*stacka
,
44 const struct vstack
*stackb
)
48 if (stacka
->top
!= stackb
->top
)
50 len
= stacka
->top
+ 1;
51 WARN_ON_ONCE(len
< 0);
52 for (i
= 0; i
< len
; i
++) {
53 if (stacka
->e
[i
].type
!= stackb
->e
[i
].type
)
60 int merge_point_add_check(struct mp_table
*mp_table
, unsigned long target_pc
,
61 const struct vstack
*stack
)
63 struct mp_node
*mp_node
;
64 unsigned long hash
= jhash_1word(target_pc
, 0);
65 struct hlist_head
*head
;
66 struct mp_node
*lookup_node
;
69 dbg_printk("Bytecode: adding merge point at offset %lu, hash %lu\n",
71 mp_node
= kzalloc(sizeof(struct mp_node
), GFP_KERNEL
);
74 mp_node
->target_pc
= target_pc
;
75 memcpy(&mp_node
->stack
, stack
, sizeof(mp_node
->stack
));
77 head
= &mp_table
->mp_head
[hash
& (MERGE_POINT_TABLE_SIZE
- 1)];
78 lttng_hlist_for_each_entry(lookup_node
, head
, node
) {
79 if (lttng_hash_match(lookup_node
, target_pc
)) {
85 /* Key already present */
86 dbg_printk("Bytecode: compare merge points for offset %lu, hash %lu\n",
89 if (merge_points_compare(stack
, &lookup_node
->stack
)) {
90 printk(KERN_WARNING
"LTTng: bytecode: Merge points differ for offset %lu\n",
95 hlist_add_head(&mp_node
->node
, head
);
101 * Binary comparators use top of stack and top of stack -1.
104 int bin_op_compare_check(struct vstack
*stack
, const bytecode_opcode_t opcode
,
107 if (unlikely(!vstack_ax(stack
) || !vstack_bx(stack
)))
110 switch (vstack_ax(stack
)->type
) {
116 switch (vstack_bx(stack
)->type
) {
120 case REG_TYPE_UNKNOWN
:
124 case REG_STAR_GLOB_STRING
:
125 if (opcode
!= BYTECODE_OP_EQ
&& opcode
!= BYTECODE_OP_NE
) {
134 case REG_STAR_GLOB_STRING
:
135 switch (vstack_bx(stack
)->type
) {
139 case REG_TYPE_UNKNOWN
:
142 if (opcode
!= BYTECODE_OP_EQ
&& opcode
!= BYTECODE_OP_NE
) {
146 case REG_STAR_GLOB_STRING
:
154 switch (vstack_bx(stack
)->type
) {
158 case REG_TYPE_UNKNOWN
:
161 case REG_STAR_GLOB_STRING
:
168 case REG_TYPE_UNKNOWN
:
169 switch (vstack_bx(stack
)->type
) {
173 case REG_TYPE_UNKNOWN
:
175 case REG_STAR_GLOB_STRING
:
188 printk(KERN_WARNING
"LTTng: bytecode: empty stack for '%s' binary operator\n", str
);
192 printk(KERN_WARNING
"LTTng: bytecode: type mismatch for '%s' binary operator\n", str
);
196 printk(KERN_WARNING
"LTTng: bytecode: unknown type for '%s' binary operator\n", str
);
201 * Binary bitwise operators use top of stack and top of stack -1.
202 * Return 0 if typing is known to match, 1 if typing is dynamic
203 * (unknown), negative error value on error.
206 int bin_op_bitwise_check(struct vstack
*stack
, bytecode_opcode_t opcode
,
209 if (unlikely(!vstack_ax(stack
) || !vstack_bx(stack
)))
212 switch (vstack_ax(stack
)->type
) {
217 case REG_TYPE_UNKNOWN
:
218 switch (vstack_bx(stack
)->type
) {
222 case REG_TYPE_UNKNOWN
:
230 switch (vstack_bx(stack
)->type
) {
234 case REG_TYPE_UNKNOWN
:
248 printk(KERN_WARNING
"LTTng: bytecode: empty stack for '%s' binary operator\n", str
);
252 printk(KERN_WARNING
"LTTng: bytecode: unknown type for '%s' binary operator\n", str
);
257 int validate_get_symbol(struct bytecode_runtime
*bytecode
,
258 const struct get_symbol
*sym
)
260 const char *str
, *str_limit
;
263 if (sym
->offset
>= bytecode
->p
.bc
->bc
.len
- bytecode
->p
.bc
->bc
.reloc_offset
)
266 str
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.reloc_offset
+ sym
->offset
;
267 str_limit
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.len
;
268 len_limit
= str_limit
- str
;
269 if (strnlen(str
, len_limit
) == len_limit
)
275 * Validate bytecode range overflow within the validation pass.
276 * Called for each instruction encountered.
279 int bytecode_validate_overflow(struct bytecode_runtime
*bytecode
,
280 char *start_pc
, char *pc
)
284 switch (*(bytecode_opcode_t
*) pc
) {
285 case BYTECODE_OP_UNKNOWN
:
288 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
289 (unsigned int) *(bytecode_opcode_t
*) pc
);
294 case BYTECODE_OP_RETURN
:
295 case BYTECODE_OP_RETURN_S64
:
297 if (unlikely(pc
+ sizeof(struct return_op
)
298 > start_pc
+ bytecode
->len
)) {
305 case BYTECODE_OP_MUL
:
306 case BYTECODE_OP_DIV
:
307 case BYTECODE_OP_MOD
:
308 case BYTECODE_OP_PLUS
:
309 case BYTECODE_OP_MINUS
:
310 case BYTECODE_OP_EQ_DOUBLE
:
311 case BYTECODE_OP_NE_DOUBLE
:
312 case BYTECODE_OP_GT_DOUBLE
:
313 case BYTECODE_OP_LT_DOUBLE
:
314 case BYTECODE_OP_GE_DOUBLE
:
315 case BYTECODE_OP_LE_DOUBLE
:
317 case BYTECODE_OP_EQ_DOUBLE_S64
:
318 case BYTECODE_OP_NE_DOUBLE_S64
:
319 case BYTECODE_OP_GT_DOUBLE_S64
:
320 case BYTECODE_OP_LT_DOUBLE_S64
:
321 case BYTECODE_OP_GE_DOUBLE_S64
:
322 case BYTECODE_OP_LE_DOUBLE_S64
:
323 case BYTECODE_OP_EQ_S64_DOUBLE
:
324 case BYTECODE_OP_NE_S64_DOUBLE
:
325 case BYTECODE_OP_GT_S64_DOUBLE
:
326 case BYTECODE_OP_LT_S64_DOUBLE
:
327 case BYTECODE_OP_GE_S64_DOUBLE
:
328 case BYTECODE_OP_LE_S64_DOUBLE
:
329 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
330 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
331 case BYTECODE_OP_LOAD_DOUBLE
:
332 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
333 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
334 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
335 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
337 printk(KERN_WARNING
"LTTng: bytecode: unsupported bytecode op %u\n",
338 (unsigned int) *(bytecode_opcode_t
*) pc
);
349 case BYTECODE_OP_EQ_STRING
:
350 case BYTECODE_OP_NE_STRING
:
351 case BYTECODE_OP_GT_STRING
:
352 case BYTECODE_OP_LT_STRING
:
353 case BYTECODE_OP_GE_STRING
:
354 case BYTECODE_OP_LE_STRING
:
355 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
356 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
357 case BYTECODE_OP_EQ_S64
:
358 case BYTECODE_OP_NE_S64
:
359 case BYTECODE_OP_GT_S64
:
360 case BYTECODE_OP_LT_S64
:
361 case BYTECODE_OP_GE_S64
:
362 case BYTECODE_OP_LE_S64
:
363 case BYTECODE_OP_BIT_RSHIFT
:
364 case BYTECODE_OP_BIT_LSHIFT
:
365 case BYTECODE_OP_BIT_AND
:
366 case BYTECODE_OP_BIT_OR
:
367 case BYTECODE_OP_BIT_XOR
:
369 if (unlikely(pc
+ sizeof(struct binary_op
)
370 > start_pc
+ bytecode
->len
)) {
377 case BYTECODE_OP_UNARY_PLUS
:
378 case BYTECODE_OP_UNARY_MINUS
:
379 case BYTECODE_OP_UNARY_NOT
:
380 case BYTECODE_OP_UNARY_PLUS_S64
:
381 case BYTECODE_OP_UNARY_MINUS_S64
:
382 case BYTECODE_OP_UNARY_NOT_S64
:
383 case BYTECODE_OP_UNARY_BIT_NOT
:
385 if (unlikely(pc
+ sizeof(struct unary_op
)
386 > start_pc
+ bytecode
->len
)) {
393 case BYTECODE_OP_AND
:
396 if (unlikely(pc
+ sizeof(struct logical_op
)
397 > start_pc
+ bytecode
->len
)) {
404 case BYTECODE_OP_LOAD_FIELD_REF
:
406 printk(KERN_WARNING
"LTTng: bytecode: Unknown field ref type\n");
411 /* get context ref */
412 case BYTECODE_OP_GET_CONTEXT_REF
:
414 printk(KERN_WARNING
"LTTng: bytecode: Unknown field ref type\n");
418 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
419 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
420 case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
:
421 case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
:
422 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
423 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
424 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
426 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct field_ref
)
427 > start_pc
+ bytecode
->len
)) {
433 /* load from immediate operand */
434 case BYTECODE_OP_LOAD_STRING
:
435 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
437 struct load_op
*insn
= (struct load_op
*) pc
;
438 uint32_t str_len
, maxlen
;
440 if (unlikely(pc
+ sizeof(struct load_op
)
441 > start_pc
+ bytecode
->len
)) {
446 maxlen
= start_pc
+ bytecode
->len
- pc
- sizeof(struct load_op
);
447 str_len
= strnlen(insn
->data
, maxlen
);
448 if (unlikely(str_len
>= maxlen
)) {
449 /* Final '\0' not found within range */
455 case BYTECODE_OP_LOAD_S64
:
457 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct literal_numeric
)
458 > start_pc
+ bytecode
->len
)) {
464 case BYTECODE_OP_CAST_TO_S64
:
465 case BYTECODE_OP_CAST_NOP
:
467 if (unlikely(pc
+ sizeof(struct cast_op
)
468 > start_pc
+ bytecode
->len
)) {
475 * Instructions for recursive traversal through composed types.
477 case BYTECODE_OP_GET_CONTEXT_ROOT
:
478 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
479 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
480 case BYTECODE_OP_LOAD_FIELD
:
481 case BYTECODE_OP_LOAD_FIELD_S8
:
482 case BYTECODE_OP_LOAD_FIELD_S16
:
483 case BYTECODE_OP_LOAD_FIELD_S32
:
484 case BYTECODE_OP_LOAD_FIELD_S64
:
485 case BYTECODE_OP_LOAD_FIELD_U8
:
486 case BYTECODE_OP_LOAD_FIELD_U16
:
487 case BYTECODE_OP_LOAD_FIELD_U32
:
488 case BYTECODE_OP_LOAD_FIELD_U64
:
489 case BYTECODE_OP_LOAD_FIELD_STRING
:
490 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
491 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
492 if (unlikely(pc
+ sizeof(struct load_op
)
493 > start_pc
+ bytecode
->len
)) {
498 case BYTECODE_OP_GET_SYMBOL
:
500 struct load_op
*insn
= (struct load_op
*) pc
;
501 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
503 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_symbol
)
504 > start_pc
+ bytecode
->len
)) {
508 ret
= validate_get_symbol(bytecode
, sym
);
512 case BYTECODE_OP_GET_SYMBOL_FIELD
:
513 printk(KERN_WARNING
"LTTng: bytecode: Unexpected get symbol field\n");
517 case BYTECODE_OP_GET_INDEX_U16
:
518 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_index_u16
)
519 > start_pc
+ bytecode
->len
)) {
524 case BYTECODE_OP_GET_INDEX_U64
:
525 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_index_u64
)
526 > start_pc
+ bytecode
->len
)) {
536 unsigned long delete_all_nodes(struct mp_table
*mp_table
)
538 struct mp_node
*mp_node
;
539 struct hlist_node
*tmp
;
540 unsigned long nr_nodes
= 0;
543 for (i
= 0; i
< MERGE_POINT_TABLE_SIZE
; i
++) {
544 struct hlist_head
*head
;
546 head
= &mp_table
->mp_head
[i
];
547 lttng_hlist_for_each_entry_safe(mp_node
, tmp
, head
, node
) {
561 int validate_instruction_context(struct bytecode_runtime
*bytecode
,
562 struct vstack
*stack
,
567 const bytecode_opcode_t opcode
= *(bytecode_opcode_t
*) pc
;
570 case BYTECODE_OP_UNKNOWN
:
573 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
574 (unsigned int) *(bytecode_opcode_t
*) pc
);
579 case BYTECODE_OP_RETURN
:
580 case BYTECODE_OP_RETURN_S64
:
586 case BYTECODE_OP_MUL
:
587 case BYTECODE_OP_DIV
:
588 case BYTECODE_OP_MOD
:
589 case BYTECODE_OP_PLUS
:
590 case BYTECODE_OP_MINUS
:
592 case BYTECODE_OP_EQ_DOUBLE
:
593 case BYTECODE_OP_NE_DOUBLE
:
594 case BYTECODE_OP_GT_DOUBLE
:
595 case BYTECODE_OP_LT_DOUBLE
:
596 case BYTECODE_OP_GE_DOUBLE
:
597 case BYTECODE_OP_LE_DOUBLE
:
598 case BYTECODE_OP_EQ_DOUBLE_S64
:
599 case BYTECODE_OP_NE_DOUBLE_S64
:
600 case BYTECODE_OP_GT_DOUBLE_S64
:
601 case BYTECODE_OP_LT_DOUBLE_S64
:
602 case BYTECODE_OP_GE_DOUBLE_S64
:
603 case BYTECODE_OP_LE_DOUBLE_S64
:
604 case BYTECODE_OP_EQ_S64_DOUBLE
:
605 case BYTECODE_OP_NE_S64_DOUBLE
:
606 case BYTECODE_OP_GT_S64_DOUBLE
:
607 case BYTECODE_OP_LT_S64_DOUBLE
:
608 case BYTECODE_OP_GE_S64_DOUBLE
:
609 case BYTECODE_OP_LE_S64_DOUBLE
:
610 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
611 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
612 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
613 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
614 case BYTECODE_OP_LOAD_DOUBLE
:
615 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
616 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
618 printk(KERN_WARNING
"LTTng: bytecode: unsupported bytecode op %u\n",
619 (unsigned int) *(bytecode_opcode_t
*) pc
);
626 ret
= bin_op_compare_check(stack
, opcode
, "==");
633 ret
= bin_op_compare_check(stack
, opcode
, "!=");
640 ret
= bin_op_compare_check(stack
, opcode
, ">");
647 ret
= bin_op_compare_check(stack
, opcode
, "<");
654 ret
= bin_op_compare_check(stack
, opcode
, ">=");
661 ret
= bin_op_compare_check(stack
, opcode
, "<=");
667 case BYTECODE_OP_EQ_STRING
:
668 case BYTECODE_OP_NE_STRING
:
669 case BYTECODE_OP_GT_STRING
:
670 case BYTECODE_OP_LT_STRING
:
671 case BYTECODE_OP_GE_STRING
:
672 case BYTECODE_OP_LE_STRING
:
674 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
675 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
679 if (vstack_ax(stack
)->type
!= REG_STRING
680 || vstack_bx(stack
)->type
!= REG_STRING
) {
681 printk(KERN_WARNING
"LTTng: bytecode: Unexpected register type for string comparator\n");
689 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
690 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
692 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
693 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
697 if (vstack_ax(stack
)->type
!= REG_STAR_GLOB_STRING
698 && vstack_bx(stack
)->type
!= REG_STAR_GLOB_STRING
) {
699 printk(KERN_WARNING
"LTTng: bytecode: Unexpected register type for globbing pattern comparator\n");
706 case BYTECODE_OP_EQ_S64
:
707 case BYTECODE_OP_NE_S64
:
708 case BYTECODE_OP_GT_S64
:
709 case BYTECODE_OP_LT_S64
:
710 case BYTECODE_OP_GE_S64
:
711 case BYTECODE_OP_LE_S64
:
713 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
714 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
718 switch (vstack_ax(stack
)->type
) {
723 printk(KERN_WARNING
"LTTng: bytecode: Unexpected register type for s64 comparator\n");
727 switch (vstack_bx(stack
)->type
) {
732 printk(KERN_WARNING
"LTTng: bytecode: Unexpected register type for s64 comparator\n");
739 case BYTECODE_OP_BIT_RSHIFT
:
740 ret
= bin_op_bitwise_check(stack
, opcode
, ">>");
744 case BYTECODE_OP_BIT_LSHIFT
:
745 ret
= bin_op_bitwise_check(stack
, opcode
, "<<");
749 case BYTECODE_OP_BIT_AND
:
750 ret
= bin_op_bitwise_check(stack
, opcode
, "&");
754 case BYTECODE_OP_BIT_OR
:
755 ret
= bin_op_bitwise_check(stack
, opcode
, "|");
759 case BYTECODE_OP_BIT_XOR
:
760 ret
= bin_op_bitwise_check(stack
, opcode
, "^");
766 case BYTECODE_OP_UNARY_PLUS
:
767 case BYTECODE_OP_UNARY_MINUS
:
768 case BYTECODE_OP_UNARY_NOT
:
770 if (!vstack_ax(stack
)) {
771 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
775 switch (vstack_ax(stack
)->type
) {
778 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
783 case REG_STAR_GLOB_STRING
:
784 printk(KERN_WARNING
"LTTng: bytecode: Unary op can only be applied to numeric or floating point registers\n");
789 case REG_TYPE_UNKNOWN
:
794 case BYTECODE_OP_UNARY_BIT_NOT
:
796 if (!vstack_ax(stack
)) {
797 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
801 switch (vstack_ax(stack
)->type
) {
803 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
808 case REG_STAR_GLOB_STRING
:
810 printk(KERN_WARNING
"LTTng: bytecode: Unary bitwise op can only be applied to numeric registers\n");
815 case REG_TYPE_UNKNOWN
:
821 case BYTECODE_OP_UNARY_PLUS_S64
:
822 case BYTECODE_OP_UNARY_MINUS_S64
:
823 case BYTECODE_OP_UNARY_NOT_S64
:
825 if (!vstack_ax(stack
)) {
826 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
830 if (vstack_ax(stack
)->type
!= REG_S64
&&
831 vstack_ax(stack
)->type
!= REG_U64
) {
832 printk(KERN_WARNING
"LTTng: bytecode: Invalid register type\n");
840 case BYTECODE_OP_AND
:
843 struct logical_op
*insn
= (struct logical_op
*) pc
;
845 if (!vstack_ax(stack
)) {
846 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
850 if (vstack_ax(stack
)->type
!= REG_S64
&&
851 vstack_ax(stack
)->type
!= REG_U64
) {
852 printk(KERN_WARNING
"LTTng: bytecode: Logical comparator expects S64 register\n");
857 dbg_printk("Validate jumping to bytecode offset %u\n",
858 (unsigned int) insn
->skip_offset
);
859 if (unlikely(start_pc
+ insn
->skip_offset
<= pc
)) {
860 printk(KERN_WARNING
"LTTng: bytecode: Loops are not allowed in bytecode\n");
868 case BYTECODE_OP_LOAD_FIELD_REF
:
870 printk(KERN_WARNING
"LTTng: bytecode: Unknown field ref type\n");
874 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
875 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
876 case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
:
877 case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
:
879 struct load_op
*insn
= (struct load_op
*) pc
;
880 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
882 dbg_printk("Validate load field ref offset %u type string\n",
886 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
888 struct load_op
*insn
= (struct load_op
*) pc
;
889 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
891 dbg_printk("Validate load field ref offset %u type s64\n",
896 /* load from immediate operand */
897 case BYTECODE_OP_LOAD_STRING
:
898 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
903 case BYTECODE_OP_LOAD_S64
:
908 case BYTECODE_OP_CAST_TO_S64
:
910 struct cast_op
*insn
= (struct cast_op
*) pc
;
912 if (!vstack_ax(stack
)) {
913 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
917 switch (vstack_ax(stack
)->type
) {
920 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
925 case REG_STAR_GLOB_STRING
:
926 printk(KERN_WARNING
"LTTng: bytecode: Cast op can only be applied to numeric or floating point registers\n");
932 if (insn
->op
== BYTECODE_OP_CAST_DOUBLE_TO_S64
) {
933 if (vstack_ax(stack
)->type
!= REG_DOUBLE
) {
934 printk(KERN_WARNING
"LTTng: bytecode: Cast expects double\n");
941 case BYTECODE_OP_CAST_NOP
:
946 /* get context ref */
947 case BYTECODE_OP_GET_CONTEXT_REF
:
949 printk(KERN_WARNING
"LTTng: bytecode: Unknown get context ref type\n");
953 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
955 struct load_op
*insn
= (struct load_op
*) pc
;
956 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
958 dbg_printk("Validate get context ref offset %u type string\n",
962 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
964 struct load_op
*insn
= (struct load_op
*) pc
;
965 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
967 dbg_printk("Validate get context ref offset %u type s64\n",
973 * Instructions for recursive traversal through composed types.
975 case BYTECODE_OP_GET_CONTEXT_ROOT
:
977 dbg_printk("Validate get context root\n");
980 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
982 dbg_printk("Validate get app context root\n");
985 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
987 dbg_printk("Validate get payload root\n");
990 case BYTECODE_OP_LOAD_FIELD
:
993 * We tolerate that field type is unknown at validation,
994 * because we are performing the load specialization in
995 * a phase after validation.
997 dbg_printk("Validate load field\n");
1002 * Disallow already specialized bytecode op load field instructions to
1003 * ensure that the received bytecode does not:
1005 * - Read user-space memory without proper get_user accessors,
1006 * - Read a memory area larger than the memory targeted by the instrumentation.
1008 case BYTECODE_OP_LOAD_FIELD_S8
:
1009 case BYTECODE_OP_LOAD_FIELD_S16
:
1010 case BYTECODE_OP_LOAD_FIELD_S32
:
1011 case BYTECODE_OP_LOAD_FIELD_S64
:
1012 case BYTECODE_OP_LOAD_FIELD_U8
:
1013 case BYTECODE_OP_LOAD_FIELD_U16
:
1014 case BYTECODE_OP_LOAD_FIELD_U32
:
1015 case BYTECODE_OP_LOAD_FIELD_U64
:
1016 case BYTECODE_OP_LOAD_FIELD_STRING
:
1017 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1018 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1020 dbg_printk("Validate load field, reject specialized load instruction (%d)\n",
1026 case BYTECODE_OP_GET_SYMBOL
:
1028 struct load_op
*insn
= (struct load_op
*) pc
;
1029 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
1031 dbg_printk("Validate get symbol offset %u\n", sym
->offset
);
1035 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1037 struct load_op
*insn
= (struct load_op
*) pc
;
1038 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
1040 dbg_printk("Validate get symbol field offset %u\n", sym
->offset
);
1044 case BYTECODE_OP_GET_INDEX_U16
:
1046 struct load_op
*insn
= (struct load_op
*) pc
;
1047 struct get_index_u16
*get_index
= (struct get_index_u16
*) insn
->data
;
1049 dbg_printk("Validate get index u16 index %u\n", get_index
->index
);
1053 case BYTECODE_OP_GET_INDEX_U64
:
1055 struct load_op
*insn
= (struct load_op
*) pc
;
1056 struct get_index_u64
*get_index
= (struct get_index_u64
*) insn
->data
;
1058 dbg_printk("Validate get index u64 index %llu\n",
1059 (unsigned long long) get_index
->index
);
1073 int validate_instruction_all_contexts(struct bytecode_runtime
*bytecode
,
1074 struct mp_table
*mp_table
,
1075 struct vstack
*stack
,
1080 unsigned long target_pc
= pc
- start_pc
;
1082 struct hlist_head
*head
;
1083 struct mp_node
*mp_node
;
1085 /* Validate the context resulting from the previous instruction */
1086 ret
= validate_instruction_context(bytecode
, stack
, start_pc
, pc
);
1090 /* Validate merge points */
1091 hash
= jhash_1word(target_pc
, 0);
1092 head
= &mp_table
->mp_head
[hash
& (MERGE_POINT_TABLE_SIZE
- 1)];
1093 lttng_hlist_for_each_entry(mp_node
, head
, node
) {
1094 if (lttng_hash_match(mp_node
, target_pc
)) {
1100 dbg_printk("Bytecode: validate merge point at offset %lu\n",
1102 if (merge_points_compare(stack
, &mp_node
->stack
)) {
1103 printk(KERN_WARNING
"LTTng: bytecode: Merge points differ for offset %lu\n",
1107 /* Once validated, we can remove the merge point */
1108 dbg_printk("Bytecode: remove merge point at offset %lu\n",
1110 hlist_del(&mp_node
->node
);
1117 * >0: going to next insn.
1118 * 0: success, stop iteration.
1122 int exec_insn(struct bytecode_runtime
*bytecode
,
1123 struct mp_table
*mp_table
,
1124 struct vstack
*stack
,
1129 char *next_pc
= *_next_pc
;
1131 switch (*(bytecode_opcode_t
*) pc
) {
1132 case BYTECODE_OP_UNKNOWN
:
1135 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
1136 (unsigned int) *(bytecode_opcode_t
*) pc
);
1141 case BYTECODE_OP_RETURN
:
1143 if (!vstack_ax(stack
)) {
1144 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
1148 switch (vstack_ax(stack
)->type
) {
1154 case REG_TYPE_UNKNOWN
:
1157 printk(KERN_WARNING
"LTTng: bytecode: Unexpected register type %d at end of bytecode\n",
1158 (int) vstack_ax(stack
)->type
);
1167 case BYTECODE_OP_RETURN_S64
:
1169 if (!vstack_ax(stack
)) {
1170 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
1174 switch (vstack_ax(stack
)->type
) {
1179 case REG_TYPE_UNKNOWN
:
1180 printk(KERN_WARNING
"LTTng: bytecode: Unexpected register type %d at end of bytecode\n",
1181 (int) vstack_ax(stack
)->type
);
1191 case BYTECODE_OP_MUL
:
1192 case BYTECODE_OP_DIV
:
1193 case BYTECODE_OP_MOD
:
1194 case BYTECODE_OP_PLUS
:
1195 case BYTECODE_OP_MINUS
:
1196 /* Floating point */
1197 case BYTECODE_OP_EQ_DOUBLE
:
1198 case BYTECODE_OP_NE_DOUBLE
:
1199 case BYTECODE_OP_GT_DOUBLE
:
1200 case BYTECODE_OP_LT_DOUBLE
:
1201 case BYTECODE_OP_GE_DOUBLE
:
1202 case BYTECODE_OP_LE_DOUBLE
:
1203 case BYTECODE_OP_EQ_DOUBLE_S64
:
1204 case BYTECODE_OP_NE_DOUBLE_S64
:
1205 case BYTECODE_OP_GT_DOUBLE_S64
:
1206 case BYTECODE_OP_LT_DOUBLE_S64
:
1207 case BYTECODE_OP_GE_DOUBLE_S64
:
1208 case BYTECODE_OP_LE_DOUBLE_S64
:
1209 case BYTECODE_OP_EQ_S64_DOUBLE
:
1210 case BYTECODE_OP_NE_S64_DOUBLE
:
1211 case BYTECODE_OP_GT_S64_DOUBLE
:
1212 case BYTECODE_OP_LT_S64_DOUBLE
:
1213 case BYTECODE_OP_GE_S64_DOUBLE
:
1214 case BYTECODE_OP_LE_S64_DOUBLE
:
1215 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
1216 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
1217 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
1218 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
1219 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1220 case BYTECODE_OP_LOAD_DOUBLE
:
1221 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1223 printk(KERN_WARNING
"LTTng: bytecode: unsupported bytecode op %u\n",
1224 (unsigned int) *(bytecode_opcode_t
*) pc
);
1229 case BYTECODE_OP_EQ
:
1230 case BYTECODE_OP_NE
:
1231 case BYTECODE_OP_GT
:
1232 case BYTECODE_OP_LT
:
1233 case BYTECODE_OP_GE
:
1234 case BYTECODE_OP_LE
:
1235 case BYTECODE_OP_EQ_STRING
:
1236 case BYTECODE_OP_NE_STRING
:
1237 case BYTECODE_OP_GT_STRING
:
1238 case BYTECODE_OP_LT_STRING
:
1239 case BYTECODE_OP_GE_STRING
:
1240 case BYTECODE_OP_LE_STRING
:
1241 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
1242 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
1243 case BYTECODE_OP_EQ_S64
:
1244 case BYTECODE_OP_NE_S64
:
1245 case BYTECODE_OP_GT_S64
:
1246 case BYTECODE_OP_LT_S64
:
1247 case BYTECODE_OP_GE_S64
:
1248 case BYTECODE_OP_LE_S64
:
1251 if (vstack_pop(stack
)) {
1255 if (!vstack_ax(stack
)) {
1256 printk(KERN_WARNING
"Empty stack\n");
1260 switch (vstack_ax(stack
)->type
) {
1265 case REG_STAR_GLOB_STRING
:
1266 case REG_TYPE_UNKNOWN
:
1269 printk(KERN_WARNING
"Unexpected register type %d for operation\n",
1270 (int) vstack_ax(stack
)->type
);
1275 vstack_ax(stack
)->type
= REG_S64
;
1276 next_pc
+= sizeof(struct binary_op
);
1279 case BYTECODE_OP_BIT_RSHIFT
:
1280 case BYTECODE_OP_BIT_LSHIFT
:
1281 case BYTECODE_OP_BIT_AND
:
1282 case BYTECODE_OP_BIT_OR
:
1283 case BYTECODE_OP_BIT_XOR
:
1286 if (vstack_pop(stack
)) {
1290 if (!vstack_ax(stack
)) {
1291 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
1295 switch (vstack_ax(stack
)->type
) {
1300 case REG_STAR_GLOB_STRING
:
1301 case REG_TYPE_UNKNOWN
:
1304 printk(KERN_WARNING
"LTTng: bytecode: Unexpected register type %d for operation\n",
1305 (int) vstack_ax(stack
)->type
);
1310 vstack_ax(stack
)->type
= REG_U64
;
1311 next_pc
+= sizeof(struct binary_op
);
1316 case BYTECODE_OP_UNARY_PLUS
:
1317 case BYTECODE_OP_UNARY_MINUS
:
1320 if (!vstack_ax(stack
)) {
1321 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n\n");
1325 switch (vstack_ax(stack
)->type
) {
1328 case REG_TYPE_UNKNOWN
:
1331 printk(KERN_WARNING
"LTTng: bytecode: Unexpected register type %d for operation\n",
1332 (int) vstack_ax(stack
)->type
);
1337 vstack_ax(stack
)->type
= REG_TYPE_UNKNOWN
;
1338 next_pc
+= sizeof(struct unary_op
);
1342 case BYTECODE_OP_UNARY_PLUS_S64
:
1343 case BYTECODE_OP_UNARY_MINUS_S64
:
1344 case BYTECODE_OP_UNARY_NOT_S64
:
1347 if (!vstack_ax(stack
)) {
1348 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n\n");
1352 switch (vstack_ax(stack
)->type
) {
1357 printk(KERN_WARNING
"LTTng: bytecode: Unexpected register type %d for operation\n",
1358 (int) vstack_ax(stack
)->type
);
1363 next_pc
+= sizeof(struct unary_op
);
1367 case BYTECODE_OP_UNARY_NOT
:
1370 if (!vstack_ax(stack
)) {
1371 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n\n");
1375 switch (vstack_ax(stack
)->type
) {
1378 case REG_TYPE_UNKNOWN
:
1381 printk(KERN_WARNING
"LTTng: bytecode: Unexpected register type %d for operation\n",
1382 (int) vstack_ax(stack
)->type
);
1387 next_pc
+= sizeof(struct unary_op
);
1391 case BYTECODE_OP_UNARY_BIT_NOT
:
1394 if (!vstack_ax(stack
)) {
1395 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
1399 switch (vstack_ax(stack
)->type
) {
1402 case REG_TYPE_UNKNOWN
:
1406 printk(KERN_WARNING
"LTTng: bytecode: Unexpected register type %d for operation\n",
1407 (int) vstack_ax(stack
)->type
);
1412 vstack_ax(stack
)->type
= REG_U64
;
1413 next_pc
+= sizeof(struct unary_op
);
1418 case BYTECODE_OP_AND
:
1419 case BYTECODE_OP_OR
:
1421 struct logical_op
*insn
= (struct logical_op
*) pc
;
1424 /* Add merge point to table */
1425 merge_ret
= merge_point_add_check(mp_table
,
1426 insn
->skip_offset
, stack
);
1432 if (!vstack_ax(stack
)) {
1433 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n\n");
1437 /* There is always a cast-to-s64 operation before a or/and op. */
1438 switch (vstack_ax(stack
)->type
) {
1443 printk(KERN_WARNING
"LTTng: bytecode: Incorrect register type %d for operation\n",
1444 (int) vstack_ax(stack
)->type
);
1449 /* Continue to next instruction */
1450 /* Pop 1 when jump not taken */
1451 if (vstack_pop(stack
)) {
1455 next_pc
+= sizeof(struct logical_op
);
1459 /* load field ref */
1460 case BYTECODE_OP_LOAD_FIELD_REF
:
1462 printk(KERN_WARNING
"LTTng: bytecode: Unknown field ref type\n");
1466 /* get context ref */
1467 case BYTECODE_OP_GET_CONTEXT_REF
:
1469 printk(KERN_WARNING
"LTTng: bytecode: Unknown get context ref type\n");
1473 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
1474 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
1475 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1476 case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
:
1477 case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
:
1479 if (vstack_push(stack
)) {
1483 vstack_ax(stack
)->type
= REG_STRING
;
1484 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1487 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
1488 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1490 if (vstack_push(stack
)) {
1494 vstack_ax(stack
)->type
= REG_S64
;
1495 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1499 /* load from immediate operand */
1500 case BYTECODE_OP_LOAD_STRING
:
1502 struct load_op
*insn
= (struct load_op
*) pc
;
1504 if (vstack_push(stack
)) {
1508 vstack_ax(stack
)->type
= REG_STRING
;
1509 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1513 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1515 struct load_op
*insn
= (struct load_op
*) pc
;
1517 if (vstack_push(stack
)) {
1521 vstack_ax(stack
)->type
= REG_STAR_GLOB_STRING
;
1522 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1526 case BYTECODE_OP_LOAD_S64
:
1528 if (vstack_push(stack
)) {
1532 vstack_ax(stack
)->type
= REG_S64
;
1533 next_pc
+= sizeof(struct load_op
)
1534 + sizeof(struct literal_numeric
);
1538 case BYTECODE_OP_CAST_TO_S64
:
1541 if (!vstack_ax(stack
)) {
1542 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n");
1546 switch (vstack_ax(stack
)->type
) {
1550 case REG_TYPE_UNKNOWN
:
1553 printk(KERN_WARNING
"LTTng: bytecode: Incorrect register type %d for cast\n",
1554 (int) vstack_ax(stack
)->type
);
1558 vstack_ax(stack
)->type
= REG_S64
;
1559 next_pc
+= sizeof(struct cast_op
);
1562 case BYTECODE_OP_CAST_NOP
:
1564 next_pc
+= sizeof(struct cast_op
);
1569 * Instructions for recursive traversal through composed types.
1571 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1572 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1573 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1575 if (vstack_push(stack
)) {
1579 vstack_ax(stack
)->type
= REG_PTR
;
1580 next_pc
+= sizeof(struct load_op
);
1584 case BYTECODE_OP_LOAD_FIELD
:
1587 if (!vstack_ax(stack
)) {
1588 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n\n");
1592 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1593 printk(KERN_WARNING
"LTTng: bytecode: Expecting pointer on top of stack\n\n");
1597 vstack_ax(stack
)->type
= REG_TYPE_UNKNOWN
;
1598 next_pc
+= sizeof(struct load_op
);
1602 case BYTECODE_OP_LOAD_FIELD_S8
:
1603 case BYTECODE_OP_LOAD_FIELD_S16
:
1604 case BYTECODE_OP_LOAD_FIELD_S32
:
1605 case BYTECODE_OP_LOAD_FIELD_S64
:
1608 if (!vstack_ax(stack
)) {
1609 printk(KERN_WARNING
"Empty stack\n\n");
1613 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1614 printk(KERN_WARNING
"Expecting pointer on top of stack\n\n");
1618 vstack_ax(stack
)->type
= REG_S64
;
1619 next_pc
+= sizeof(struct load_op
);
1622 case BYTECODE_OP_LOAD_FIELD_U8
:
1623 case BYTECODE_OP_LOAD_FIELD_U16
:
1624 case BYTECODE_OP_LOAD_FIELD_U32
:
1625 case BYTECODE_OP_LOAD_FIELD_U64
:
1628 if (!vstack_ax(stack
)) {
1629 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n\n");
1633 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1634 printk(KERN_WARNING
"LTTng: bytecode: Expecting pointer on top of stack\n\n");
1638 vstack_ax(stack
)->type
= REG_U64
;
1639 next_pc
+= sizeof(struct load_op
);
1642 case BYTECODE_OP_LOAD_FIELD_STRING
:
1643 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1646 if (!vstack_ax(stack
)) {
1647 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n\n");
1651 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1652 printk(KERN_WARNING
"LTTng: bytecode: Expecting pointer on top of stack\n\n");
1656 vstack_ax(stack
)->type
= REG_STRING
;
1657 next_pc
+= sizeof(struct load_op
);
1661 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1664 if (!vstack_ax(stack
)) {
1665 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n\n");
1669 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1670 printk(KERN_WARNING
"LTTng: bytecode: Expecting pointer on top of stack\n\n");
1674 vstack_ax(stack
)->type
= REG_DOUBLE
;
1675 next_pc
+= sizeof(struct load_op
);
1679 case BYTECODE_OP_GET_SYMBOL
:
1680 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1683 if (!vstack_ax(stack
)) {
1684 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n\n");
1688 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1689 printk(KERN_WARNING
"LTTng: bytecode: Expecting pointer on top of stack\n\n");
1693 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1697 case BYTECODE_OP_GET_INDEX_U16
:
1700 if (!vstack_ax(stack
)) {
1701 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n\n");
1705 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1706 printk(KERN_WARNING
"LTTng: bytecode: Expecting pointer on top of stack\n\n");
1710 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1714 case BYTECODE_OP_GET_INDEX_U64
:
1717 if (!vstack_ax(stack
)) {
1718 printk(KERN_WARNING
"LTTng: bytecode: Empty stack\n\n");
1722 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1723 printk(KERN_WARNING
"LTTng: bytecode: Expecting pointer on top of stack\n\n");
1727 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1733 *_next_pc
= next_pc
;
1738 * Never called concurrently (hash seed is shared).
1740 int lttng_bytecode_validate(struct bytecode_runtime
*bytecode
)
1742 struct mp_table
*mp_table
;
1743 char *pc
, *next_pc
, *start_pc
;
1745 struct vstack stack
;
1747 vstack_init(&stack
);
1749 mp_table
= kzalloc(sizeof(*mp_table
), GFP_KERNEL
);
1751 printk(KERN_WARNING
"LTTng: bytecode: Error allocating hash table for bytecode validation\n");
1754 start_pc
= &bytecode
->code
[0];
1755 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
1757 ret
= bytecode_validate_overflow(bytecode
, start_pc
, pc
);
1760 printk(KERN_WARNING
"LTTng: bytecode: bytecode overflow\n");
1763 dbg_printk("Validating op %s (%u)\n",
1764 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t
*) pc
),
1765 (unsigned int) *(bytecode_opcode_t
*) pc
);
1768 * For each instruction, validate the current context
1769 * (traversal of entire execution flow), and validate
1770 * all merge points targeting this instruction.
1772 ret
= validate_instruction_all_contexts(bytecode
, mp_table
,
1773 &stack
, start_pc
, pc
);
1776 ret
= exec_insn(bytecode
, mp_table
, &stack
, &next_pc
, pc
);
1781 if (delete_all_nodes(mp_table
)) {
1783 printk(KERN_WARNING
"LTTng: bytecode: Unexpected merge points\n");