/* SPDX-License-Identifier: MIT
 *
 * lttng-bytecode-validator.c
 *
 * LTTng modules bytecode validator.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 */

#include <linux/types.h>
#include <linux/jhash.h>
#include <linux/slab.h>

#include <wrapper/list.h>
#include <lttng/lttng-bytecode.h>

#define MERGE_POINT_TABLE_BITS		7
#define MERGE_POINT_TABLE_SIZE		(1U << MERGE_POINT_TABLE_BITS)
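
/*
 * The merge point table below is a fixed-size, power-of-two hash table: a
 * merge point at bytecode offset target_pc is hashed with jhash_1word() and
 * placed in bucket (hash & (MERGE_POINT_TABLE_SIZE - 1)).
 */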

/* merge point table node */
struct mp_node {
	struct hlist_node node;

	/* Context at merge point */
	struct vstack stack;
	unsigned long target_pc;
};

struct mp_table {
	struct hlist_head mp_head[MERGE_POINT_TABLE_SIZE];
};
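
/*
 * Bucket lookup helper: a node matches when its recorded target_pc equals
 * the bytecode offset being looked up.
 */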
static
int lttng_hash_match(struct mp_node *mp_node, unsigned long key_pc)
{
	if (mp_node->target_pc == key_pc)
		return 1;
	else
		return 0;
}
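
/*
 * Compare two saved stack states: returns nonzero when the stack depth or
 * any per-slot register type differs. Only register types are tracked by
 * the validator, not values.
 */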
static
int merge_points_compare(const struct vstack *stacka,
			const struct vstack *stackb)
{
	int i, len;

	if (stacka->top != stackb->top)
		return 1;
	len = stacka->top + 1;
	WARN_ON_ONCE(len < 0);
	for (i = 0; i < len; i++) {
		if (stacka->e[i].type != stackb->e[i].type)
			return 1;
	}
	return 0;
}
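
/*
 * Record the stack state expected at target_pc. If a merge point already
 * exists for that offset, the previously saved state must match the new
 * one; a mismatch is a validation error.
 */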
static
int merge_point_add_check(struct mp_table *mp_table, unsigned long target_pc,
		const struct vstack *stack)
{
	struct mp_node *mp_node;
	unsigned long hash = jhash_1word(target_pc, 0);
	struct hlist_head *head;
	struct mp_node *lookup_node;
	int found = 0;

	dbg_printk("Bytecode: adding merge point at offset %lu, hash %lu\n",
			target_pc, hash);
	mp_node = kzalloc(sizeof(struct mp_node), GFP_KERNEL);
	if (!mp_node)
		return -ENOMEM;
	mp_node->target_pc = target_pc;
	memcpy(&mp_node->stack, stack, sizeof(mp_node->stack));

	head = &mp_table->mp_head[hash & (MERGE_POINT_TABLE_SIZE - 1)];
	lttng_hlist_for_each_entry(lookup_node, head, node) {
		if (lttng_hash_match(lookup_node, target_pc)) {
			found = 1;
			break;
		}
	}
	if (found) {
		/* Key already present */
		dbg_printk("Bytecode: compare merge points for offset %lu, hash %lu\n",
				target_pc, hash);
		kfree(mp_node);
		if (merge_points_compare(stack, &lookup_node->stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Merge points differ for offset %lu\n",
					target_pc);
			return -EINVAL;
		}
	} else {
		hlist_add_head(&mp_node->node, head);
	}
	return 0;
}

/*
 * Binary comparators use top of stack and top of stack -1.
 * Return 0 if typing is known to match, 1 if typing is dynamic
 * (unknown), negative error value on error.
 */
static
int bin_op_compare_check(struct vstack *stack, const bytecode_opcode_t opcode,
		const char *str)
{
	if (unlikely(!vstack_ax(stack) || !vstack_bx(stack)))
		goto error_empty;

	switch (vstack_ax(stack)->type) {
	default:
		goto error_type;

	case REG_STRING:
		switch (vstack_bx(stack)->type) {
		default:
			goto error_type;

		case REG_TYPE_UNKNOWN:
			goto unknown;
		case REG_STRING:
			break;
		case REG_STAR_GLOB_STRING:
			if (opcode != BYTECODE_OP_EQ && opcode != BYTECODE_OP_NE) {
				goto error_mismatch;
			}
			break;
		case REG_S64:
		case REG_U64:
			goto error_mismatch;
		}
		break;
	case REG_STAR_GLOB_STRING:
		switch (vstack_bx(stack)->type) {
		default:
			goto error_type;

		case REG_TYPE_UNKNOWN:
			goto unknown;
		case REG_STRING:
			if (opcode != BYTECODE_OP_EQ && opcode != BYTECODE_OP_NE) {
				goto error_mismatch;
			}
			break;
		case REG_STAR_GLOB_STRING:
		case REG_S64:
		case REG_U64:
			goto error_mismatch;
		}
		break;
	case REG_S64:
	case REG_U64:
		switch (vstack_bx(stack)->type) {
		default:
			goto error_type;

		case REG_TYPE_UNKNOWN:
			goto unknown;
		case REG_STRING:
		case REG_STAR_GLOB_STRING:
			goto error_mismatch;
		case REG_S64:
		case REG_U64:
			break;
		}
		break;
	case REG_TYPE_UNKNOWN:
		switch (vstack_bx(stack)->type) {
		default:
			goto error_type;

		case REG_TYPE_UNKNOWN:
		case REG_STRING:
		case REG_STAR_GLOB_STRING:
		case REG_S64:
		case REG_U64:
			goto unknown;
		}
		break;
	}
	return 0;

unknown:
	return 1;

error_empty:
	printk(KERN_WARNING "LTTng: bytecode: empty stack for '%s' binary operator\n", str);
	return -EINVAL;

error_mismatch:
	printk(KERN_WARNING "LTTng: bytecode: type mismatch for '%s' binary operator\n", str);
	return -EINVAL;

error_type:
	printk(KERN_WARNING "LTTng: bytecode: unknown type for '%s' binary operator\n", str);
	return -EINVAL;
}

/*
 * Binary bitwise operators use top of stack and top of stack -1.
 * Return 0 if typing is known to match, 1 if typing is dynamic
 * (unknown), negative error value on error.
 */
static
int bin_op_bitwise_check(struct vstack *stack, bytecode_opcode_t opcode,
		const char *str)
{
	if (unlikely(!vstack_ax(stack) || !vstack_bx(stack)))
		goto error_empty;

	switch (vstack_ax(stack)->type) {
	default:
		goto error_type;

	case REG_TYPE_UNKNOWN:
		switch (vstack_bx(stack)->type) {
		default:
			goto error_type;

		case REG_TYPE_UNKNOWN:
		case REG_S64:
		case REG_U64:
			goto unknown;
		}
		break;
	case REG_S64:
	case REG_U64:
		switch (vstack_bx(stack)->type) {
		default:
			goto error_type;

		case REG_TYPE_UNKNOWN:
			goto unknown;
		case REG_S64:
		case REG_U64:
			break;
		}
		break;
	}
	return 0;

unknown:
	return 1;

error_empty:
	printk(KERN_WARNING "LTTng: bytecode: empty stack for '%s' binary operator\n", str);
	return -EINVAL;

error_type:
	printk(KERN_WARNING "LTTng: bytecode: unknown type for '%s' binary operator\n", str);
	return -EINVAL;
}
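
/*
 * Check that a get_symbol operand points inside the bytecode's relocation
 * area and that the referenced symbol string is NUL-terminated before the
 * end of the bytecode.
 */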
static
int validate_get_symbol(struct bytecode_runtime *bytecode,
		const struct get_symbol *sym)
{
	const char *str, *str_limit;
	size_t len_limit;

	if (sym->offset >= bytecode->p.bc->bc.len - bytecode->p.bc->bc.reloc_offset)
		return -EINVAL;

	str = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + sym->offset;
	str_limit = bytecode->p.bc->bc.data + bytecode->p.bc->bc.len;
	len_limit = str_limit - str;
	if (strnlen(str, len_limit) == len_limit)
		return -EINVAL;
	return 0;
}

/*
 * Validate bytecode range overflow within the validation pass.
 * Called for each instruction encountered.
 */
static
int bytecode_validate_overflow(struct bytecode_runtime *bytecode,
		char *start_pc, char *pc)
{
	int ret = 0;

	switch (*(bytecode_opcode_t *) pc) {
	case BYTECODE_OP_UNKNOWN:
	default:
	{
		printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
			(unsigned int) *(bytecode_opcode_t *) pc);
		ret = -EINVAL;
		break;
	}

	case BYTECODE_OP_RETURN:
	case BYTECODE_OP_RETURN_S64:
	{
		if (unlikely(pc + sizeof(struct return_op)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
		}
		break;
	}

	case BYTECODE_OP_MUL:
	case BYTECODE_OP_DIV:
	case BYTECODE_OP_MOD:
	case BYTECODE_OP_PLUS:
	case BYTECODE_OP_MINUS:
	case BYTECODE_OP_EQ_DOUBLE:
	case BYTECODE_OP_NE_DOUBLE:
	case BYTECODE_OP_GT_DOUBLE:
	case BYTECODE_OP_LT_DOUBLE:
	case BYTECODE_OP_GE_DOUBLE:
	case BYTECODE_OP_LE_DOUBLE:
	case BYTECODE_OP_EQ_DOUBLE_S64:
	case BYTECODE_OP_NE_DOUBLE_S64:
	case BYTECODE_OP_GT_DOUBLE_S64:
	case BYTECODE_OP_LT_DOUBLE_S64:
	case BYTECODE_OP_GE_DOUBLE_S64:
	case BYTECODE_OP_LE_DOUBLE_S64:
	case BYTECODE_OP_EQ_S64_DOUBLE:
	case BYTECODE_OP_NE_S64_DOUBLE:
	case BYTECODE_OP_GT_S64_DOUBLE:
	case BYTECODE_OP_LT_S64_DOUBLE:
	case BYTECODE_OP_GE_S64_DOUBLE:
	case BYTECODE_OP_LE_S64_DOUBLE:
	case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
	case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
	case BYTECODE_OP_LOAD_DOUBLE:
	case BYTECODE_OP_CAST_DOUBLE_TO_S64:
	case BYTECODE_OP_UNARY_PLUS_DOUBLE:
	case BYTECODE_OP_UNARY_MINUS_DOUBLE:
	case BYTECODE_OP_UNARY_NOT_DOUBLE:
	{
		printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
			(unsigned int) *(bytecode_opcode_t *) pc);
		ret = -EINVAL;
		break;
	}

	case BYTECODE_OP_EQ_STRING:
	case BYTECODE_OP_NE_STRING:
	case BYTECODE_OP_GT_STRING:
	case BYTECODE_OP_LT_STRING:
	case BYTECODE_OP_GE_STRING:
	case BYTECODE_OP_LE_STRING:
	case BYTECODE_OP_EQ_STAR_GLOB_STRING:
	case BYTECODE_OP_NE_STAR_GLOB_STRING:
	case BYTECODE_OP_EQ_S64:
	case BYTECODE_OP_NE_S64:
	case BYTECODE_OP_GT_S64:
	case BYTECODE_OP_LT_S64:
	case BYTECODE_OP_GE_S64:
	case BYTECODE_OP_LE_S64:
	case BYTECODE_OP_BIT_RSHIFT:
	case BYTECODE_OP_BIT_LSHIFT:
	case BYTECODE_OP_BIT_AND:
	case BYTECODE_OP_BIT_OR:
	case BYTECODE_OP_BIT_XOR:
	{
		if (unlikely(pc + sizeof(struct binary_op)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
		}
		break;
	}

	case BYTECODE_OP_UNARY_PLUS:
	case BYTECODE_OP_UNARY_MINUS:
	case BYTECODE_OP_UNARY_NOT:
	case BYTECODE_OP_UNARY_PLUS_S64:
	case BYTECODE_OP_UNARY_MINUS_S64:
	case BYTECODE_OP_UNARY_NOT_S64:
	case BYTECODE_OP_UNARY_BIT_NOT:
	{
		if (unlikely(pc + sizeof(struct unary_op)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
		}
		break;
	}

	case BYTECODE_OP_AND:
	case BYTECODE_OP_OR:
	{
		if (unlikely(pc + sizeof(struct logical_op)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
		}
		break;
	}

	/* load field and get context ref */
	case BYTECODE_OP_LOAD_FIELD_REF:
	case BYTECODE_OP_GET_CONTEXT_REF:
	case BYTECODE_OP_LOAD_FIELD_REF_STRING:
	case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
	case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING:
	case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE:
	case BYTECODE_OP_LOAD_FIELD_REF_S64:
	case BYTECODE_OP_GET_CONTEXT_REF_STRING:
	case BYTECODE_OP_GET_CONTEXT_REF_S64:
	{
		if (unlikely(pc + sizeof(struct load_op) + sizeof(struct field_ref)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
		}
		break;
	}

	/* load from immediate operand */
	case BYTECODE_OP_LOAD_STRING:
	case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
	{
		struct load_op *insn = (struct load_op *) pc;
		uint32_t str_len, maxlen;

		if (unlikely(pc + sizeof(struct load_op)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
			break;
		}
		maxlen = start_pc + bytecode->len - pc - sizeof(struct load_op);
		str_len = strnlen(insn->data, maxlen);
		if (unlikely(str_len >= maxlen)) {
			/* Final '\0' not found within range */
			ret = -ERANGE;
		}
		break;
	}

	case BYTECODE_OP_LOAD_S64:
	{
		if (unlikely(pc + sizeof(struct load_op) + sizeof(struct literal_numeric)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
		}
		break;
	}

	case BYTECODE_OP_CAST_TO_S64:
	case BYTECODE_OP_CAST_NOP:
	{
		if (unlikely(pc + sizeof(struct cast_op)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
		}
		break;
	}

	/*
	 * Instructions for recursive traversal through composed types.
	 */
	case BYTECODE_OP_GET_CONTEXT_ROOT:
	case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
	case BYTECODE_OP_GET_PAYLOAD_ROOT:
	case BYTECODE_OP_LOAD_FIELD:
	case BYTECODE_OP_LOAD_FIELD_S8:
	case BYTECODE_OP_LOAD_FIELD_S16:
	case BYTECODE_OP_LOAD_FIELD_S32:
	case BYTECODE_OP_LOAD_FIELD_S64:
	case BYTECODE_OP_LOAD_FIELD_U8:
	case BYTECODE_OP_LOAD_FIELD_U16:
	case BYTECODE_OP_LOAD_FIELD_U32:
	case BYTECODE_OP_LOAD_FIELD_U64:
	case BYTECODE_OP_LOAD_FIELD_STRING:
	case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
	case BYTECODE_OP_LOAD_FIELD_DOUBLE:
		if (unlikely(pc + sizeof(struct load_op)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
		}
		break;

	case BYTECODE_OP_GET_SYMBOL:
	{
		struct load_op *insn = (struct load_op *) pc;
		struct get_symbol *sym = (struct get_symbol *) insn->data;

		if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_symbol)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
			break;
		}
		ret = validate_get_symbol(bytecode, sym);
		break;
	}

	case BYTECODE_OP_GET_SYMBOL_FIELD:
		printk(KERN_WARNING "LTTng: bytecode: Unexpected get symbol field\n");
		ret = -EINVAL;
		break;

	case BYTECODE_OP_GET_INDEX_U16:
		if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_index_u16)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
		}
		break;

	case BYTECODE_OP_GET_INDEX_U64:
		if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_index_u64)
				> start_pc + bytecode->len)) {
			ret = -ERANGE;
		}
		break;
	}

	return ret;
}
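
/*
 * Free every node left in the merge point table and return how many were
 * freed; a nonzero count after validation indicates unresolved merge points.
 */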
static
unsigned long delete_all_nodes(struct mp_table *mp_table)
{
	struct mp_node *mp_node;
	struct hlist_node *tmp;
	unsigned long nr_nodes = 0;
	int i;

	for (i = 0; i < MERGE_POINT_TABLE_SIZE; i++) {
		struct hlist_head *head;

		head = &mp_table->mp_head[i];
		lttng_hlist_for_each_entry_safe(mp_node, tmp, head, node) {
			kfree(mp_node);
			nr_nodes++;
		}
	}
	return nr_nodes;
}
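
/*
 * Check that the virtual stack state produced by the previously executed
 * instructions is compatible with the instruction at pc.
 */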
static
int validate_instruction_context(struct bytecode_runtime *bytecode,
		struct vstack *stack,
		char *start_pc, char *pc)
{
	int ret = 0;
	const bytecode_opcode_t opcode = *(bytecode_opcode_t *) pc;

	switch (opcode) {
	case BYTECODE_OP_UNKNOWN:
	default:
	{
		printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
			(unsigned int) *(bytecode_opcode_t *) pc);
		ret = -EINVAL;
		goto end;
	}

	case BYTECODE_OP_RETURN:
	case BYTECODE_OP_RETURN_S64:
	{
		break;
	}

	case BYTECODE_OP_MUL:
	case BYTECODE_OP_DIV:
	case BYTECODE_OP_MOD:
	case BYTECODE_OP_PLUS:
	case BYTECODE_OP_MINUS:
	case BYTECODE_OP_EQ_DOUBLE:
	case BYTECODE_OP_NE_DOUBLE:
	case BYTECODE_OP_GT_DOUBLE:
	case BYTECODE_OP_LT_DOUBLE:
	case BYTECODE_OP_GE_DOUBLE:
	case BYTECODE_OP_LE_DOUBLE:
	case BYTECODE_OP_EQ_DOUBLE_S64:
	case BYTECODE_OP_NE_DOUBLE_S64:
	case BYTECODE_OP_GT_DOUBLE_S64:
	case BYTECODE_OP_LT_DOUBLE_S64:
	case BYTECODE_OP_GE_DOUBLE_S64:
	case BYTECODE_OP_LE_DOUBLE_S64:
	case BYTECODE_OP_EQ_S64_DOUBLE:
	case BYTECODE_OP_NE_S64_DOUBLE:
	case BYTECODE_OP_GT_S64_DOUBLE:
	case BYTECODE_OP_LT_S64_DOUBLE:
	case BYTECODE_OP_GE_S64_DOUBLE:
	case BYTECODE_OP_LE_S64_DOUBLE:
	case BYTECODE_OP_UNARY_PLUS_DOUBLE:
	case BYTECODE_OP_UNARY_MINUS_DOUBLE:
	case BYTECODE_OP_UNARY_NOT_DOUBLE:
	case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
	case BYTECODE_OP_LOAD_DOUBLE:
	case BYTECODE_OP_CAST_DOUBLE_TO_S64:
	case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
	{
		printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
			(unsigned int) *(bytecode_opcode_t *) pc);
		ret = -EINVAL;
		goto end;
	}

	case BYTECODE_OP_EQ:
	{
		ret = bin_op_compare_check(stack, opcode, "==");
		if (ret < 0)
			goto end;
		break;
	}
	case BYTECODE_OP_NE:
	{
		ret = bin_op_compare_check(stack, opcode, "!=");
		if (ret < 0)
			goto end;
		break;
	}
	case BYTECODE_OP_GT:
	{
		ret = bin_op_compare_check(stack, opcode, ">");
		if (ret < 0)
			goto end;
		break;
	}
	case BYTECODE_OP_LT:
	{
		ret = bin_op_compare_check(stack, opcode, "<");
		if (ret < 0)
			goto end;
		break;
	}
	case BYTECODE_OP_GE:
	{
		ret = bin_op_compare_check(stack, opcode, ">=");
		if (ret < 0)
			goto end;
		break;
	}
	case BYTECODE_OP_LE:
	{
		ret = bin_op_compare_check(stack, opcode, "<=");
		if (ret < 0)
			goto end;
		break;
	}

	case BYTECODE_OP_EQ_STRING:
	case BYTECODE_OP_NE_STRING:
	case BYTECODE_OP_GT_STRING:
	case BYTECODE_OP_LT_STRING:
	case BYTECODE_OP_GE_STRING:
	case BYTECODE_OP_LE_STRING:
	{
		if (!vstack_ax(stack) || !vstack_bx(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_STRING
				|| vstack_bx(stack)->type != REG_STRING) {
			printk(KERN_WARNING "LTTng: bytecode: Unexpected register type for string comparator\n");
			ret = -EINVAL;
			goto end;
		}
		break;
	}

	case BYTECODE_OP_EQ_STAR_GLOB_STRING:
	case BYTECODE_OP_NE_STAR_GLOB_STRING:
	{
		if (!vstack_ax(stack) || !vstack_bx(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_STAR_GLOB_STRING
				&& vstack_bx(stack)->type != REG_STAR_GLOB_STRING) {
			printk(KERN_WARNING "LTTng: bytecode: Unexpected register type for globbing pattern comparator\n");
			ret = -EINVAL;
			goto end;
		}
		break;
	}

	case BYTECODE_OP_EQ_S64:
	case BYTECODE_OP_NE_S64:
	case BYTECODE_OP_GT_S64:
	case BYTECODE_OP_LT_S64:
	case BYTECODE_OP_GE_S64:
	case BYTECODE_OP_LE_S64:
	{
		if (!vstack_ax(stack) || !vstack_bx(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		case REG_S64:
		case REG_U64:
			break;
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected register type for s64 comparator\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_bx(stack)->type) {
		case REG_S64:
		case REG_U64:
			break;
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected register type for s64 comparator\n");
			ret = -EINVAL;
			goto end;
		}
		break;
	}

	case BYTECODE_OP_BIT_RSHIFT:
		ret = bin_op_bitwise_check(stack, opcode, ">>");
		if (ret < 0)
			goto end;
		break;
	case BYTECODE_OP_BIT_LSHIFT:
		ret = bin_op_bitwise_check(stack, opcode, "<<");
		if (ret < 0)
			goto end;
		break;
	case BYTECODE_OP_BIT_AND:
		ret = bin_op_bitwise_check(stack, opcode, "&");
		if (ret < 0)
			goto end;
		break;
	case BYTECODE_OP_BIT_OR:
		ret = bin_op_bitwise_check(stack, opcode, "|");
		if (ret < 0)
			goto end;
		break;
	case BYTECODE_OP_BIT_XOR:
		ret = bin_op_bitwise_check(stack, opcode, "^");
		if (ret < 0)
			goto end;
		break;

	case BYTECODE_OP_UNARY_PLUS:
	case BYTECODE_OP_UNARY_MINUS:
	case BYTECODE_OP_UNARY_NOT:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		default:
			printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
			ret = -EINVAL;
			goto end;

		case REG_STRING:
		case REG_STAR_GLOB_STRING:
			printk(KERN_WARNING "LTTng: bytecode: Unary op can only be applied to numeric or floating point registers\n");
			ret = -EINVAL;
			goto end;
		case REG_S64:
		case REG_U64:
		case REG_TYPE_UNKNOWN:
			break;
		}
		break;
	}

	case BYTECODE_OP_UNARY_BIT_NOT:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		default:
			printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
			ret = -EINVAL;
			goto end;

		case REG_STRING:
		case REG_STAR_GLOB_STRING:
			printk(KERN_WARNING "LTTng: bytecode: Unary bitwise op can only be applied to numeric registers\n");
			ret = -EINVAL;
			goto end;
		case REG_S64:
		case REG_U64:
		case REG_TYPE_UNKNOWN:
			break;
		}
		break;
	}

	case BYTECODE_OP_UNARY_PLUS_S64:
	case BYTECODE_OP_UNARY_MINUS_S64:
	case BYTECODE_OP_UNARY_NOT_S64:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_S64 &&
				vstack_ax(stack)->type != REG_U64) {
			printk(KERN_WARNING "LTTng: bytecode: Invalid register type\n");
			ret = -EINVAL;
			goto end;
		}
		break;
	}

	case BYTECODE_OP_AND:
	case BYTECODE_OP_OR:
	{
		struct logical_op *insn = (struct logical_op *) pc;

		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_S64 &&
				vstack_ax(stack)->type != REG_U64) {
			printk(KERN_WARNING "LTTng: bytecode: Logical comparator expects S64 register\n");
			ret = -EINVAL;
			goto end;
		}

		dbg_printk("Validate jumping to bytecode offset %u\n",
			(unsigned int) insn->skip_offset);
		if (unlikely(start_pc + insn->skip_offset <= pc)) {
			printk(KERN_WARNING "LTTng: bytecode: Loops are not allowed in bytecode\n");
			ret = -EINVAL;
			goto end;
		}
		break;
	}

	case BYTECODE_OP_LOAD_FIELD_REF:
	{
		printk(KERN_WARNING "LTTng: bytecode: Unknown field ref type\n");
		ret = -EINVAL;
		goto end;
	}
	case BYTECODE_OP_LOAD_FIELD_REF_STRING:
	case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
	case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING:
	case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE:
	{
		struct load_op *insn = (struct load_op *) pc;
		struct field_ref *ref = (struct field_ref *) insn->data;

		dbg_printk("Validate load field ref offset %u type string\n",
			ref->offset);
		break;
	}
	case BYTECODE_OP_LOAD_FIELD_REF_S64:
	{
		struct load_op *insn = (struct load_op *) pc;
		struct field_ref *ref = (struct field_ref *) insn->data;

		dbg_printk("Validate load field ref offset %u type s64\n",
			ref->offset);
		break;
	}

	/* load from immediate operand */
	case BYTECODE_OP_LOAD_STRING:
	case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
	{
		break;
	}

	case BYTECODE_OP_LOAD_S64:
	{
		break;
	}

	case BYTECODE_OP_CAST_TO_S64:
	{
		struct cast_op *insn = (struct cast_op *) pc;

		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		default:
			printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
			ret = -EINVAL;
			goto end;

		case REG_STRING:
		case REG_STAR_GLOB_STRING:
			printk(KERN_WARNING "LTTng: bytecode: Cast op can only be applied to numeric or floating point registers\n");
			ret = -EINVAL;
			goto end;
		case REG_S64:
		case REG_U64:
		case REG_DOUBLE:
		case REG_TYPE_UNKNOWN:
			break;
		}
		if (insn->op == BYTECODE_OP_CAST_DOUBLE_TO_S64) {
			if (vstack_ax(stack)->type != REG_DOUBLE) {
				printk(KERN_WARNING "LTTng: bytecode: Cast expects double\n");
				ret = -EINVAL;
				goto end;
			}
		}
		break;
	}
	case BYTECODE_OP_CAST_NOP:
	{
		break;
	}

	/* get context ref */
	case BYTECODE_OP_GET_CONTEXT_REF:
	{
		printk(KERN_WARNING "LTTng: bytecode: Unknown get context ref type\n");
		ret = -EINVAL;
		goto end;
	}
	case BYTECODE_OP_GET_CONTEXT_REF_STRING:
	{
		struct load_op *insn = (struct load_op *) pc;
		struct field_ref *ref = (struct field_ref *) insn->data;

		dbg_printk("Validate get context ref offset %u type string\n",
			ref->offset);
		break;
	}
	case BYTECODE_OP_GET_CONTEXT_REF_S64:
	{
		struct load_op *insn = (struct load_op *) pc;
		struct field_ref *ref = (struct field_ref *) insn->data;

		dbg_printk("Validate get context ref offset %u type s64\n",
			ref->offset);
		break;
	}

	/*
	 * Instructions for recursive traversal through composed types.
	 */
	case BYTECODE_OP_GET_CONTEXT_ROOT:
	{
		dbg_printk("Validate get context root\n");
		break;
	}
	case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
	{
		dbg_printk("Validate get app context root\n");
		break;
	}
	case BYTECODE_OP_GET_PAYLOAD_ROOT:
	{
		dbg_printk("Validate get payload root\n");
		break;
	}
	case BYTECODE_OP_LOAD_FIELD:
	{
		/*
		 * We tolerate that field type is unknown at validation,
		 * because we are performing the load specialization in
		 * a phase after validation.
		 */
		dbg_printk("Validate load field\n");
		break;
	}

	/*
	 * Disallow already specialized bytecode op load field instructions to
	 * ensure that the received bytecode does not:
	 * - Read user-space memory without proper get_user accessors,
	 * - Read a memory area larger than the memory targeted by the instrumentation.
	 */
	case BYTECODE_OP_LOAD_FIELD_S8:
	case BYTECODE_OP_LOAD_FIELD_S16:
	case BYTECODE_OP_LOAD_FIELD_S32:
	case BYTECODE_OP_LOAD_FIELD_S64:
	case BYTECODE_OP_LOAD_FIELD_U8:
	case BYTECODE_OP_LOAD_FIELD_U16:
	case BYTECODE_OP_LOAD_FIELD_U32:
	case BYTECODE_OP_LOAD_FIELD_U64:
	case BYTECODE_OP_LOAD_FIELD_STRING:
	case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
	case BYTECODE_OP_LOAD_FIELD_DOUBLE:
	{
		dbg_printk("Validate load field, reject specialized load instruction (%d)\n",
			(int) opcode);
		ret = -EINVAL;
		goto end;
	}

	case BYTECODE_OP_GET_SYMBOL:
	{
		struct load_op *insn = (struct load_op *) pc;
		struct get_symbol *sym = (struct get_symbol *) insn->data;

		dbg_printk("Validate get symbol offset %u\n", sym->offset);
		break;
	}
	case BYTECODE_OP_GET_SYMBOL_FIELD:
	{
		struct load_op *insn = (struct load_op *) pc;
		struct get_symbol *sym = (struct get_symbol *) insn->data;

		dbg_printk("Validate get symbol field offset %u\n", sym->offset);
		break;
	}
	case BYTECODE_OP_GET_INDEX_U16:
	{
		struct load_op *insn = (struct load_op *) pc;
		struct get_index_u16 *get_index = (struct get_index_u16 *) insn->data;

		dbg_printk("Validate get index u16 index %u\n", get_index->index);
		break;
	}
	case BYTECODE_OP_GET_INDEX_U64:
	{
		struct load_op *insn = (struct load_op *) pc;
		struct get_index_u64 *get_index = (struct get_index_u64 *) insn->data;

		dbg_printk("Validate get index u64 index %llu\n",
			(unsigned long long) get_index->index);
		break;
	}
	}
end:
	return ret;
}
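
/*
 * Validate the instruction at pc against the current stack state and against
 * every merge point recorded for this offset; a merge point is removed from
 * the table once it has been checked.
 */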
static
int validate_instruction_all_contexts(struct bytecode_runtime *bytecode,
		struct mp_table *mp_table,
		struct vstack *stack,
		char *start_pc, char *pc)
{
	int ret;
	unsigned long target_pc = pc - start_pc;
	int found = 0;
	unsigned long hash;
	struct hlist_head *head;
	struct mp_node *mp_node;

	/* Validate the context resulting from the previous instruction */
	ret = validate_instruction_context(bytecode, stack, start_pc, pc);
	if (ret < 0)
		return ret;

	/* Validate merge points */
	hash = jhash_1word(target_pc, 0);
	head = &mp_table->mp_head[hash & (MERGE_POINT_TABLE_SIZE - 1)];
	lttng_hlist_for_each_entry(mp_node, head, node) {
		if (lttng_hash_match(mp_node, target_pc)) {
			found = 1;
			break;
		}
	}
	if (found) {
		dbg_printk("Bytecode: validate merge point at offset %lu\n",
				target_pc);
		if (merge_points_compare(stack, &mp_node->stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Merge points differ for offset %lu\n",
					target_pc);
			return -EINVAL;
		}
		/* Once validated, we can remove the merge point */
		dbg_printk("Bytecode: remove merge point at offset %lu\n",
				target_pc);
		hlist_del(&mp_node->node);
	}
	return ret;
}

/*
 * Validate load instructions: specialized instructions not accepted as input.
 *
 * Return value:
 * >0: going to next insn.
 * 0: success, stop iteration.
 * <0: error
 */
static
int validate_load(char **_next_pc,
		char *pc)
{
	int ret = 0;
	char *next_pc = *_next_pc;

	switch (*(bytecode_opcode_t *) pc) {
	case BYTECODE_OP_UNKNOWN:
	default:
	{
		printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
			(unsigned int) *(bytecode_opcode_t *) pc);
		ret = -EINVAL;
		goto end;
	}

	case BYTECODE_OP_RETURN:
	{
		next_pc += sizeof(struct return_op);
		break;
	}

	case BYTECODE_OP_RETURN_S64:
	{
		next_pc += sizeof(struct return_op);
		break;
	}

	case BYTECODE_OP_MUL:
	case BYTECODE_OP_DIV:
	case BYTECODE_OP_MOD:
	case BYTECODE_OP_PLUS:
	case BYTECODE_OP_MINUS:
	/* Floating point */
	case BYTECODE_OP_EQ_DOUBLE:
	case BYTECODE_OP_NE_DOUBLE:
	case BYTECODE_OP_GT_DOUBLE:
	case BYTECODE_OP_LT_DOUBLE:
	case BYTECODE_OP_GE_DOUBLE:
	case BYTECODE_OP_LE_DOUBLE:
	case BYTECODE_OP_EQ_DOUBLE_S64:
	case BYTECODE_OP_NE_DOUBLE_S64:
	case BYTECODE_OP_GT_DOUBLE_S64:
	case BYTECODE_OP_LT_DOUBLE_S64:
	case BYTECODE_OP_GE_DOUBLE_S64:
	case BYTECODE_OP_LE_DOUBLE_S64:
	case BYTECODE_OP_EQ_S64_DOUBLE:
	case BYTECODE_OP_NE_S64_DOUBLE:
	case BYTECODE_OP_GT_S64_DOUBLE:
	case BYTECODE_OP_LT_S64_DOUBLE:
	case BYTECODE_OP_GE_S64_DOUBLE:
	case BYTECODE_OP_LE_S64_DOUBLE:
	case BYTECODE_OP_UNARY_PLUS_DOUBLE:
	case BYTECODE_OP_UNARY_MINUS_DOUBLE:
	case BYTECODE_OP_UNARY_NOT_DOUBLE:
	case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
	case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
	case BYTECODE_OP_LOAD_DOUBLE:
	case BYTECODE_OP_CAST_DOUBLE_TO_S64:
	{
		printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
			(unsigned int) *(bytecode_opcode_t *) pc);
		ret = -EINVAL;
		goto end;
	}

	case BYTECODE_OP_EQ:
	case BYTECODE_OP_NE:
	case BYTECODE_OP_GT:
	case BYTECODE_OP_LT:
	case BYTECODE_OP_GE:
	case BYTECODE_OP_LE:
	case BYTECODE_OP_EQ_STRING:
	case BYTECODE_OP_NE_STRING:
	case BYTECODE_OP_GT_STRING:
	case BYTECODE_OP_LT_STRING:
	case BYTECODE_OP_GE_STRING:
	case BYTECODE_OP_LE_STRING:
	case BYTECODE_OP_EQ_STAR_GLOB_STRING:
	case BYTECODE_OP_NE_STAR_GLOB_STRING:
	case BYTECODE_OP_EQ_S64:
	case BYTECODE_OP_NE_S64:
	case BYTECODE_OP_GT_S64:
	case BYTECODE_OP_LT_S64:
	case BYTECODE_OP_GE_S64:
	case BYTECODE_OP_LE_S64:
	case BYTECODE_OP_BIT_RSHIFT:
	case BYTECODE_OP_BIT_LSHIFT:
	case BYTECODE_OP_BIT_AND:
	case BYTECODE_OP_BIT_OR:
	case BYTECODE_OP_BIT_XOR:
	{
		next_pc += sizeof(struct binary_op);
		break;
	}

	case BYTECODE_OP_UNARY_PLUS:
	case BYTECODE_OP_UNARY_MINUS:
	case BYTECODE_OP_UNARY_PLUS_S64:
	case BYTECODE_OP_UNARY_MINUS_S64:
	case BYTECODE_OP_UNARY_NOT_S64:
	case BYTECODE_OP_UNARY_NOT:
	case BYTECODE_OP_UNARY_BIT_NOT:
	{
		next_pc += sizeof(struct unary_op);
		break;
	}

	case BYTECODE_OP_AND:
	case BYTECODE_OP_OR:
	{
		next_pc += sizeof(struct logical_op);
		break;
	}

	/* load field ref */
	case BYTECODE_OP_LOAD_FIELD_REF:
	/* get context ref */
	case BYTECODE_OP_GET_CONTEXT_REF:
	{
		next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
		break;
	}
	case BYTECODE_OP_LOAD_FIELD_REF_STRING:
	case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
	case BYTECODE_OP_GET_CONTEXT_REF_STRING:
	case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING:
	case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE:
	case BYTECODE_OP_LOAD_FIELD_REF_S64:
	case BYTECODE_OP_GET_CONTEXT_REF_S64:
	{
		/*
		 * Reject specialized load field ref instructions.
		 */
		ret = -EINVAL;
		goto end;
	}

	/* load from immediate operand */
	case BYTECODE_OP_LOAD_STRING:
	case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
	{
		struct load_op *insn = (struct load_op *) pc;

		next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
		break;
	}

	case BYTECODE_OP_LOAD_S64:
	{
		next_pc += sizeof(struct load_op) + sizeof(struct literal_numeric);
		break;
	}

	case BYTECODE_OP_CAST_TO_S64:
	case BYTECODE_OP_CAST_NOP:
	{
		next_pc += sizeof(struct cast_op);
		break;
	}

	/*
	 * Instructions for recursive traversal through composed types.
	 */
	case BYTECODE_OP_GET_CONTEXT_ROOT:
	case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
	case BYTECODE_OP_GET_PAYLOAD_ROOT:
	case BYTECODE_OP_LOAD_FIELD:
	{
		next_pc += sizeof(struct load_op);
		break;
	}

	case BYTECODE_OP_LOAD_FIELD_S8:
	case BYTECODE_OP_LOAD_FIELD_S16:
	case BYTECODE_OP_LOAD_FIELD_S32:
	case BYTECODE_OP_LOAD_FIELD_S64:
	case BYTECODE_OP_LOAD_FIELD_U8:
	case BYTECODE_OP_LOAD_FIELD_U16:
	case BYTECODE_OP_LOAD_FIELD_U32:
	case BYTECODE_OP_LOAD_FIELD_U64:
	case BYTECODE_OP_LOAD_FIELD_STRING:
	case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
	case BYTECODE_OP_LOAD_FIELD_DOUBLE:
	{
		/*
		 * Reject specialized load field instructions.
		 */
		ret = -EINVAL;
		goto end;
	}

	case BYTECODE_OP_GET_SYMBOL:
	case BYTECODE_OP_GET_SYMBOL_FIELD:
	{
		next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
		break;
	}

	case BYTECODE_OP_GET_INDEX_U16:
	{
		next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
		break;
	}

	case BYTECODE_OP_GET_INDEX_U64:
	{
		next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
		break;
	}
	}
end:
	*_next_pc = next_pc;
	return ret;
}

/*
 * Return value:
 * >0: going to next insn.
 * 0: success, stop iteration.
 * <0: error
 */
static
int exec_insn(struct bytecode_runtime *bytecode,
		struct mp_table *mp_table,
		struct vstack *stack,
		char **_next_pc,
		char *pc)
{
	int ret = 1;
	char *next_pc = *_next_pc;

	switch (*(bytecode_opcode_t *) pc) {
	case BYTECODE_OP_UNKNOWN:
	default:
	{
		printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
			(unsigned int) *(bytecode_opcode_t *) pc);
		ret = -EINVAL;
		goto end;
	}

	case BYTECODE_OP_RETURN:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		case REG_S64:
		case REG_U64:
		case REG_TYPE_UNKNOWN:
			break;
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected register type %d at end of bytecode\n",
				(int) vstack_ax(stack)->type);
			ret = -EINVAL;
			goto end;
		}

		ret = 0;
		goto end;
	}

	case BYTECODE_OP_RETURN_S64:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		case REG_S64:
		case REG_U64:
			break;
		default:
		case REG_TYPE_UNKNOWN:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected register type %d at end of bytecode\n",
				(int) vstack_ax(stack)->type);
			ret = -EINVAL;
			goto end;
		}

		ret = 0;
		goto end;
	}

	case BYTECODE_OP_MUL:
	case BYTECODE_OP_DIV:
	case BYTECODE_OP_MOD:
	case BYTECODE_OP_PLUS:
	case BYTECODE_OP_MINUS:
	/* Floating point */
	case BYTECODE_OP_EQ_DOUBLE:
	case BYTECODE_OP_NE_DOUBLE:
	case BYTECODE_OP_GT_DOUBLE:
	case BYTECODE_OP_LT_DOUBLE:
	case BYTECODE_OP_GE_DOUBLE:
	case BYTECODE_OP_LE_DOUBLE:
	case BYTECODE_OP_EQ_DOUBLE_S64:
	case BYTECODE_OP_NE_DOUBLE_S64:
	case BYTECODE_OP_GT_DOUBLE_S64:
	case BYTECODE_OP_LT_DOUBLE_S64:
	case BYTECODE_OP_GE_DOUBLE_S64:
	case BYTECODE_OP_LE_DOUBLE_S64:
	case BYTECODE_OP_EQ_S64_DOUBLE:
	case BYTECODE_OP_NE_S64_DOUBLE:
	case BYTECODE_OP_GT_S64_DOUBLE:
	case BYTECODE_OP_LT_S64_DOUBLE:
	case BYTECODE_OP_GE_S64_DOUBLE:
	case BYTECODE_OP_LE_S64_DOUBLE:
	case BYTECODE_OP_UNARY_PLUS_DOUBLE:
	case BYTECODE_OP_UNARY_MINUS_DOUBLE:
	case BYTECODE_OP_UNARY_NOT_DOUBLE:
	case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
	case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
	case BYTECODE_OP_LOAD_DOUBLE:
	case BYTECODE_OP_CAST_DOUBLE_TO_S64:
	{
		printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
			(unsigned int) *(bytecode_opcode_t *) pc);
		ret = -EINVAL;
		goto end;
	}

	case BYTECODE_OP_EQ:
	case BYTECODE_OP_NE:
	case BYTECODE_OP_GT:
	case BYTECODE_OP_LT:
	case BYTECODE_OP_GE:
	case BYTECODE_OP_LE:
	case BYTECODE_OP_EQ_STRING:
	case BYTECODE_OP_NE_STRING:
	case BYTECODE_OP_GT_STRING:
	case BYTECODE_OP_LT_STRING:
	case BYTECODE_OP_GE_STRING:
	case BYTECODE_OP_LE_STRING:
	case BYTECODE_OP_EQ_STAR_GLOB_STRING:
	case BYTECODE_OP_NE_STAR_GLOB_STRING:
	case BYTECODE_OP_EQ_S64:
	case BYTECODE_OP_NE_S64:
	case BYTECODE_OP_GT_S64:
	case BYTECODE_OP_LT_S64:
	case BYTECODE_OP_GE_S64:
	case BYTECODE_OP_LE_S64:
	{
		if (vstack_pop(stack)) {
			ret = -EINVAL;
			goto end;
		}
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		case REG_S64:
		case REG_U64:
		case REG_STRING:
		case REG_STAR_GLOB_STRING:
		case REG_TYPE_UNKNOWN:
			break;
		default:
			printk(KERN_WARNING "Unexpected register type %d for operation\n",
				(int) vstack_ax(stack)->type);
			ret = -EINVAL;
			goto end;
		}

		vstack_ax(stack)->type = REG_S64;
		next_pc += sizeof(struct binary_op);
		break;
	}

	case BYTECODE_OP_BIT_RSHIFT:
	case BYTECODE_OP_BIT_LSHIFT:
	case BYTECODE_OP_BIT_AND:
	case BYTECODE_OP_BIT_OR:
	case BYTECODE_OP_BIT_XOR:
	{
		if (vstack_pop(stack)) {
			ret = -EINVAL;
			goto end;
		}
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		case REG_S64:
		case REG_U64:
		case REG_STAR_GLOB_STRING:
		case REG_TYPE_UNKNOWN:
			break;
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected register type %d for operation\n",
				(int) vstack_ax(stack)->type);
			ret = -EINVAL;
			goto end;
		}

		vstack_ax(stack)->type = REG_U64;
		next_pc += sizeof(struct binary_op);
		break;
	}

	case BYTECODE_OP_UNARY_PLUS:
	case BYTECODE_OP_UNARY_MINUS:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		case REG_S64:
		case REG_U64:
		case REG_TYPE_UNKNOWN:
			break;
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected register type %d for operation\n",
				(int) vstack_ax(stack)->type);
			ret = -EINVAL;
			goto end;
		}

		vstack_ax(stack)->type = REG_TYPE_UNKNOWN;
		next_pc += sizeof(struct unary_op);
		break;
	}

	case BYTECODE_OP_UNARY_PLUS_S64:
	case BYTECODE_OP_UNARY_MINUS_S64:
	case BYTECODE_OP_UNARY_NOT_S64:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		case REG_S64:
		case REG_U64:
			break;
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected register type %d for operation\n",
				(int) vstack_ax(stack)->type);
			ret = -EINVAL;
			goto end;
		}

		next_pc += sizeof(struct unary_op);
		break;
	}

	case BYTECODE_OP_UNARY_NOT:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		case REG_S64:
		case REG_U64:
		case REG_TYPE_UNKNOWN:
			break;
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected register type %d for operation\n",
				(int) vstack_ax(stack)->type);
			ret = -EINVAL;
			goto end;
		}

		next_pc += sizeof(struct unary_op);
		break;
	}

	case BYTECODE_OP_UNARY_BIT_NOT:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		case REG_S64:
		case REG_U64:
		case REG_TYPE_UNKNOWN:
			break;
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected register type %d for operation\n",
				(int) vstack_ax(stack)->type);
			ret = -EINVAL;
			goto end;
		}

		vstack_ax(stack)->type = REG_U64;
		next_pc += sizeof(struct unary_op);
		break;
	}

	case BYTECODE_OP_AND:
	case BYTECODE_OP_OR:
	{
		struct logical_op *insn = (struct logical_op *) pc;
		int merge_ret;

		/* Add merge point to table */
		merge_ret = merge_point_add_check(mp_table,
					insn->skip_offset, stack);
		if (merge_ret) {
			ret = merge_ret;
			goto end;
		}

		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		/* There is always a cast-to-s64 operation before an or/and op. */
		switch (vstack_ax(stack)->type) {
		case REG_S64:
		case REG_U64:
			break;
		default:
			printk(KERN_WARNING "LTTng: bytecode: Incorrect register type %d for operation\n",
				(int) vstack_ax(stack)->type);
			ret = -EINVAL;
			goto end;
		}

		/* Continue to next instruction */
		/* Pop 1 when jump not taken */
		if (vstack_pop(stack)) {
			ret = -EINVAL;
			goto end;
		}
		next_pc += sizeof(struct logical_op);
		break;
	}

	/* load field ref */
	case BYTECODE_OP_LOAD_FIELD_REF:
	{
		printk(KERN_WARNING "LTTng: bytecode: Unknown field ref type\n");
		ret = -EINVAL;
		goto end;
	}
	/* get context ref */
	case BYTECODE_OP_GET_CONTEXT_REF:
	{
		printk(KERN_WARNING "LTTng: bytecode: Unknown get context ref type\n");
		ret = -EINVAL;
		goto end;
	}
	case BYTECODE_OP_LOAD_FIELD_REF_STRING:
	case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
	case BYTECODE_OP_GET_CONTEXT_REF_STRING:
	case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING:
	case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE:
	{
		if (vstack_push(stack)) {
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_STRING;
		next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
		break;
	}
	case BYTECODE_OP_LOAD_FIELD_REF_S64:
	case BYTECODE_OP_GET_CONTEXT_REF_S64:
	{
		if (vstack_push(stack)) {
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_S64;
		next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
		break;
	}

	/* load from immediate operand */
	case BYTECODE_OP_LOAD_STRING:
	{
		struct load_op *insn = (struct load_op *) pc;

		if (vstack_push(stack)) {
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_STRING;
		next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
		break;
	}

	case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
	{
		struct load_op *insn = (struct load_op *) pc;

		if (vstack_push(stack)) {
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
		next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
		break;
	}

	case BYTECODE_OP_LOAD_S64:
	{
		if (vstack_push(stack)) {
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_S64;
		next_pc += sizeof(struct load_op)
				+ sizeof(struct literal_numeric);
		break;
	}

	case BYTECODE_OP_CAST_TO_S64:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n");
			ret = -EINVAL;
			goto end;
		}
		switch (vstack_ax(stack)->type) {
		case REG_S64:
		case REG_U64:
		case REG_DOUBLE:
		case REG_TYPE_UNKNOWN:
			break;
		default:
			printk(KERN_WARNING "LTTng: bytecode: Incorrect register type %d for cast\n",
				(int) vstack_ax(stack)->type);
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_S64;
		next_pc += sizeof(struct cast_op);
		break;
	}
	case BYTECODE_OP_CAST_NOP:
	{
		next_pc += sizeof(struct cast_op);
		break;
	}

	/*
	 * Instructions for recursive traversal through composed types.
	 */
	case BYTECODE_OP_GET_CONTEXT_ROOT:
	case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
	case BYTECODE_OP_GET_PAYLOAD_ROOT:
	{
		if (vstack_push(stack)) {
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_PTR;
		next_pc += sizeof(struct load_op);
		break;
	}

	case BYTECODE_OP_LOAD_FIELD:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_PTR) {
			printk(KERN_WARNING "LTTng: bytecode: Expecting pointer on top of stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_TYPE_UNKNOWN;
		next_pc += sizeof(struct load_op);
		break;
	}

	case BYTECODE_OP_LOAD_FIELD_S8:
	case BYTECODE_OP_LOAD_FIELD_S16:
	case BYTECODE_OP_LOAD_FIELD_S32:
	case BYTECODE_OP_LOAD_FIELD_S64:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_PTR) {
			printk(KERN_WARNING "Expecting pointer on top of stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_S64;
		next_pc += sizeof(struct load_op);
		break;
	}
	case BYTECODE_OP_LOAD_FIELD_U8:
	case BYTECODE_OP_LOAD_FIELD_U16:
	case BYTECODE_OP_LOAD_FIELD_U32:
	case BYTECODE_OP_LOAD_FIELD_U64:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_PTR) {
			printk(KERN_WARNING "LTTng: bytecode: Expecting pointer on top of stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_U64;
		next_pc += sizeof(struct load_op);
		break;
	}
	case BYTECODE_OP_LOAD_FIELD_STRING:
	case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_PTR) {
			printk(KERN_WARNING "LTTng: bytecode: Expecting pointer on top of stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_STRING;
		next_pc += sizeof(struct load_op);
		break;
	}

	case BYTECODE_OP_LOAD_FIELD_DOUBLE:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_PTR) {
			printk(KERN_WARNING "LTTng: bytecode: Expecting pointer on top of stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		vstack_ax(stack)->type = REG_DOUBLE;
		next_pc += sizeof(struct load_op);
		break;
	}

	case BYTECODE_OP_GET_SYMBOL:
	case BYTECODE_OP_GET_SYMBOL_FIELD:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_PTR) {
			printk(KERN_WARNING "LTTng: bytecode: Expecting pointer on top of stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
		break;
	}

	case BYTECODE_OP_GET_INDEX_U16:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_PTR) {
			printk(KERN_WARNING "LTTng: bytecode: Expecting pointer on top of stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
		break;
	}

	case BYTECODE_OP_GET_INDEX_U64:
	{
		if (!vstack_ax(stack)) {
			printk(KERN_WARNING "LTTng: bytecode: Empty stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		if (vstack_ax(stack)->type != REG_PTR) {
			printk(KERN_WARNING "LTTng: bytecode: Expecting pointer on top of stack\n\n");
			ret = -EINVAL;
			goto end;
		}
		next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
		break;
	}
	}
end:
	*_next_pc = next_pc;
	return ret;
}
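
/*
 * First pass over the bytecode: check that every instruction fits within the
 * bytecode buffer and reject already specialized load instructions, without
 * tracking the virtual stack.
 */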
int lttng_bytecode_validate_load(struct bytecode_runtime *bytecode)
{
	char *pc, *next_pc, *start_pc;
	int ret = -EINVAL;

	start_pc = &bytecode->code[0];
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
			pc = next_pc) {
		ret = bytecode_validate_overflow(bytecode, start_pc, pc);
		if (ret != 0) {
			printk(KERN_WARNING "LTTng: bytecode: bytecode overflow\n");
			break;
		}
		dbg_printk("Validating loads: op %s (%u)\n",
			lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc),
			(unsigned int) *(bytecode_opcode_t *) pc);
		ret = validate_load(&next_pc, pc);
		if (ret)
			break;
	}
	return ret;
}

/*
 * Never called concurrently (hash seed is shared).
 */
int lttng_bytecode_validate(struct bytecode_runtime *bytecode)
{
	struct mp_table *mp_table;
	char *pc, *next_pc, *start_pc;
	int ret = -EINVAL;
	struct vstack stack;

	vstack_init(&stack);

	mp_table = kzalloc(sizeof(*mp_table), GFP_KERNEL);
	if (!mp_table) {
		printk(KERN_WARNING "LTTng: bytecode: Error allocating hash table for bytecode validation\n");
		return -ENOMEM;
	}
	start_pc = &bytecode->code[0];
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
			pc = next_pc) {
		ret = bytecode_validate_overflow(bytecode, start_pc, pc);
		if (ret != 0) {
			printk(KERN_WARNING "LTTng: bytecode: bytecode overflow\n");
			goto end;
		}
		dbg_printk("Validating op %s (%u)\n",
			lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc),
			(unsigned int) *(bytecode_opcode_t *) pc);

		/*
		 * For each instruction, validate the current context
		 * (traversal of entire execution flow), and validate
		 * all merge points targeting this instruction.
		 */
		ret = validate_instruction_all_contexts(bytecode, mp_table,
					&stack, start_pc, pc);
		if (ret)
			goto end;
		ret = exec_insn(bytecode, mp_table, &stack, &next_pc, pc);
		if (ret <= 0)
			goto end;
	}
end:
	if (delete_all_nodes(mp_table)) {
		if (!ret) {
			printk(KERN_WARNING "LTTng: bytecode: Unexpected merge points\n");
			ret = -EINVAL;
		}
	}
	kfree(mp_table);
	return ret;
}