/* SPDX-License-Identifier: MIT
 *
 * lttng-bytecode-specialize.c
 *
 * LTTng modules bytecode code specializer.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 */

#include <linux/slab.h>
#include <lttng/lttng-bytecode.h>
#include <lttng/align.h>
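
/*
 * Reserve "len" bytes, aligned on "align", in the runtime data area that
 * holds operands referenced by specialized instructions. The allocation
 * grows with krealloc() as needed. Returns the offset of the reserved
 * region within runtime->data, or a negative error code.
 */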
static ssize_t bytecode_reserve_data(struct bytecode_runtime *runtime,
		size_t align, size_t len)
{
	ssize_t ret;
	size_t padding = offset_align(runtime->data_len, align);
	size_t new_len = runtime->data_len + padding + len;
	size_t new_alloc_len = new_len;
	size_t old_alloc_len = runtime->data_alloc_len;

	if (new_len > INTERPRETER_MAX_DATA_LEN)
		return -EINVAL;

	if (new_alloc_len > old_alloc_len) {
		char *newptr;

		new_alloc_len =
			max_t(size_t, 1U << get_count_order(new_alloc_len), old_alloc_len << 1);
		newptr = krealloc(runtime->data, new_alloc_len, GFP_KERNEL);
		if (!newptr)
			return -ENOMEM;
		runtime->data = newptr;
		/* We zero directly the memory from start of allocation. */
		memset(&runtime->data[old_alloc_len], 0, new_alloc_len - old_alloc_len);
		runtime->data_alloc_len = new_alloc_len;
	}
	runtime->data_len += padding;
	ret = runtime->data_len;
	runtime->data_len += len;
	return ret;
}
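
/*
 * Copy "len" bytes from "p" into the runtime data area at an offset
 * aligned on "align". Returns the offset at which the data was stored,
 * or a negative error code if the reservation failed.
 */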
static ssize_t bytecode_push_data(struct bytecode_runtime *runtime,
		const void *p, size_t align, size_t len)
{
	ssize_t offset;

	offset = bytecode_reserve_data(runtime, align, len);
	if (offset < 0)
		return -ENOMEM;
	memcpy(&runtime->data[offset], p, len);
	return offset;
}
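
/*
 * Typical use of the helpers above, as done by the get_index/get_symbol
 * specialization further down in this file: an operand structure is
 * appended to the data area and the instruction keeps only its offset.
 * Sketch (based on specialize_get_index() below):
 *
 *	struct bytecode_get_index_data gid;
 *	ssize_t data_offset;
 *
 *	memset(&gid, 0, sizeof(gid));
 *	data_offset = bytecode_push_data(runtime, &gid,
 *			__alignof__(gid), sizeof(gid));
 *	if (data_offset < 0)
 *		return -EINVAL;
 *	((struct get_index_u16 *) insn->data)->index = data_offset;
 */

/*
 * Rewrite a generic BYTECODE_OP_LOAD_FIELD into a type-specific load
 * opcode, based on the object type recorded on the virtual stack by the
 * preceding get_index/get_symbol specialization.
 */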
static int specialize_load_field(struct vstack_entry *stack_top,
		struct load_op *insn)
{
	int ret;

	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->load.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printk("op load field s8\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S8;
		break;
	case OBJECT_TYPE_S16:
		dbg_printk("op load field s16\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S16;
		break;
	case OBJECT_TYPE_S32:
		dbg_printk("op load field s32\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S32;
		break;
	case OBJECT_TYPE_S64:
		dbg_printk("op load field s64\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S64;
		break;
	case OBJECT_TYPE_SIGNED_ENUM:
		dbg_printk("op load field signed enumeration\n");
		stack_top->type = REG_PTR;
		break;
	case OBJECT_TYPE_U8:
		dbg_printk("op load field u8\n");
		stack_top->type = REG_S64;
		insn->op = BYTECODE_OP_LOAD_FIELD_U8;
		break;
	case OBJECT_TYPE_U16:
		dbg_printk("op load field u16\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U16;
		break;
	case OBJECT_TYPE_U32:
		dbg_printk("op load field u32\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U32;
		break;
	case OBJECT_TYPE_U64:
		dbg_printk("op load field u64\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U64;
		break;
	case OBJECT_TYPE_UNSIGNED_ENUM:
		dbg_printk("op load field unsigned enumeration\n");
		stack_top->type = REG_PTR;
		break;
	case OBJECT_TYPE_DOUBLE:
		printk(KERN_WARNING "LTTng: bytecode: Double type unsupported\n");
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_STRING:
		dbg_printk("op load field string\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_STRING;
		break;
	case OBJECT_TYPE_STRING_SEQUENCE:
		dbg_printk("op load field string sequence\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_SEQUENCE;
		break;
	case OBJECT_TYPE_DYNAMIC:
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
/*
 * Map the element length (in bits) and signedness of an integer element
 * to the corresponding object type.
 */
static int specialize_get_index_object_type(enum object_type *otype,
		int signedness, uint32_t elem_len)
{
	switch (elem_len) {
	case 8:
		if (signedness)
			*otype = OBJECT_TYPE_S8;
		else
			*otype = OBJECT_TYPE_U8;
		break;
	case 16:
		if (signedness)
			*otype = OBJECT_TYPE_S16;
		else
			*otype = OBJECT_TYPE_U16;
		break;
	case 32:
		if (signedness)
			*otype = OBJECT_TYPE_S32;
		else
			*otype = OBJECT_TYPE_U32;
		break;
	case 64:
		if (signedness)
			*otype = OBJECT_TYPE_S64;
		else
			*otype = OBJECT_TYPE_U64;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}
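
/*
 * Specialize a get_index operation on the object currently described by
 * the top of the virtual stack: validate the element type (and, for
 * arrays, the index bound), then emit a bytecode_get_index_data operand
 * into the runtime data area and store its offset in the instruction.
 */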
static int specialize_get_index(struct bytecode_runtime *runtime,
		struct load_op *insn, uint64_t index,
		struct vstack_entry *stack_top,
		int idx_len)
{
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	memset(&gid, 0, sizeof(gid));
	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		switch (stack_top->load.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const struct lttng_integer_type *integer_type;
			const struct lttng_event_field *field;
			uint32_t elem_len, num_elems;
			int signedness;

			field = stack_top->load.field;
			if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
				ret = -EINVAL;
				goto end;
			}
			integer_type = &field->type.u.array_nestable.elem_type->u.integer;
			num_elems = field->type.u.array_nestable.length;
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			if (index >= num_elems) {
				ret = -EINVAL;
				goto end;
			}
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.array_len = num_elems * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const struct lttng_integer_type *integer_type;
			const struct lttng_event_field *field;
			uint32_t elem_len;
			int signedness;

			field = stack_top->load.field;
			if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
				ret = -EINVAL;
				goto end;
			}
			integer_type = &field->type.u.sequence_nestable.elem_type->u.integer;
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			/* Only generated by the specialize phase. */
		case OBJECT_TYPE_VARIANT:	/* Fall-through */
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d",
				(int) stack_top->load.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
		printk(KERN_WARNING "LTTng: bytecode: Index lookup for root field not implemented yet.\n");
		ret = -EINVAL;
		goto end;
	}
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	switch (idx_len) {
	case 2:
		((struct get_index_u16 *) insn->data)->index = data_offset;
		break;
	case 8:
		((struct get_index_u64 *) insn->data)->index = data_offset;
		break;
	default:
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
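
/*
 * Resolve the context field name referenced by a get_symbol instruction
 * (stored in the bytecode relocation table) into a context index.
 */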
static int specialize_context_lookup_name(struct lttng_ctx *ctx,
		struct bytecode_runtime *bytecode,
		struct load_op *insn)
{
	uint16_t offset;
	const char *name;

	offset = ((struct get_symbol *) insn->data)->offset;
	name = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + offset;
	return lttng_get_context_index(ctx, name);
}
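
/*
 * Fill the vstack_load descriptor for a field about to be loaded, based
 * on the field type. Context fields (is_context) expose arrays and
 * sequences as strings.
 */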
static int specialize_load_object(const struct lttng_event_field *field,
		struct vstack_load *load, bool is_context)
{
	load->type = LOAD_OBJECT;

	switch (field->type.atype) {
	case atype_integer:
		if (field->type.u.integer.signedness)
			load->object_type = OBJECT_TYPE_S64;
		else
			load->object_type = OBJECT_TYPE_U64;
		load->rev_bo = false;
		break;
	case atype_enum_nestable:
	{
		const struct lttng_integer_type *itype =
			&field->type.u.enum_nestable.container_type->u.integer;

		if (itype->signedness)
			load->object_type = OBJECT_TYPE_SIGNED_ENUM;
		else
			load->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
		load->rev_bo = false;
		break;
	}
	case atype_array_nestable:
		if (!lttng_is_bytewise_integer(field->type.u.array_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_ARRAY;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case atype_sequence_nestable:
		if (!lttng_is_bytewise_integer(field->type.u.sequence_nestable.elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_SEQUENCE;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case atype_string:
		load->object_type = OBJECT_TYPE_STRING;
		break;
	case atype_struct_nestable:
		printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
		return -EINVAL;
	case atype_variant_nestable:
		printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d", (int) field->type.atype);
		return -EINVAL;
	}
	return 0;
}
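
/*
 * Specialize a get_symbol on the context root: resolve the context field,
 * describe it on the virtual stack, and rewrite the instruction into a
 * get_index_u16 referencing a bytecode_get_index_data operand.
 */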
static int specialize_context_lookup(struct lttng_ctx *ctx,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	int idx, ret;
	struct lttng_ctx_field *ctx_field;
	struct lttng_event_field *field;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	idx = specialize_context_lookup_name(ctx, runtime, insn);
	if (idx < 0) {
		return -ENOENT;
	}
	ctx_field = &lttng_static_ctx->fields[idx];
	field = &ctx_field->event_field;
	ret = specialize_load_object(field, load, true);
	if (ret)
		return ret;
	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.ctx_index = idx;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		return -EINVAL;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	return 0;
}
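
/*
 * Specialize a get_symbol on the payload root: find the named event
 * field, compute its offset on the interpreter output stack, and rewrite
 * the instruction into a get_index_u16 referencing the operand that
 * describes it.
 */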
static int specialize_payload_lookup(const struct lttng_event_desc *event_desc,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	const char *name;
	uint16_t offset;
	unsigned int i, nr_fields;
	bool found = false;
	uint32_t field_offset = 0;
	const struct lttng_event_field *field;
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	nr_fields = event_desc->nr_fields;
	offset = ((struct get_symbol *) insn->data)->offset;
	name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
	for (i = 0; i < nr_fields; i++) {
		field = &event_desc->fields[i];
		if (field->nofilter) {
			continue;
		}
		if (!strcmp(field->name, name)) {
			found = true;
			break;
		}
		/* compute field offset on stack */
		switch (field->type.atype) {
		case atype_integer:
		case atype_enum_nestable:
			field_offset += sizeof(int64_t);
			break;
		case atype_array_nestable:
		case atype_sequence_nestable:
			field_offset += sizeof(unsigned long);
			field_offset += sizeof(void *);
			break;
		case atype_string:
			field_offset += sizeof(void *);
			break;
		default:
			ret = -EINVAL;
			goto end;
		}
	}
	if (!found) {
		ret = -EINVAL;
		goto end;
	}

	ret = specialize_load_object(field, load, false);
	if (ret)
		goto end;

	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.offset = field_offset;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	ret = 0;
end:
	return ret;
}
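
/*
 * Single pass over the bytecode: each generic opcode is rewritten into a
 * type-specialized opcode, using a virtual stack that tracks the register
 * type produced by every instruction. Returns 0 on success, or a negative
 * error code on invalid or unsupported bytecode.
 */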
int lttng_bytecode_specialize(const struct lttng_event_desc *event_desc,
		struct bytecode_runtime *bytecode)
{
	void *pc, *next_pc, *start_pc;
	int ret = -EINVAL;
	struct vstack _stack;
	struct vstack *stack = &_stack;
	struct lttng_ctx *ctx = bytecode->p.ctx;

	vstack_init(stack);

	start_pc = &bytecode->code[0];
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
			pc = next_pc) {
		switch (*(bytecode_opcode_t *) pc) {
		case BYTECODE_OP_UNKNOWN:
		default:
			printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
				(unsigned int) *(bytecode_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		case BYTECODE_OP_RETURN:
		case BYTECODE_OP_RETURN_S64:
			ret = 0;
			goto end;

		/* binary */
		case BYTECODE_OP_MUL:
		case BYTECODE_OP_DIV:
		case BYTECODE_OP_MOD:
		case BYTECODE_OP_PLUS:
		case BYTECODE_OP_MINUS:
			printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
				(unsigned int) *(bytecode_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

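		/*
		 * Binary comparison operators: the generic EQ/NE/GT/LT/GE/LE
		 * opcodes below are rewritten into type-specialized opcodes
		 * based on the register types tracked on the virtual stack
		 * (ax/bx).
		 */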
		case BYTECODE_OP_EQ:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
				if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
					insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
				else
					insn->op = BYTECODE_OP_EQ_STRING;
				break;
			case REG_STAR_GLOB_STRING:
				insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_EQ_S64;
				else
					insn->op = BYTECODE_OP_EQ_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_EQ_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_EQ_DOUBLE;
				break;
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}
		case BYTECODE_OP_NE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
				if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
					insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
				else
					insn->op = BYTECODE_OP_NE_STRING;
				break;
			case REG_STAR_GLOB_STRING:
				insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_NE_S64;
				else
					insn->op = BYTECODE_OP_NE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_NE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_NE_DOUBLE;
				break;
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}
		case BYTECODE_OP_GT:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				printk(KERN_WARNING "LTTng: bytecode: invalid register type for '>' binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				insn->op = BYTECODE_OP_GT_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_GT_S64;
				else
					insn->op = BYTECODE_OP_GT_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_GT_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_GT_DOUBLE;
				break;
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}
		case BYTECODE_OP_LT:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				printk(KERN_WARNING "LTTng: bytecode: invalid register type for '<' binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				insn->op = BYTECODE_OP_LT_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_LT_S64;
				else
					insn->op = BYTECODE_OP_LT_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_LT_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_LT_DOUBLE;
				break;
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}
		case BYTECODE_OP_GE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				printk(KERN_WARNING "LTTng: bytecode: invalid register type for '>=' binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				insn->op = BYTECODE_OP_GE_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_GE_S64;
				else
					insn->op = BYTECODE_OP_GE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_GE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_GE_DOUBLE;
				break;
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}
		case BYTECODE_OP_LE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				printk(KERN_WARNING "LTTng: bytecode: invalid register type for '<=' binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				insn->op = BYTECODE_OP_LE_STRING;
				break;
			case REG_S64:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_LE_S64;
				else
					insn->op = BYTECODE_OP_LE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_S64)
					insn->op = BYTECODE_OP_LE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_LE_DOUBLE;
				break;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}
		case BYTECODE_OP_EQ_STRING:
		case BYTECODE_OP_NE_STRING:
		case BYTECODE_OP_GT_STRING:
		case BYTECODE_OP_LT_STRING:
		case BYTECODE_OP_GE_STRING:
		case BYTECODE_OP_LE_STRING:
		case BYTECODE_OP_EQ_STAR_GLOB_STRING:
		case BYTECODE_OP_NE_STAR_GLOB_STRING:
		case BYTECODE_OP_EQ_S64:
		case BYTECODE_OP_NE_S64:
		case BYTECODE_OP_GT_S64:
		case BYTECODE_OP_LT_S64:
		case BYTECODE_OP_GE_S64:
		case BYTECODE_OP_LE_S64:
		case BYTECODE_OP_EQ_DOUBLE:
		case BYTECODE_OP_NE_DOUBLE:
		case BYTECODE_OP_GT_DOUBLE:
		case BYTECODE_OP_LT_DOUBLE:
		case BYTECODE_OP_GE_DOUBLE:
		case BYTECODE_OP_LE_DOUBLE:
		case BYTECODE_OP_EQ_DOUBLE_S64:
		case BYTECODE_OP_NE_DOUBLE_S64:
		case BYTECODE_OP_GT_DOUBLE_S64:
		case BYTECODE_OP_LT_DOUBLE_S64:
		case BYTECODE_OP_GE_DOUBLE_S64:
		case BYTECODE_OP_LE_DOUBLE_S64:
		case BYTECODE_OP_EQ_S64_DOUBLE:
		case BYTECODE_OP_NE_S64_DOUBLE:
		case BYTECODE_OP_GT_S64_DOUBLE:
		case BYTECODE_OP_LT_S64_DOUBLE:
		case BYTECODE_OP_GE_S64_DOUBLE:
		case BYTECODE_OP_LE_S64_DOUBLE:
		case BYTECODE_OP_BIT_RSHIFT:
		case BYTECODE_OP_BIT_LSHIFT:
		case BYTECODE_OP_BIT_AND:
		case BYTECODE_OP_BIT_OR:
		case BYTECODE_OP_BIT_XOR:
		{
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}
		/* unary */
		case BYTECODE_OP_UNARY_PLUS:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
				insn->op = BYTECODE_OP_UNARY_PLUS_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_PLUS_DOUBLE;
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_MINUS:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
				insn->op = BYTECODE_OP_UNARY_MINUS_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_MINUS_DOUBLE;
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_NOT:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
				insn->op = BYTECODE_OP_UNARY_NOT_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_NOT_DOUBLE;
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_BIT_NOT:
		{
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_PLUS_S64:
		case BYTECODE_OP_UNARY_MINUS_S64:
		case BYTECODE_OP_UNARY_NOT_S64:
		case BYTECODE_OP_UNARY_PLUS_DOUBLE:
		case BYTECODE_OP_UNARY_MINUS_DOUBLE:
		case BYTECODE_OP_UNARY_NOT_DOUBLE:
		{
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}
		/* logical */
		case BYTECODE_OP_AND:
		case BYTECODE_OP_OR:
		{
			/* Continue to next instruction */
			/* Pop 1 when jump not taken */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			next_pc += sizeof(struct logical_op);
			break;
		}
		/* load field ref */
		case BYTECODE_OP_LOAD_FIELD_REF:
		{
			printk(KERN_WARNING "LTTng: bytecode: Unknown field ref type\n");
			ret = -EINVAL;
			goto end;
		}
		/* get context ref */
		case BYTECODE_OP_GET_CONTEXT_REF:
		{
			printk(KERN_WARNING "LTTng: bytecode: Unknown get context ref type\n");
			ret = -EINVAL;
			goto end;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_STRING:
		case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
		case BYTECODE_OP_GET_CONTEXT_REF_STRING:
		case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING:
		case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_S64:
		case BYTECODE_OP_GET_CONTEXT_REF_S64:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
		case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		/* load from immediate operand */
		case BYTECODE_OP_LOAD_STRING:
		{
			struct load_op *insn = (struct load_op *) pc;

			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			break;
		}

		case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
		{
			struct load_op *insn = (struct load_op *) pc;

			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			break;
		}

		case BYTECODE_OP_LOAD_S64:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_numeric);
			break;
		}

		case BYTECODE_OP_LOAD_DOUBLE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_double);
			break;
		}
		/* cast */
		case BYTECODE_OP_CAST_TO_S64:
		{
			struct cast_op *insn = (struct cast_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
			case REG_STAR_GLOB_STRING:
				printk(KERN_WARNING "LTTng: bytecode: Cast op can only be applied to numeric or floating point registers\n");
				ret = -EINVAL;
				goto end;
			case REG_S64:
				insn->op = BYTECODE_OP_CAST_NOP;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_CAST_DOUBLE_TO_S64;
				break;
			}
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct cast_op);
			break;
		}
		case BYTECODE_OP_CAST_DOUBLE_TO_S64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct cast_op);
			break;
		}
		case BYTECODE_OP_CAST_NOP:
		{
			next_pc += sizeof(struct cast_op);
			break;
		}
		/*
		 * Instructions for recursive traversal through composed types.
		 */
		case BYTECODE_OP_GET_CONTEXT_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_CONTEXT;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_APP_CONTEXT;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_GET_PAYLOAD_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_PAYLOAD;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD:
		{
			struct load_op *insn = (struct load_op *) pc;

			WARN_ON_ONCE(vstack_ax(stack)->type != REG_PTR);
			/* Pop 1, push 1 */
			ret = specialize_load_field(vstack_ax(stack), insn);
			if (ret)
				goto end;

			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_S8:
		case BYTECODE_OP_LOAD_FIELD_S16:
		case BYTECODE_OP_LOAD_FIELD_S32:
		case BYTECODE_OP_LOAD_FIELD_S64:
		case BYTECODE_OP_LOAD_FIELD_U8:
		case BYTECODE_OP_LOAD_FIELD_U16:
		case BYTECODE_OP_LOAD_FIELD_U32:
		case BYTECODE_OP_LOAD_FIELD_U64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_STRING:
		case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_DOUBLE:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_GET_SYMBOL:
		{
			struct load_op *insn = (struct load_op *) pc;

			dbg_printk("op get symbol\n");
			switch (vstack_ax(stack)->load.type) {
			case LOAD_OBJECT:
				printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
				ret = -EINVAL;
				goto end;
			case LOAD_ROOT_CONTEXT:
				/* Lookup context field. */
				ret = specialize_context_lookup(ctx, bytecode, insn,
					&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			case LOAD_ROOT_APP_CONTEXT:
				ret = -EINVAL;
				goto end;
			case LOAD_ROOT_PAYLOAD:
				/* Lookup event payload field. */
				ret = specialize_payload_lookup(event_desc,
					bytecode, insn,
					&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			}
			next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
			break;
		}

		case BYTECODE_OP_GET_SYMBOL_FIELD:
		{
			/* Always generated by specialize phase. */
			ret = -EINVAL;
			goto end;
		}
		case BYTECODE_OP_GET_INDEX_U16:
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u16 *index = (struct get_index_u16 *) insn->data;

			dbg_printk("op get index u16\n");
			/* Pop 1, push 1 */
			ret = specialize_get_index(bytecode, insn, index->index,
					vstack_ax(stack), sizeof(*index));
			if (ret)
				goto end;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
			break;
		}

		case BYTECODE_OP_GET_INDEX_U64:
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u64 *index = (struct get_index_u64 *) insn->data;

			dbg_printk("op get index u64\n");
			/* Pop 1, push 1 */
			ret = specialize_get_index(bytecode, insn, index->index,
					vstack_ax(stack), sizeof(*index));
			if (ret)
				goto end;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
			break;
		}