/* SPDX-License-Identifier: MIT
 *
 * lttng-bytecode-specialize.c
 *
 * LTTng modules bytecode code specializer.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 */
10 #include <linux/slab.h>
11 #include <lttng/lttng-bytecode.h>
12 #include <lttng/align.h>
13 #include <lttng/events-internal.h>
15 static ssize_t
bytecode_reserve_data(struct bytecode_runtime
*runtime
,
16 size_t align
, size_t len
)
19 size_t padding
= offset_align(runtime
->data_len
, align
);
20 size_t new_len
= runtime
->data_len
+ padding
+ len
;
21 size_t new_alloc_len
= new_len
;
22 size_t old_alloc_len
= runtime
->data_alloc_len
;
24 if (new_len
> INTERPRETER_MAX_DATA_LEN
)
27 if (new_alloc_len
> old_alloc_len
) {
31 max_t(size_t, 1U << get_count_order(new_alloc_len
), old_alloc_len
<< 1);
32 newptr
= krealloc(runtime
->data
, new_alloc_len
, GFP_KERNEL
);
35 runtime
->data
= newptr
;
36 /* We zero directly the memory from start of allocation. */
37 memset(&runtime
->data
[old_alloc_len
], 0, new_alloc_len
- old_alloc_len
);
38 runtime
->data_alloc_len
= new_alloc_len
;
40 runtime
->data_len
+= padding
;
41 ret
= runtime
->data_len
;
42 runtime
->data_len
+= len
;
46 static ssize_t
bytecode_push_data(struct bytecode_runtime
*runtime
,
47 const void *p
, size_t align
, size_t len
)
51 offset
= bytecode_reserve_data(runtime
, align
, len
);
54 memcpy(&runtime
->data
[offset
], p
, len
);
58 static int specialize_load_field(struct vstack_entry
*stack_top
,
63 switch (stack_top
->load
.type
) {
66 case LOAD_ROOT_CONTEXT
:
67 case LOAD_ROOT_APP_CONTEXT
:
68 case LOAD_ROOT_PAYLOAD
:
70 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
74 switch (stack_top
->load
.object_type
) {
76 dbg_printk("op load field s8\n");
77 stack_top
->type
= REG_S64
;
78 if (!stack_top
->load
.rev_bo
)
79 insn
->op
= BYTECODE_OP_LOAD_FIELD_S8
;
82 dbg_printk("op load field s16\n");
83 stack_top
->type
= REG_S64
;
84 if (!stack_top
->load
.rev_bo
)
85 insn
->op
= BYTECODE_OP_LOAD_FIELD_S16
;
88 dbg_printk("op load field s32\n");
89 stack_top
->type
= REG_S64
;
90 if (!stack_top
->load
.rev_bo
)
91 insn
->op
= BYTECODE_OP_LOAD_FIELD_S32
;
94 dbg_printk("op load field s64\n");
95 stack_top
->type
= REG_S64
;
96 if (!stack_top
->load
.rev_bo
)
97 insn
->op
= BYTECODE_OP_LOAD_FIELD_S64
;
99 case OBJECT_TYPE_SIGNED_ENUM
:
100 dbg_printk("op load field signed enumeration\n");
101 stack_top
->type
= REG_PTR
;
104 dbg_printk("op load field u8\n");
105 stack_top
->type
= REG_S64
;
106 insn
->op
= BYTECODE_OP_LOAD_FIELD_U8
;
108 case OBJECT_TYPE_U16
:
109 dbg_printk("op load field u16\n");
110 stack_top
->type
= REG_S64
;
111 if (!stack_top
->load
.rev_bo
)
112 insn
->op
= BYTECODE_OP_LOAD_FIELD_U16
;
114 case OBJECT_TYPE_U32
:
115 dbg_printk("op load field u32\n");
116 stack_top
->type
= REG_S64
;
117 if (!stack_top
->load
.rev_bo
)
118 insn
->op
= BYTECODE_OP_LOAD_FIELD_U32
;
120 case OBJECT_TYPE_U64
:
121 dbg_printk("op load field u64\n");
122 stack_top
->type
= REG_S64
;
123 if (!stack_top
->load
.rev_bo
)
124 insn
->op
= BYTECODE_OP_LOAD_FIELD_U64
;
126 case OBJECT_TYPE_UNSIGNED_ENUM
:
127 dbg_printk("op load field unsigned enumeration\n");
128 stack_top
->type
= REG_PTR
;
130 case OBJECT_TYPE_DOUBLE
:
131 printk(KERN_WARNING
"LTTng: bytecode: Double type unsupported\n\n");
134 case OBJECT_TYPE_STRING
:
135 dbg_printk("op load field string\n");
136 stack_top
->type
= REG_STRING
;
137 insn
->op
= BYTECODE_OP_LOAD_FIELD_STRING
;
139 case OBJECT_TYPE_STRING_SEQUENCE
:
140 dbg_printk("op load field string sequence\n");
141 stack_top
->type
= REG_STRING
;
142 insn
->op
= BYTECODE_OP_LOAD_FIELD_SEQUENCE
;
144 case OBJECT_TYPE_DYNAMIC
:
147 case OBJECT_TYPE_SEQUENCE
:
148 case OBJECT_TYPE_ARRAY
:
149 case OBJECT_TYPE_STRUCT
:
150 case OBJECT_TYPE_VARIANT
:
151 printk(KERN_WARNING
"LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
161 static int specialize_get_index_object_type(enum object_type
*otype
,
162 int signedness
, uint32_t elem_len
)
167 *otype
= OBJECT_TYPE_S8
;
169 *otype
= OBJECT_TYPE_U8
;
173 *otype
= OBJECT_TYPE_S16
;
175 *otype
= OBJECT_TYPE_U16
;
179 *otype
= OBJECT_TYPE_S32
;
181 *otype
= OBJECT_TYPE_U32
;
185 *otype
= OBJECT_TYPE_S64
;
187 *otype
= OBJECT_TYPE_U64
;
195 static int specialize_get_index(struct bytecode_runtime
*runtime
,
196 struct load_op
*insn
, uint64_t index
,
197 struct vstack_entry
*stack_top
,
201 struct bytecode_get_index_data gid
;
204 memset(&gid
, 0, sizeof(gid
));
205 switch (stack_top
->load
.type
) {
207 switch (stack_top
->load
.object_type
) {
208 case OBJECT_TYPE_ARRAY
:
210 const struct lttng_kernel_event_field
*field
;
211 const struct lttng_kernel_type_array
*array_type
;
212 const struct lttng_kernel_type_integer
*integer_type
;
213 uint32_t elem_len
, num_elems
;
216 field
= stack_top
->load
.field
;
217 array_type
= lttng_kernel_get_type_array(field
->type
);
218 if (!lttng_kernel_type_is_bytewise_integer(array_type
->elem_type
)) {
222 integer_type
= lttng_kernel_get_type_integer(array_type
->elem_type
);
223 num_elems
= array_type
->length
;
224 elem_len
= integer_type
->size
;
225 signedness
= integer_type
->signedness
;
226 if (index
>= num_elems
) {
230 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
231 signedness
, elem_len
);
234 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
235 gid
.array_len
= num_elems
* (elem_len
/ CHAR_BIT
);
236 gid
.elem
.type
= stack_top
->load
.object_type
;
237 gid
.elem
.len
= elem_len
;
238 if (integer_type
->reverse_byte_order
)
239 gid
.elem
.rev_bo
= true;
240 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
243 case OBJECT_TYPE_SEQUENCE
:
245 const struct lttng_kernel_event_field
*field
;
246 const struct lttng_kernel_type_sequence
*sequence_type
;
247 const struct lttng_kernel_type_integer
*integer_type
;
251 field
= stack_top
->load
.field
;
252 sequence_type
= lttng_kernel_get_type_sequence(field
->type
);
253 if (!lttng_kernel_type_is_bytewise_integer(sequence_type
->elem_type
)) {
257 integer_type
= lttng_kernel_get_type_integer(sequence_type
->elem_type
);
258 elem_len
= integer_type
->size
;
259 signedness
= integer_type
->signedness
;
260 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
261 signedness
, elem_len
);
264 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
265 gid
.elem
.type
= stack_top
->load
.object_type
;
266 gid
.elem
.len
= elem_len
;
267 if (integer_type
->reverse_byte_order
)
268 gid
.elem
.rev_bo
= true;
269 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
272 case OBJECT_TYPE_STRUCT
:
273 /* Only generated by the specialize phase. */
274 case OBJECT_TYPE_VARIANT
: /* Fall-through */
276 printk(KERN_WARNING
"LTTng: bytecode: Unexpected get index type %d",
277 (int) stack_top
->load
.object_type
);
282 case LOAD_ROOT_CONTEXT
:
283 case LOAD_ROOT_APP_CONTEXT
:
284 case LOAD_ROOT_PAYLOAD
:
285 printk(KERN_WARNING
"LTTng: bytecode: Index lookup for root field not implemented yet.\n");
289 data_offset
= bytecode_push_data(runtime
, &gid
,
290 __alignof__(gid
), sizeof(gid
));
291 if (data_offset
< 0) {
297 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
300 ((struct get_index_u64
*) insn
->data
)->index
= data_offset
;
313 static int specialize_context_lookup_name(struct lttng_kernel_ctx
*ctx
,
314 struct bytecode_runtime
*bytecode
,
315 struct load_op
*insn
)
320 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
321 name
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.reloc_offset
+ offset
;
322 return lttng_kernel_get_context_index(ctx
, name
);
325 static int specialize_load_object(const struct lttng_kernel_event_field
*field
,
326 struct vstack_load
*load
, bool is_context
)
328 load
->type
= LOAD_OBJECT
;
330 switch (field
->type
->type
) {
331 case lttng_kernel_type_integer
:
332 if (lttng_kernel_get_type_integer(field
->type
)->signedness
)
333 load
->object_type
= OBJECT_TYPE_S64
;
335 load
->object_type
= OBJECT_TYPE_U64
;
336 load
->rev_bo
= false;
338 case lttng_kernel_type_enum
:
340 const struct lttng_kernel_type_enum
*enum_type
= lttng_kernel_get_type_enum(field
->type
);
341 const struct lttng_kernel_type_integer
*integer_type
= lttng_kernel_get_type_integer(enum_type
->container_type
);
343 if (integer_type
->signedness
)
344 load
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
346 load
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
347 load
->rev_bo
= false;
350 case lttng_kernel_type_array
:
352 const struct lttng_kernel_type_array
*array_type
= lttng_kernel_get_type_array(field
->type
);
354 if (!lttng_kernel_type_is_bytewise_integer(array_type
->elem_type
)) {
355 printk(KERN_WARNING
"LTTng: bytecode: Array nesting only supports integer types.\n");
359 load
->object_type
= OBJECT_TYPE_STRING
;
361 if (array_type
->encoding
== lttng_kernel_string_encoding_none
) {
362 load
->object_type
= OBJECT_TYPE_ARRAY
;
365 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
370 case lttng_kernel_type_sequence
:
372 const struct lttng_kernel_type_sequence
*sequence_type
= lttng_kernel_get_type_sequence(field
->type
);
374 if (!lttng_kernel_type_is_bytewise_integer(sequence_type
->elem_type
)) {
375 printk(KERN_WARNING
"LTTng: bytecode: Sequence nesting only supports integer types.\n");
379 load
->object_type
= OBJECT_TYPE_STRING
;
381 if (sequence_type
->encoding
== lttng_kernel_string_encoding_none
) {
382 load
->object_type
= OBJECT_TYPE_SEQUENCE
;
385 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
390 case lttng_kernel_type_string
:
391 load
->object_type
= OBJECT_TYPE_STRING
;
393 case lttng_kernel_type_struct
:
394 printk(KERN_WARNING
"LTTng: bytecode: Structure type cannot be loaded.\n");
396 case lttng_kernel_type_variant
:
397 printk(KERN_WARNING
"LTTng: bytecode: Variant type cannot be loaded.\n");
400 printk(KERN_WARNING
"LTTng: bytecode: Unknown type: %d", (int) field
->type
->type
);
406 static int specialize_context_lookup(struct lttng_kernel_ctx
*ctx
,
407 struct bytecode_runtime
*runtime
,
408 struct load_op
*insn
,
409 struct vstack_load
*load
)
412 const struct lttng_kernel_ctx_field
*ctx_field
;
413 const struct lttng_kernel_event_field
*field
;
414 struct bytecode_get_index_data gid
;
417 idx
= specialize_context_lookup_name(ctx
, runtime
, insn
);
421 ctx_field
= <tng_static_ctx
->fields
[idx
];
422 field
= ctx_field
->event_field
;
423 ret
= specialize_load_object(field
, load
, true);
426 /* Specialize each get_symbol into a get_index. */
427 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
428 memset(&gid
, 0, sizeof(gid
));
430 gid
.elem
.type
= load
->object_type
;
431 gid
.elem
.rev_bo
= load
->rev_bo
;
433 data_offset
= bytecode_push_data(runtime
, &gid
,
434 __alignof__(gid
), sizeof(gid
));
435 if (data_offset
< 0) {
438 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
442 static int specialize_payload_lookup(const struct lttng_kernel_event_desc
*event_desc
,
443 struct bytecode_runtime
*runtime
,
444 struct load_op
*insn
,
445 struct vstack_load
*load
)
449 unsigned int i
, nr_fields
;
451 uint32_t field_offset
= 0;
452 const struct lttng_kernel_event_field
*field
;
454 struct bytecode_get_index_data gid
;
457 nr_fields
= event_desc
->nr_fields
;
458 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
459 name
= runtime
->p
.bc
->bc
.data
+ runtime
->p
.bc
->bc
.reloc_offset
+ offset
;
460 for (i
= 0; i
< nr_fields
; i
++) {
461 field
= event_desc
->fields
[i
];
462 if (field
->nofilter
) {
465 if (!strcmp(field
->name
, name
)) {
469 /* compute field offset on stack */
470 switch (field
->type
->type
) {
471 case lttng_kernel_type_integer
:
472 case lttng_kernel_type_enum
:
473 field_offset
+= sizeof(int64_t);
475 case lttng_kernel_type_array
:
476 case lttng_kernel_type_sequence
:
477 field_offset
+= sizeof(unsigned long);
478 field_offset
+= sizeof(void *);
480 case lttng_kernel_type_string
:
481 field_offset
+= sizeof(void *);
493 ret
= specialize_load_object(field
, load
, false);
497 /* Specialize each get_symbol into a get_index. */
498 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
499 memset(&gid
, 0, sizeof(gid
));
500 gid
.offset
= field_offset
;
501 gid
.elem
.type
= load
->object_type
;
502 gid
.elem
.rev_bo
= load
->rev_bo
;
504 data_offset
= bytecode_push_data(runtime
, &gid
,
505 __alignof__(gid
), sizeof(gid
));
506 if (data_offset
< 0) {
510 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
516 int lttng_bytecode_specialize(const struct lttng_kernel_event_desc
*event_desc
,
517 struct bytecode_runtime
*bytecode
)
519 void *pc
, *next_pc
, *start_pc
;
521 struct vstack _stack
;
522 struct vstack
*stack
= &_stack
;
523 struct lttng_kernel_ctx
*ctx
= bytecode
->p
.ctx
;
527 start_pc
= &bytecode
->code
[0];
528 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
530 switch (*(bytecode_opcode_t
*) pc
) {
531 case BYTECODE_OP_UNKNOWN
:
533 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
534 (unsigned int) *(bytecode_opcode_t
*) pc
);
538 case BYTECODE_OP_RETURN
:
539 case BYTECODE_OP_RETURN_S64
:
544 case BYTECODE_OP_MUL
:
545 case BYTECODE_OP_DIV
:
546 case BYTECODE_OP_MOD
:
547 case BYTECODE_OP_PLUS
:
548 case BYTECODE_OP_MINUS
:
549 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
550 (unsigned int) *(bytecode_opcode_t
*) pc
);
556 struct binary_op
*insn
= (struct binary_op
*) pc
;
558 switch(vstack_ax(stack
)->type
) {
560 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
565 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
566 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
568 insn
->op
= BYTECODE_OP_EQ_STRING
;
570 case REG_STAR_GLOB_STRING
:
571 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
574 if (vstack_bx(stack
)->type
== REG_S64
)
575 insn
->op
= BYTECODE_OP_EQ_S64
;
577 insn
->op
= BYTECODE_OP_EQ_DOUBLE_S64
;
580 if (vstack_bx(stack
)->type
== REG_S64
)
581 insn
->op
= BYTECODE_OP_EQ_S64_DOUBLE
;
583 insn
->op
= BYTECODE_OP_EQ_DOUBLE
;
587 if (vstack_pop(stack
)) {
591 vstack_ax(stack
)->type
= REG_S64
;
592 next_pc
+= sizeof(struct binary_op
);
598 struct binary_op
*insn
= (struct binary_op
*) pc
;
600 switch(vstack_ax(stack
)->type
) {
602 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
607 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
608 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
610 insn
->op
= BYTECODE_OP_NE_STRING
;
612 case REG_STAR_GLOB_STRING
:
613 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
616 if (vstack_bx(stack
)->type
== REG_S64
)
617 insn
->op
= BYTECODE_OP_NE_S64
;
619 insn
->op
= BYTECODE_OP_NE_DOUBLE_S64
;
622 if (vstack_bx(stack
)->type
== REG_S64
)
623 insn
->op
= BYTECODE_OP_NE_S64_DOUBLE
;
625 insn
->op
= BYTECODE_OP_NE_DOUBLE
;
629 if (vstack_pop(stack
)) {
633 vstack_ax(stack
)->type
= REG_S64
;
634 next_pc
+= sizeof(struct binary_op
);
640 struct binary_op
*insn
= (struct binary_op
*) pc
;
642 switch(vstack_ax(stack
)->type
) {
644 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
648 case REG_STAR_GLOB_STRING
:
649 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '>' binary operator\n");
653 insn
->op
= BYTECODE_OP_GT_STRING
;
656 if (vstack_bx(stack
)->type
== REG_S64
)
657 insn
->op
= BYTECODE_OP_GT_S64
;
659 insn
->op
= BYTECODE_OP_GT_DOUBLE_S64
;
662 if (vstack_bx(stack
)->type
== REG_S64
)
663 insn
->op
= BYTECODE_OP_GT_S64_DOUBLE
;
665 insn
->op
= BYTECODE_OP_GT_DOUBLE
;
669 if (vstack_pop(stack
)) {
673 vstack_ax(stack
)->type
= REG_S64
;
674 next_pc
+= sizeof(struct binary_op
);
680 struct binary_op
*insn
= (struct binary_op
*) pc
;
682 switch(vstack_ax(stack
)->type
) {
684 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
688 case REG_STAR_GLOB_STRING
:
689 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '<' binary operator\n");
693 insn
->op
= BYTECODE_OP_LT_STRING
;
696 if (vstack_bx(stack
)->type
== REG_S64
)
697 insn
->op
= BYTECODE_OP_LT_S64
;
699 insn
->op
= BYTECODE_OP_LT_DOUBLE_S64
;
702 if (vstack_bx(stack
)->type
== REG_S64
)
703 insn
->op
= BYTECODE_OP_LT_S64_DOUBLE
;
705 insn
->op
= BYTECODE_OP_LT_DOUBLE
;
709 if (vstack_pop(stack
)) {
713 vstack_ax(stack
)->type
= REG_S64
;
714 next_pc
+= sizeof(struct binary_op
);
720 struct binary_op
*insn
= (struct binary_op
*) pc
;
722 switch(vstack_ax(stack
)->type
) {
724 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
728 case REG_STAR_GLOB_STRING
:
729 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '>=' binary operator\n");
733 insn
->op
= BYTECODE_OP_GE_STRING
;
736 if (vstack_bx(stack
)->type
== REG_S64
)
737 insn
->op
= BYTECODE_OP_GE_S64
;
739 insn
->op
= BYTECODE_OP_GE_DOUBLE_S64
;
742 if (vstack_bx(stack
)->type
== REG_S64
)
743 insn
->op
= BYTECODE_OP_GE_S64_DOUBLE
;
745 insn
->op
= BYTECODE_OP_GE_DOUBLE
;
749 if (vstack_pop(stack
)) {
753 vstack_ax(stack
)->type
= REG_S64
;
754 next_pc
+= sizeof(struct binary_op
);
759 struct binary_op
*insn
= (struct binary_op
*) pc
;
761 switch(vstack_ax(stack
)->type
) {
763 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
767 case REG_STAR_GLOB_STRING
:
768 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '<=' binary operator\n");
772 insn
->op
= BYTECODE_OP_LE_STRING
;
775 if (vstack_bx(stack
)->type
== REG_S64
)
776 insn
->op
= BYTECODE_OP_LE_S64
;
778 insn
->op
= BYTECODE_OP_LE_DOUBLE_S64
;
781 if (vstack_bx(stack
)->type
== REG_S64
)
782 insn
->op
= BYTECODE_OP_LE_S64_DOUBLE
;
784 insn
->op
= BYTECODE_OP_LE_DOUBLE
;
787 vstack_ax(stack
)->type
= REG_S64
;
788 next_pc
+= sizeof(struct binary_op
);
792 case BYTECODE_OP_EQ_STRING
:
793 case BYTECODE_OP_NE_STRING
:
794 case BYTECODE_OP_GT_STRING
:
795 case BYTECODE_OP_LT_STRING
:
796 case BYTECODE_OP_GE_STRING
:
797 case BYTECODE_OP_LE_STRING
:
798 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
799 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
800 case BYTECODE_OP_EQ_S64
:
801 case BYTECODE_OP_NE_S64
:
802 case BYTECODE_OP_GT_S64
:
803 case BYTECODE_OP_LT_S64
:
804 case BYTECODE_OP_GE_S64
:
805 case BYTECODE_OP_LE_S64
:
806 case BYTECODE_OP_EQ_DOUBLE
:
807 case BYTECODE_OP_NE_DOUBLE
:
808 case BYTECODE_OP_GT_DOUBLE
:
809 case BYTECODE_OP_LT_DOUBLE
:
810 case BYTECODE_OP_GE_DOUBLE
:
811 case BYTECODE_OP_LE_DOUBLE
:
812 case BYTECODE_OP_EQ_DOUBLE_S64
:
813 case BYTECODE_OP_NE_DOUBLE_S64
:
814 case BYTECODE_OP_GT_DOUBLE_S64
:
815 case BYTECODE_OP_LT_DOUBLE_S64
:
816 case BYTECODE_OP_GE_DOUBLE_S64
:
817 case BYTECODE_OP_LE_DOUBLE_S64
:
818 case BYTECODE_OP_EQ_S64_DOUBLE
:
819 case BYTECODE_OP_NE_S64_DOUBLE
:
820 case BYTECODE_OP_GT_S64_DOUBLE
:
821 case BYTECODE_OP_LT_S64_DOUBLE
:
822 case BYTECODE_OP_GE_S64_DOUBLE
:
823 case BYTECODE_OP_LE_S64_DOUBLE
:
824 case BYTECODE_OP_BIT_RSHIFT
:
825 case BYTECODE_OP_BIT_LSHIFT
:
826 case BYTECODE_OP_BIT_AND
:
827 case BYTECODE_OP_BIT_OR
:
828 case BYTECODE_OP_BIT_XOR
:
831 if (vstack_pop(stack
)) {
835 vstack_ax(stack
)->type
= REG_S64
;
836 next_pc
+= sizeof(struct binary_op
);
841 case BYTECODE_OP_UNARY_PLUS
:
843 struct unary_op
*insn
= (struct unary_op
*) pc
;
845 switch(vstack_ax(stack
)->type
) {
847 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
852 insn
->op
= BYTECODE_OP_UNARY_PLUS_S64
;
855 insn
->op
= BYTECODE_OP_UNARY_PLUS_DOUBLE
;
859 next_pc
+= sizeof(struct unary_op
);
863 case BYTECODE_OP_UNARY_MINUS
:
865 struct unary_op
*insn
= (struct unary_op
*) pc
;
867 switch(vstack_ax(stack
)->type
) {
869 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
874 insn
->op
= BYTECODE_OP_UNARY_MINUS_S64
;
877 insn
->op
= BYTECODE_OP_UNARY_MINUS_DOUBLE
;
881 next_pc
+= sizeof(struct unary_op
);
885 case BYTECODE_OP_UNARY_NOT
:
887 struct unary_op
*insn
= (struct unary_op
*) pc
;
889 switch(vstack_ax(stack
)->type
) {
891 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
896 insn
->op
= BYTECODE_OP_UNARY_NOT_S64
;
899 insn
->op
= BYTECODE_OP_UNARY_NOT_DOUBLE
;
903 next_pc
+= sizeof(struct unary_op
);
907 case BYTECODE_OP_UNARY_BIT_NOT
:
910 next_pc
+= sizeof(struct unary_op
);
914 case BYTECODE_OP_UNARY_PLUS_S64
:
915 case BYTECODE_OP_UNARY_MINUS_S64
:
916 case BYTECODE_OP_UNARY_NOT_S64
:
917 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
918 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
919 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
922 next_pc
+= sizeof(struct unary_op
);
927 case BYTECODE_OP_AND
:
930 /* Continue to next instruction */
931 /* Pop 1 when jump not taken */
932 if (vstack_pop(stack
)) {
936 next_pc
+= sizeof(struct logical_op
);
941 case BYTECODE_OP_LOAD_FIELD_REF
:
943 printk(KERN_WARNING
"LTTng: bytecode: Unknown field ref type\n");
947 /* get context ref */
948 case BYTECODE_OP_GET_CONTEXT_REF
:
950 printk(KERN_WARNING
"LTTng: bytecode: Unknown get context ref type\n");
954 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
955 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
956 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
957 case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
:
958 case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
:
960 if (vstack_push(stack
)) {
964 vstack_ax(stack
)->type
= REG_STRING
;
965 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
968 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
969 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
971 if (vstack_push(stack
)) {
975 vstack_ax(stack
)->type
= REG_S64
;
976 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
979 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
980 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
982 if (vstack_push(stack
)) {
986 vstack_ax(stack
)->type
= REG_DOUBLE
;
987 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
991 /* load from immediate operand */
992 case BYTECODE_OP_LOAD_STRING
:
994 struct load_op
*insn
= (struct load_op
*) pc
;
996 if (vstack_push(stack
)) {
1000 vstack_ax(stack
)->type
= REG_STRING
;
1001 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1005 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1007 struct load_op
*insn
= (struct load_op
*) pc
;
1009 if (vstack_push(stack
)) {
1013 vstack_ax(stack
)->type
= REG_STAR_GLOB_STRING
;
1014 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1018 case BYTECODE_OP_LOAD_S64
:
1020 if (vstack_push(stack
)) {
1024 vstack_ax(stack
)->type
= REG_S64
;
1025 next_pc
+= sizeof(struct load_op
)
1026 + sizeof(struct literal_numeric
);
1030 case BYTECODE_OP_LOAD_DOUBLE
:
1032 if (vstack_push(stack
)) {
1036 vstack_ax(stack
)->type
= REG_DOUBLE
;
1037 next_pc
+= sizeof(struct load_op
)
1038 + sizeof(struct literal_double
);
1043 case BYTECODE_OP_CAST_TO_S64
:
1045 struct cast_op
*insn
= (struct cast_op
*) pc
;
1047 switch (vstack_ax(stack
)->type
) {
1049 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
1054 case REG_STAR_GLOB_STRING
:
1055 printk(KERN_WARNING
"LTTng: bytecode: Cast op can only be applied to numeric or floating point registers\n");
1059 insn
->op
= BYTECODE_OP_CAST_NOP
;
1062 insn
->op
= BYTECODE_OP_CAST_DOUBLE_TO_S64
;
1066 vstack_ax(stack
)->type
= REG_S64
;
1067 next_pc
+= sizeof(struct cast_op
);
1070 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1073 vstack_ax(stack
)->type
= REG_S64
;
1074 next_pc
+= sizeof(struct cast_op
);
1077 case BYTECODE_OP_CAST_NOP
:
1079 next_pc
+= sizeof(struct cast_op
);
1084 * Instructions for recursive traversal through composed types.
1086 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1088 if (vstack_push(stack
)) {
1092 vstack_ax(stack
)->type
= REG_PTR
;
1093 vstack_ax(stack
)->load
.type
= LOAD_ROOT_CONTEXT
;
1094 next_pc
+= sizeof(struct load_op
);
1097 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1099 if (vstack_push(stack
)) {
1103 vstack_ax(stack
)->type
= REG_PTR
;
1104 vstack_ax(stack
)->load
.type
= LOAD_ROOT_APP_CONTEXT
;
1105 next_pc
+= sizeof(struct load_op
);
1108 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1110 if (vstack_push(stack
)) {
1114 vstack_ax(stack
)->type
= REG_PTR
;
1115 vstack_ax(stack
)->load
.type
= LOAD_ROOT_PAYLOAD
;
1116 next_pc
+= sizeof(struct load_op
);
1120 case BYTECODE_OP_LOAD_FIELD
:
1122 struct load_op
*insn
= (struct load_op
*) pc
;
1124 WARN_ON_ONCE(vstack_ax(stack
)->type
!= REG_PTR
);
1126 ret
= specialize_load_field(vstack_ax(stack
), insn
);
1130 next_pc
+= sizeof(struct load_op
);
1134 case BYTECODE_OP_LOAD_FIELD_S8
:
1135 case BYTECODE_OP_LOAD_FIELD_S16
:
1136 case BYTECODE_OP_LOAD_FIELD_S32
:
1137 case BYTECODE_OP_LOAD_FIELD_S64
:
1138 case BYTECODE_OP_LOAD_FIELD_U8
:
1139 case BYTECODE_OP_LOAD_FIELD_U16
:
1140 case BYTECODE_OP_LOAD_FIELD_U32
:
1141 case BYTECODE_OP_LOAD_FIELD_U64
:
1144 vstack_ax(stack
)->type
= REG_S64
;
1145 next_pc
+= sizeof(struct load_op
);
1149 case BYTECODE_OP_LOAD_FIELD_STRING
:
1150 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1153 vstack_ax(stack
)->type
= REG_STRING
;
1154 next_pc
+= sizeof(struct load_op
);
1158 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1161 vstack_ax(stack
)->type
= REG_DOUBLE
;
1162 next_pc
+= sizeof(struct load_op
);
1166 case BYTECODE_OP_GET_SYMBOL
:
1168 struct load_op
*insn
= (struct load_op
*) pc
;
1170 dbg_printk("op get symbol\n");
1171 switch (vstack_ax(stack
)->load
.type
) {
1173 printk(KERN_WARNING
"LTTng: bytecode: Nested fields not implemented yet.\n");
1176 case LOAD_ROOT_CONTEXT
:
1177 /* Lookup context field. */
1178 ret
= specialize_context_lookup(ctx
, bytecode
, insn
,
1179 &vstack_ax(stack
)->load
);
1183 case LOAD_ROOT_APP_CONTEXT
:
1186 case LOAD_ROOT_PAYLOAD
:
1187 /* Lookup event payload field. */
1188 ret
= specialize_payload_lookup(event_desc
,
1190 &vstack_ax(stack
)->load
);
1195 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1199 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1201 /* Always generated by specialize phase. */
1206 case BYTECODE_OP_GET_INDEX_U16
:
1208 struct load_op
*insn
= (struct load_op
*) pc
;
1209 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1211 dbg_printk("op get index u16\n");
1213 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1214 vstack_ax(stack
), sizeof(*index
));
1217 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1221 case BYTECODE_OP_GET_INDEX_U64
:
1223 struct load_op
*insn
= (struct load_op
*) pc
;
1224 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1226 dbg_printk("op get index u64\n");
1228 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1229 vstack_ax(stack
), sizeof(*index
));
1232 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);