/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng UST bytecode specializer.
 */

#include <stddef.h>
#include <stdint.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include <limits.h>
#include <errno.h>
#include <assert.h>
#include <sys/types.h>

#include <lttng/ust-align.h>

#include "context-internal.h"
#include "lttng-bytecode.h"
#include "ust-events-internal.h"
#include "ust-helper.h"
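/*
 * lttng_fls() mirrors the classic fls(): it returns the 1-based index of
 * the most significant bit set in "val", or 0 when no bit is set. For
 * example, lttng_fls(0) == 0, lttng_fls(1) == 1 and
 * lttng_fls(0x80000000) == 32. It is used below to round allocation
 * sizes up to powers of two.
 */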
static int lttng_fls(int val)
{
	int r = 32;
	unsigned int x = (unsigned int) val;

	if (!x)
		return 0;
	if (!(x & 0xFFFF0000U)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xFF000000U)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xF0000000U)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xC0000000U)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000U)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
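/*
 * get_count_order() returns ceil(log2(count)): the smallest order such
 * that (1U << order) >= count. For instance, get_count_order(8) == 3 and
 * get_count_order(9) == 4.
 */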
static int get_count_order(unsigned int count)
{
	int order;

	order = lttng_fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
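/*
 * bytecode_reserve_data() reserves "len" bytes (aligned on "align") in
 * the runtime data area and returns the offset of the reserved region,
 * or a negative value on error. The backing buffer grows geometrically:
 * the new allocation size is the larger of the next power of two
 * covering the request and twice the previous allocation. For example,
 * growing a 16-byte buffer to hold 24 bytes of data reallocates to
 * max(1 << get_count_order(24), 16 << 1) == 32 bytes.
 */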
static ssize_t bytecode_reserve_data(struct bytecode_runtime *runtime,
		size_t align, size_t len)
{
	ssize_t ret;
	size_t padding = lttng_ust_offset_align(runtime->data_len, align);
	size_t new_len = runtime->data_len + padding + len;
	size_t new_alloc_len = new_len;
	size_t old_alloc_len = runtime->data_alloc_len;

	if (new_len > BYTECODE_MAX_DATA_LEN)
		return -EINVAL;

	if (new_alloc_len > old_alloc_len) {
		char *newptr;

		new_alloc_len =
			max_t(size_t, 1U << get_count_order(new_alloc_len), old_alloc_len << 1);
		newptr = realloc(runtime->data, new_alloc_len);
		if (!newptr)
			return -ENOMEM;
		runtime->data = newptr;
		/* We zero directly the memory from start of allocation. */
		memset(&runtime->data[old_alloc_len], 0, new_alloc_len - old_alloc_len);
		runtime->data_alloc_len = new_alloc_len;
	}
	runtime->data_len += padding;
	ret = runtime->data_len;
	runtime->data_len += len;
	return ret;
}
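/*
 * bytecode_push_data() copies "len" bytes from "p" into the runtime data
 * area, aligned on "align", and returns the offset at which the copy was
 * stored. The specializer uses it to append one struct
 * bytecode_get_index_data per specialized get_index instruction and to
 * record that offset in the instruction itself.
 */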
static ssize_t bytecode_push_data(struct bytecode_runtime *runtime,
		const void *p, size_t align, size_t len)
{
	ssize_t offset;

	offset = bytecode_reserve_data(runtime, align, len);
	if (offset < 0)
		return -ENOMEM;
	memcpy(&runtime->data[offset], p, len);
	return offset;
}
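/*
 * specialize_load_field() rewrites a generic BYTECODE_OP_LOAD_FIELD into
 * a typed load opcode based on the object type sitting on top of the
 * virtual stack, and records the resulting register type. For instance,
 * loading a native-endian 32-bit signed integer field turns the
 * instruction into BYTECODE_OP_LOAD_FIELD_S32 and leaves a REG_S64 on
 * the stack. Reverse byte order integers and enumerations keep the
 * generic opcode.
 */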
static int specialize_load_field(struct vstack_entry *stack_top,
		struct load_op *insn)
{
	int ret;

	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		dbg_printf("Bytecode warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->load.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printf("op load field s8\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S8;
		break;
	case OBJECT_TYPE_S16:
		dbg_printf("op load field s16\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S16;
		break;
	case OBJECT_TYPE_S32:
		dbg_printf("op load field s32\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S32;
		break;
	case OBJECT_TYPE_S64:
		dbg_printf("op load field s64\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S64;
		break;
	case OBJECT_TYPE_SIGNED_ENUM:
		dbg_printf("op load field signed enumeration\n");
		stack_top->type = REG_PTR;
		break;
	case OBJECT_TYPE_U8:
		dbg_printf("op load field u8\n");
		stack_top->type = REG_U64;
		insn->op = BYTECODE_OP_LOAD_FIELD_U8;
		break;
	case OBJECT_TYPE_U16:
		dbg_printf("op load field u16\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U16;
		break;
	case OBJECT_TYPE_U32:
		dbg_printf("op load field u32\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U32;
		break;
	case OBJECT_TYPE_U64:
		dbg_printf("op load field u64\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U64;
		break;
	case OBJECT_TYPE_UNSIGNED_ENUM:
		dbg_printf("op load field unsigned enumeration\n");
		stack_top->type = REG_PTR;
		break;
	case OBJECT_TYPE_DOUBLE:
		stack_top->type = REG_DOUBLE;
		insn->op = BYTECODE_OP_LOAD_FIELD_DOUBLE;
		break;
	case OBJECT_TYPE_STRING:
		dbg_printf("op load field string\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_STRING;
		break;
	case OBJECT_TYPE_STRING_SEQUENCE:
		dbg_printf("op load field string sequence\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_SEQUENCE;
		break;
	case OBJECT_TYPE_DYNAMIC:
		dbg_printf("op load field dynamic\n");
		stack_top->type = REG_UNKNOWN;
		/* Don't specialize load op. */
		break;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
static int specialize_get_index_object_type(enum object_type *otype,
		int signedness, uint32_t elem_len)
{
	switch (elem_len) {
	case 8:
		if (signedness)
			*otype = OBJECT_TYPE_S8;
		else
			*otype = OBJECT_TYPE_U8;
		break;
	case 16:
		if (signedness)
			*otype = OBJECT_TYPE_S16;
		else
			*otype = OBJECT_TYPE_U16;
		break;
	case 32:
		if (signedness)
			*otype = OBJECT_TYPE_S32;
		else
			*otype = OBJECT_TYPE_U32;
		break;
	case 64:
		if (signedness)
			*otype = OBJECT_TYPE_S64;
		else
			*otype = OBJECT_TYPE_U64;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}
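/*
 * specialize_get_index() specializes an array or sequence element
 * access. It validates that the element type is an integer, derives the
 * element object type from its signedness and size, and fills a struct
 * bytecode_get_index_data describing the byte offset of the element
 * (index * element size in bytes). That descriptor is pushed into the
 * runtime data area and referenced by the get_index instruction.
 */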
static int specialize_get_index(struct bytecode_runtime *runtime,
		struct load_op *insn, uint64_t index,
		struct vstack_entry *stack_top,
		int idx_len)
{
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	memset(&gid, 0, sizeof(gid));
	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		switch (stack_top->load.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			struct lttng_ust_type_integer *integer_type;
			struct lttng_ust_event_field *field;
			uint32_t elem_len, num_elems;
			int signedness;

			field = stack_top->load.field;
			switch (field->type->type) {
			case lttng_ust_type_array:
				if (lttng_ust_get_type_array(field->type)->elem_type->type != lttng_ust_type_integer) {
					ret = -EINVAL;
					goto end;
				}
				integer_type = lttng_ust_get_type_integer(lttng_ust_get_type_array(field->type)->elem_type);
				num_elems = lttng_ust_get_type_array(field->type)->length;
				break;
			default:
				ret = -EINVAL;
				goto end;
			}
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			if (index >= num_elems) {
				ret = -EINVAL;
				goto end;
			}
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.array_len = num_elems * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			struct lttng_ust_type_integer *integer_type;
			struct lttng_ust_event_field *field;
			uint32_t elem_len;
			int signedness;

			field = stack_top->load.field;
			switch (field->type->type) {
			case lttng_ust_type_sequence:
				if (lttng_ust_get_type_sequence(field->type)->elem_type->type != lttng_ust_type_integer) {
					ret = -EINVAL;
					goto end;
				}
				integer_type = lttng_ust_get_type_integer(lttng_ust_get_type_sequence(field->type)->elem_type);
				break;
			default:
				ret = -EINVAL;
				goto end;
			}
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			/* Only generated by the specialize phase. */
		case OBJECT_TYPE_VARIANT:	/* Fall-through */
		default:
			ERR("Unexpected get index type %d",
				(int) stack_top->load.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
		ERR("Index lookup for root field not implemented yet.");
		ret = -EINVAL;
		goto end;
	}
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	switch (idx_len) {
	case 2:
		((struct get_index_u16 *) insn->data)->index = data_offset;
		break;
	case 8:
		((struct get_index_u64 *) insn->data)->index = data_offset;
		break;
	default:
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
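/*
 * Resolve the field name referenced by a get_symbol instruction: the
 * instruction stores an offset into the bytecode relocation table, where
 * the name is found, and the name is then looked up among the context
 * fields.
 */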
static int specialize_context_lookup_name(struct lttng_ust_ctx *ctx,
		struct bytecode_runtime *bytecode,
		struct load_op *insn)
{
	uint16_t offset;
	const char *name;

	offset = ((struct get_symbol *) insn->data)->offset;
	name = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + offset;
	return lttng_get_context_index(ctx, name);
}
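/*
 * specialize_load_object() chooses the object type used by later loads
 * for a given event or context field: integers map to 64-bit
 * signed/unsigned objects, enumerations to signed/unsigned enum objects,
 * arrays and sequences of integers either to string objects (for
 * contexts or text encodings) or to array/sequence objects, strings to
 * string objects, floats to double, and dynamic fields stay dynamic.
 */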
static int specialize_load_object(struct lttng_ust_event_field *field,
		struct vstack_load *load, bool is_context)
{
	load->type = LOAD_OBJECT;

	switch (field->type->type) {
	case lttng_ust_type_integer:
		if (lttng_ust_get_type_integer(field->type)->signedness)
			load->object_type = OBJECT_TYPE_S64;
		else
			load->object_type = OBJECT_TYPE_U64;
		load->rev_bo = false;
		break;
	case lttng_ust_type_enum:
	{
		struct lttng_ust_type_integer *itype;

		itype = lttng_ust_get_type_integer(lttng_ust_get_type_enum(field->type)->container_type);
		if (itype->signedness)
			load->object_type = OBJECT_TYPE_SIGNED_ENUM;
		else
			load->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
		load->rev_bo = false;
		break;
	}
	case lttng_ust_type_array:
		if (lttng_ust_get_type_array(field->type)->elem_type->type != lttng_ust_type_integer) {
			ERR("Array nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (lttng_ust_get_type_array(field->type)->encoding == lttng_ust_string_encoding_none) {
				load->object_type = OBJECT_TYPE_ARRAY;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case lttng_ust_type_sequence:
		if (lttng_ust_get_type_sequence(field->type)->elem_type->type != lttng_ust_type_integer) {
			ERR("Sequence nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (lttng_ust_get_type_sequence(field->type)->encoding == lttng_ust_string_encoding_none) {
				load->object_type = OBJECT_TYPE_SEQUENCE;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case lttng_ust_type_string:
		load->object_type = OBJECT_TYPE_STRING;
		break;
	case lttng_ust_type_float:
		load->object_type = OBJECT_TYPE_DOUBLE;
		break;
	case lttng_ust_type_dynamic:
		load->object_type = OBJECT_TYPE_DYNAMIC;
		break;
	default:
		ERR("Unknown type: %d", (int) field->type->type);
		return -EINVAL;
	}
	return 0;
}
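/*
 * Specialize a get_symbol lookup on the static context: find the context
 * field by name, derive its load object type, then rewrite the
 * instruction into BYTECODE_OP_GET_INDEX_U16 whose get_index data
 * records the resolved context index and element type.
 */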
static int specialize_context_lookup(struct lttng_ust_ctx *ctx,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	int idx, ret;
	struct lttng_ust_ctx_field *ctx_field;
	struct lttng_ust_event_field *field;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	idx = specialize_context_lookup_name(ctx, runtime, insn);
	if (idx < 0) {
		return -ENOENT;
	}
	ctx_field = ctx->fields[idx];
	field = ctx_field->event_field;
	ret = specialize_load_object(field, load, true);
	if (ret)
		return ret;
	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.ctx_index = idx;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		return -EINVAL;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	return 0;
}
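/*
 * Same as specialize_context_lookup(), but for application-provided
 * contexts: the looked-up name is prefixed with "$app." and, when the
 * context is not registered yet, it is added to the RCU context table
 * before retrying the lookup.
 */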
static int specialize_app_context_lookup(struct lttng_ust_ctx **pctx,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	uint16_t offset;
	const char *orig_name;
	char *name = NULL;
	int idx, ret;
	struct lttng_ust_ctx_field *ctx_field;
	struct lttng_ust_event_field *field;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	offset = ((struct get_symbol *) insn->data)->offset;
	orig_name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
	name = zmalloc(strlen(orig_name) + strlen("$app.") + 1);
	if (!name) {
		ret = -ENOMEM;
		goto end;
	}
	strcpy(name, "$app.");
	strcat(name, orig_name);
	idx = lttng_get_context_index(*pctx, name);
	if (idx < 0) {
		assert(lttng_context_is_app(name));
		ret = lttng_ust_add_app_context_to_ctx_rcu(name,
				pctx);
		if (ret)
			goto end;
		idx = lttng_get_context_index(*pctx, name);
		if (idx < 0) {
			ret = -ENOENT;
			goto end;
		}
	}
	ctx_field = (*pctx)->fields[idx];
	field = ctx_field->event_field;
	ret = specialize_load_object(field, load, true);
	if (ret)
		goto end;
	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.ctx_index = idx;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	ret = 0;
end:
	free(name);
	return ret;
}
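/*
 * Specialize a get_symbol lookup on the event payload: walk the event
 * field descriptions, accumulating the size each preceding field
 * occupies on the interpreter stack frame (8 bytes for integers and
 * enums, a length plus a pointer for arrays and sequences, a pointer for
 * strings, a double for floats), until the requested field name matches.
 * The accumulated offset is stored in the get_index data.
 */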
static int specialize_payload_lookup(struct lttng_ust_event_desc *event_desc,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	const char *name;
	uint16_t offset;
	unsigned int i, nr_fields;
	bool found = false;
	uint32_t field_offset = 0;
	struct lttng_ust_event_field *field;
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	nr_fields = event_desc->nr_fields;
	offset = ((struct get_symbol *) insn->data)->offset;
	name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
	for (i = 0; i < nr_fields; i++) {
		field = event_desc->fields[i];
		if (field->nofilter) {
			continue;
		}
		if (!strcmp(field->name, name)) {
			found = true;
			break;
		}
		/* compute field offset on stack */
		switch (field->type->type) {
		case lttng_ust_type_integer:
		case lttng_ust_type_enum:
			field_offset += sizeof(int64_t);
			break;
		case lttng_ust_type_array:
		case lttng_ust_type_sequence:
			field_offset += sizeof(unsigned long);
			field_offset += sizeof(void *);
			break;
		case lttng_ust_type_string:
			field_offset += sizeof(void *);
			break;
		case lttng_ust_type_float:
			field_offset += sizeof(double);
			break;
		default:
			ret = -EINVAL;
			goto end;
		}
	}
	if (!found) {
		ret = -EINVAL;
		goto end;
	}

	ret = specialize_load_object(field, load, false);
	if (ret)
		goto end;

	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.offset = field_offset;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	ret = 0;
end:
	return ret;
}
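/*
 * lttng_bytecode_specialize() is a single pass over the bytecode that
 * tracks the type of each virtual-stack register and rewrites generic,
 * dynamically-typed opcodes into statically-typed ones when the operand
 * types are known. For example, an equality test whose two operands are
 * known to be strings becomes BYTECODE_OP_EQ_STRING, while a comparison
 * against a REG_UNKNOWN operand is left generic for the interpreter to
 * resolve at runtime.
 */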
int lttng_bytecode_specialize(struct lttng_ust_event_desc *event_desc,
		struct bytecode_runtime *bytecode)
{
	void *pc, *next_pc, *start_pc;
	int ret = -EINVAL;
	struct vstack _stack;
	struct vstack *stack = &_stack;
	struct lttng_ust_ctx **pctx = bytecode->p.pctx;

	vstack_init(stack);

	start_pc = &bytecode->code[0];
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
			pc = next_pc) {
		switch (*(bytecode_opcode_t *) pc) {
		case BYTECODE_OP_UNKNOWN:
		default:
			ERR("unknown bytecode op %u\n",
				(unsigned int) *(bytecode_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		case BYTECODE_OP_RETURN:
			if (vstack_ax(stack)->type == REG_S64 ||
					vstack_ax(stack)->type == REG_U64)
				*(bytecode_opcode_t *) pc = BYTECODE_OP_RETURN_S64;
			ret = 0;
			goto end;

		case BYTECODE_OP_RETURN_S64:
			if (vstack_ax(stack)->type != REG_S64 &&
					vstack_ax(stack)->type != REG_U64) {
				ERR("Unexpected register type\n");
				ret = -EINVAL;
				goto end;
			}
			ret = 0;
			goto end;

		/* binary */
		case BYTECODE_OP_MUL:
		case BYTECODE_OP_DIV:
		case BYTECODE_OP_MOD:
		case BYTECODE_OP_PLUS:
		case BYTECODE_OP_MINUS:
			ERR("unsupported bytecode op %u\n",
				(unsigned int) *(bytecode_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		case BYTECODE_OP_EQ:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
					insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
				else
					insn->op = BYTECODE_OP_EQ_STRING;
				break;
			case REG_STAR_GLOB_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_EQ_S64;
				else
					insn->op = BYTECODE_OP_EQ_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_EQ_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_EQ_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_NE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
					insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
				else
					insn->op = BYTECODE_OP_NE_STRING;
				break;
			case REG_STAR_GLOB_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_NE_S64;
				else
					insn->op = BYTECODE_OP_NE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_NE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_NE_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_GT:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for > binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_GT_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GT_S64;
				else
					insn->op = BYTECODE_OP_GT_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GT_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_GT_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_LT:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for < binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_LT_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LT_S64;
				else
					insn->op = BYTECODE_OP_LT_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LT_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_LT_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_GE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for >= binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_GE_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GE_S64;
				else
					insn->op = BYTECODE_OP_GE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_GE_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_LE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for <= binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_LE_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LE_S64;
				else
					insn->op = BYTECODE_OP_LE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_LE_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		/* Already specialized comparisons. */
		case BYTECODE_OP_EQ_STRING:
		case BYTECODE_OP_NE_STRING:
		case BYTECODE_OP_GT_STRING:
		case BYTECODE_OP_LT_STRING:
		case BYTECODE_OP_GE_STRING:
		case BYTECODE_OP_LE_STRING:
		case BYTECODE_OP_EQ_STAR_GLOB_STRING:
		case BYTECODE_OP_NE_STAR_GLOB_STRING:
		case BYTECODE_OP_EQ_S64:
		case BYTECODE_OP_NE_S64:
		case BYTECODE_OP_GT_S64:
		case BYTECODE_OP_LT_S64:
		case BYTECODE_OP_GE_S64:
		case BYTECODE_OP_LE_S64:
		case BYTECODE_OP_EQ_DOUBLE:
		case BYTECODE_OP_NE_DOUBLE:
		case BYTECODE_OP_GT_DOUBLE:
		case BYTECODE_OP_LT_DOUBLE:
		case BYTECODE_OP_GE_DOUBLE:
		case BYTECODE_OP_LE_DOUBLE:
		case BYTECODE_OP_EQ_DOUBLE_S64:
		case BYTECODE_OP_NE_DOUBLE_S64:
		case BYTECODE_OP_GT_DOUBLE_S64:
		case BYTECODE_OP_LT_DOUBLE_S64:
		case BYTECODE_OP_GE_DOUBLE_S64:
		case BYTECODE_OP_LE_DOUBLE_S64:
		case BYTECODE_OP_EQ_S64_DOUBLE:
		case BYTECODE_OP_NE_S64_DOUBLE:
		case BYTECODE_OP_GT_S64_DOUBLE:
		case BYTECODE_OP_LT_S64_DOUBLE:
		case BYTECODE_OP_GE_S64_DOUBLE:
		case BYTECODE_OP_LE_S64_DOUBLE:
		{
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_BIT_RSHIFT:
		case BYTECODE_OP_BIT_LSHIFT:
		case BYTECODE_OP_BIT_AND:
		case BYTECODE_OP_BIT_OR:
		case BYTECODE_OP_BIT_XOR:
		{
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		/* unary */
		case BYTECODE_OP_UNARY_PLUS:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
			case REG_U64:
				insn->op = BYTECODE_OP_UNARY_PLUS_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_PLUS_DOUBLE;
				break;
			case REG_UNKNOWN:	/* Dynamic typing. */
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_MINUS:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
			case REG_U64:
				insn->op = BYTECODE_OP_UNARY_MINUS_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_MINUS_DOUBLE;
				break;
			case REG_UNKNOWN:	/* Dynamic typing. */
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_NOT:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
			case REG_U64:
				insn->op = BYTECODE_OP_UNARY_NOT_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_NOT_DOUBLE;
				break;
			case REG_UNKNOWN:	/* Dynamic typing. */
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_BIT_NOT:
		{
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_PLUS_S64:
		case BYTECODE_OP_UNARY_MINUS_S64:
		case BYTECODE_OP_UNARY_NOT_S64:
		case BYTECODE_OP_UNARY_PLUS_DOUBLE:
		case BYTECODE_OP_UNARY_MINUS_DOUBLE:
		case BYTECODE_OP_UNARY_NOT_DOUBLE:
		{
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		/* logical */
		case BYTECODE_OP_AND:
		case BYTECODE_OP_OR:
		{
			/* Continue to next instruction */
			/* Pop 1 when jump not taken */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			next_pc += sizeof(struct logical_op);
			break;
		}

		/* load field ref */
		case BYTECODE_OP_LOAD_FIELD_REF:
		{
			ERR("Unknown field ref type\n");
			ret = -EINVAL;
			goto end;
		}
		/* get context ref */
		case BYTECODE_OP_GET_CONTEXT_REF:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_UNKNOWN;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_STRING:
		case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
		case BYTECODE_OP_GET_CONTEXT_REF_STRING:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_S64:
		case BYTECODE_OP_GET_CONTEXT_REF_S64:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
		case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}

		/* load from immediate operand */
		case BYTECODE_OP_LOAD_STRING:
		{
			struct load_op *insn = (struct load_op *) pc;

			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			break;
		}

		case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
		{
			struct load_op *insn = (struct load_op *) pc;

			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			break;
		}

		case BYTECODE_OP_LOAD_S64:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_numeric);
			break;
		}

		case BYTECODE_OP_LOAD_DOUBLE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_double);
			break;
		}

		/* cast */
		case BYTECODE_OP_CAST_TO_S64:
		{
			struct cast_op *insn = (struct cast_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
			case REG_STAR_GLOB_STRING:
				ERR("Cast op can only be applied to numeric or floating point registers\n");
				ret = -EINVAL;
				goto end;
			case REG_S64:
			case REG_U64:
				insn->op = BYTECODE_OP_CAST_NOP;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_CAST_DOUBLE_TO_S64;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct cast_op);
			break;
		}
		case BYTECODE_OP_CAST_DOUBLE_TO_S64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct cast_op);
			break;
		}
		case BYTECODE_OP_CAST_NOP:
		{
			next_pc += sizeof(struct cast_op);
			break;
		}

		/*
		 * Instructions for recursive traversal through composed types.
		 */
		case BYTECODE_OP_GET_CONTEXT_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_CONTEXT;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_APP_CONTEXT;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_GET_PAYLOAD_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_PAYLOAD;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD:
		{
			struct load_op *insn = (struct load_op *) pc;

			assert(vstack_ax(stack)->type == REG_PTR);
			/* Pop 1, push 1 */
			ret = specialize_load_field(vstack_ax(stack), insn);
			if (ret)
				goto end;

			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_S8:
		case BYTECODE_OP_LOAD_FIELD_S16:
		case BYTECODE_OP_LOAD_FIELD_S32:
		case BYTECODE_OP_LOAD_FIELD_S64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_U8:
		case BYTECODE_OP_LOAD_FIELD_U16:
		case BYTECODE_OP_LOAD_FIELD_U32:
		case BYTECODE_OP_LOAD_FIELD_U64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_U64;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_STRING:
		case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_DOUBLE:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_GET_SYMBOL:
		{
			struct load_op *insn = (struct load_op *) pc;

			dbg_printf("op get symbol\n");
			switch (vstack_ax(stack)->load.type) {
			case LOAD_OBJECT:
				ERR("Nested fields not implemented yet.");
				ret = -EINVAL;
				goto end;
			case LOAD_ROOT_CONTEXT:
				/* Lookup context field. */
				ret = specialize_context_lookup(*pctx,
					bytecode, insn,
					&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			case LOAD_ROOT_APP_CONTEXT:
				/* Lookup app context field. */
				ret = specialize_app_context_lookup(pctx,
					bytecode, insn,
					&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			case LOAD_ROOT_PAYLOAD:
				/* Lookup event payload field. */
				ret = specialize_payload_lookup(event_desc,
					bytecode, insn,
					&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			}
			next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
			break;
		}

		case BYTECODE_OP_GET_SYMBOL_FIELD:
		{
			/* Always generated by specialize phase. */
			ret = -EINVAL;
			goto end;
		}

		case BYTECODE_OP_GET_INDEX_U16:
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u16 *index = (struct get_index_u16 *) insn->data;

			dbg_printf("op get index u16\n");
			/* Pop 1, push 1 */
			ret = specialize_get_index(bytecode, insn, index->index,
					vstack_ax(stack), sizeof(*index));
			if (ret)
				goto end;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
			break;
		}

		case BYTECODE_OP_GET_INDEX_U64:
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u64 *index = (struct get_index_u64 *) insn->data;

			dbg_printf("op get index u64\n");
			/* Pop 1, push 1 */
			ret = specialize_get_index(bytecode, insn, index->index,
					vstack_ax(stack), sizeof(*index));
			if (ret)
				goto end;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
			break;
		}