/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng UST bytecode specializer.
 */
#include <assert.h>
#include <errno.h>
#include <limits.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#include <lttng/ust-utils.h>

#include "context-internal.h"
#include "lttng-bytecode.h"
#include "lib/lttng-ust/events.h"
#include "common/macros.h"
#include "common/tracer.h"
/*
 * Find last (most significant) bit set, 1-based.
 * Returns 0 if no bit is set (val == 0), else the 1-based position of
 * the highest set bit (e.g. 1 for 0x1, 32 for 0x80000000).
 * Binary-search implementation: each test halves the remaining range.
 */
static int lttng_fls(int val)
{
	int r = 32;
	unsigned int x = (unsigned int) val;

	if (!x)
		return 0;
	if (!(x & 0xFFFF0000U)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xFF000000U)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xF0000000U)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xC0000000U)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000U)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
/*
 * Return ceil(log2(count)): the order of the smallest power of two
 * greater than or equal to count. Used to round allocation sizes up
 * to a power of two.
 */
static int get_count_order(unsigned int count)
{
	int order;

	order = lttng_fls(count) - 1;
	if (count & (count - 1))	/* not a power of two: round up */
		order++;
	return order;
}
61 static ssize_t
bytecode_reserve_data(struct bytecode_runtime
*runtime
,
62 size_t align
, size_t len
)
65 size_t padding
= lttng_ust_offset_align(runtime
->data_len
, align
);
66 size_t new_len
= runtime
->data_len
+ padding
+ len
;
67 size_t new_alloc_len
= new_len
;
68 size_t old_alloc_len
= runtime
->data_alloc_len
;
70 if (new_len
> BYTECODE_MAX_DATA_LEN
)
73 if (new_alloc_len
> old_alloc_len
) {
77 max_t(size_t, 1U << get_count_order(new_alloc_len
), old_alloc_len
<< 1);
78 newptr
= realloc(runtime
->data
, new_alloc_len
);
81 runtime
->data
= newptr
;
82 /* We zero directly the memory from start of allocation. */
83 memset(&runtime
->data
[old_alloc_len
], 0, new_alloc_len
- old_alloc_len
);
84 runtime
->data_alloc_len
= new_alloc_len
;
86 runtime
->data_len
+= padding
;
87 ret
= runtime
->data_len
;
88 runtime
->data_len
+= len
;
92 static ssize_t
bytecode_push_data(struct bytecode_runtime
*runtime
,
93 const void *p
, size_t align
, size_t len
)
97 offset
= bytecode_reserve_data(runtime
, align
, len
);
100 memcpy(&runtime
->data
[offset
], p
, len
);
104 static int specialize_load_field(struct vstack_entry
*stack_top
,
105 struct load_op
*insn
)
109 switch (stack_top
->load
.type
) {
112 case LOAD_ROOT_CONTEXT
:
113 case LOAD_ROOT_APP_CONTEXT
:
114 case LOAD_ROOT_PAYLOAD
:
116 dbg_printf("Bytecode warning: cannot load root, missing field name.\n");
120 switch (stack_top
->load
.object_type
) {
122 dbg_printf("op load field s8\n");
123 stack_top
->type
= REG_S64
;
124 if (!stack_top
->load
.rev_bo
)
125 insn
->op
= BYTECODE_OP_LOAD_FIELD_S8
;
127 case OBJECT_TYPE_S16
:
128 dbg_printf("op load field s16\n");
129 stack_top
->type
= REG_S64
;
130 if (!stack_top
->load
.rev_bo
)
131 insn
->op
= BYTECODE_OP_LOAD_FIELD_S16
;
133 case OBJECT_TYPE_S32
:
134 dbg_printf("op load field s32\n");
135 stack_top
->type
= REG_S64
;
136 if (!stack_top
->load
.rev_bo
)
137 insn
->op
= BYTECODE_OP_LOAD_FIELD_S32
;
139 case OBJECT_TYPE_S64
:
140 dbg_printf("op load field s64\n");
141 stack_top
->type
= REG_S64
;
142 if (!stack_top
->load
.rev_bo
)
143 insn
->op
= BYTECODE_OP_LOAD_FIELD_S64
;
145 case OBJECT_TYPE_SIGNED_ENUM
:
146 dbg_printf("op load field signed enumeration\n");
147 stack_top
->type
= REG_PTR
;
150 dbg_printf("op load field u8\n");
151 stack_top
->type
= REG_U64
;
152 insn
->op
= BYTECODE_OP_LOAD_FIELD_U8
;
154 case OBJECT_TYPE_U16
:
155 dbg_printf("op load field u16\n");
156 stack_top
->type
= REG_U64
;
157 if (!stack_top
->load
.rev_bo
)
158 insn
->op
= BYTECODE_OP_LOAD_FIELD_U16
;
160 case OBJECT_TYPE_U32
:
161 dbg_printf("op load field u32\n");
162 stack_top
->type
= REG_U64
;
163 if (!stack_top
->load
.rev_bo
)
164 insn
->op
= BYTECODE_OP_LOAD_FIELD_U32
;
166 case OBJECT_TYPE_U64
:
167 dbg_printf("op load field u64\n");
168 stack_top
->type
= REG_U64
;
169 if (!stack_top
->load
.rev_bo
)
170 insn
->op
= BYTECODE_OP_LOAD_FIELD_U64
;
172 case OBJECT_TYPE_UNSIGNED_ENUM
:
173 dbg_printf("op load field unsigned enumeration\n");
174 stack_top
->type
= REG_PTR
;
176 case OBJECT_TYPE_DOUBLE
:
177 stack_top
->type
= REG_DOUBLE
;
178 insn
->op
= BYTECODE_OP_LOAD_FIELD_DOUBLE
;
180 case OBJECT_TYPE_STRING
:
181 dbg_printf("op load field string\n");
182 stack_top
->type
= REG_STRING
;
183 insn
->op
= BYTECODE_OP_LOAD_FIELD_STRING
;
185 case OBJECT_TYPE_STRING_SEQUENCE
:
186 dbg_printf("op load field string sequence\n");
187 stack_top
->type
= REG_STRING
;
188 insn
->op
= BYTECODE_OP_LOAD_FIELD_SEQUENCE
;
190 case OBJECT_TYPE_DYNAMIC
:
191 dbg_printf("op load field dynamic\n");
192 stack_top
->type
= REG_UNKNOWN
;
193 /* Don't specialize load op. */
195 case OBJECT_TYPE_SEQUENCE
:
196 case OBJECT_TYPE_ARRAY
:
197 case OBJECT_TYPE_STRUCT
:
198 case OBJECT_TYPE_VARIANT
:
199 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
209 static int specialize_get_index_object_type(enum object_type
*otype
,
210 int signedness
, uint32_t elem_len
)
215 *otype
= OBJECT_TYPE_S8
;
217 *otype
= OBJECT_TYPE_U8
;
221 *otype
= OBJECT_TYPE_S16
;
223 *otype
= OBJECT_TYPE_U16
;
227 *otype
= OBJECT_TYPE_S32
;
229 *otype
= OBJECT_TYPE_U32
;
233 *otype
= OBJECT_TYPE_S64
;
235 *otype
= OBJECT_TYPE_U64
;
243 static int specialize_get_index(struct bytecode_runtime
*runtime
,
244 struct load_op
*insn
, uint64_t index
,
245 struct vstack_entry
*stack_top
,
249 struct bytecode_get_index_data gid
;
252 memset(&gid
, 0, sizeof(gid
));
253 switch (stack_top
->load
.type
) {
255 switch (stack_top
->load
.object_type
) {
256 case OBJECT_TYPE_ARRAY
:
258 const struct lttng_ust_type_integer
*integer_type
;
259 const struct lttng_ust_event_field
*field
;
260 uint32_t elem_len
, num_elems
;
263 field
= stack_top
->load
.field
;
264 switch (field
->type
->type
) {
265 case lttng_ust_type_array
:
266 if (lttng_ust_get_type_array(field
->type
)->elem_type
->type
!= lttng_ust_type_integer
) {
270 integer_type
= lttng_ust_get_type_integer(lttng_ust_get_type_array(field
->type
)->elem_type
);
271 num_elems
= lttng_ust_get_type_array(field
->type
)->length
;
277 elem_len
= integer_type
->size
;
278 signedness
= integer_type
->signedness
;
279 if (index
>= num_elems
) {
283 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
284 signedness
, elem_len
);
287 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
288 gid
.array_len
= num_elems
* (elem_len
/ CHAR_BIT
);
289 gid
.elem
.type
= stack_top
->load
.object_type
;
290 gid
.elem
.len
= elem_len
;
291 if (integer_type
->reverse_byte_order
)
292 gid
.elem
.rev_bo
= true;
293 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
296 case OBJECT_TYPE_SEQUENCE
:
298 const struct lttng_ust_type_integer
*integer_type
;
299 const struct lttng_ust_event_field
*field
;
303 field
= stack_top
->load
.field
;
304 switch (field
->type
->type
) {
305 case lttng_ust_type_sequence
:
306 if (lttng_ust_get_type_sequence(field
->type
)->elem_type
->type
!= lttng_ust_type_integer
) {
310 integer_type
= lttng_ust_get_type_integer(lttng_ust_get_type_sequence(field
->type
)->elem_type
);
316 elem_len
= integer_type
->size
;
317 signedness
= integer_type
->signedness
;
318 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
319 signedness
, elem_len
);
322 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
323 gid
.elem
.type
= stack_top
->load
.object_type
;
324 gid
.elem
.len
= elem_len
;
325 if (integer_type
->reverse_byte_order
)
326 gid
.elem
.rev_bo
= true;
327 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
330 case OBJECT_TYPE_STRUCT
:
331 /* Only generated by the specialize phase. */
332 case OBJECT_TYPE_VARIANT
: /* Fall-through */
334 ERR("Unexpected get index type %d",
335 (int) stack_top
->load
.object_type
);
340 case LOAD_ROOT_CONTEXT
:
341 case LOAD_ROOT_APP_CONTEXT
:
342 case LOAD_ROOT_PAYLOAD
:
343 ERR("Index lookup for root field not implemented yet.");
347 data_offset
= bytecode_push_data(runtime
, &gid
,
348 __alignof__(gid
), sizeof(gid
));
349 if (data_offset
< 0) {
355 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
358 ((struct get_index_u64
*) insn
->data
)->index
= data_offset
;
371 static int specialize_context_lookup_name(struct lttng_ust_ctx
*ctx
,
372 struct bytecode_runtime
*bytecode
,
373 struct load_op
*insn
)
378 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
379 name
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.reloc_offset
+ offset
;
380 return lttng_get_context_index(ctx
, name
);
383 static int specialize_load_object(const struct lttng_ust_event_field
*field
,
384 struct vstack_load
*load
, bool is_context
)
386 load
->type
= LOAD_OBJECT
;
388 switch (field
->type
->type
) {
389 case lttng_ust_type_integer
:
390 if (lttng_ust_get_type_integer(field
->type
)->signedness
)
391 load
->object_type
= OBJECT_TYPE_S64
;
393 load
->object_type
= OBJECT_TYPE_U64
;
394 load
->rev_bo
= false;
396 case lttng_ust_type_enum
:
398 const struct lttng_ust_type_integer
*itype
;
400 itype
= lttng_ust_get_type_integer(lttng_ust_get_type_enum(field
->type
)->container_type
);
401 if (itype
->signedness
)
402 load
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
404 load
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
405 load
->rev_bo
= false;
408 case lttng_ust_type_array
:
409 if (lttng_ust_get_type_array(field
->type
)->elem_type
->type
!= lttng_ust_type_integer
) {
410 ERR("Array nesting only supports integer types.");
414 load
->object_type
= OBJECT_TYPE_STRING
;
416 if (lttng_ust_get_type_array(field
->type
)->encoding
== lttng_ust_string_encoding_none
) {
417 load
->object_type
= OBJECT_TYPE_ARRAY
;
420 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
424 case lttng_ust_type_sequence
:
425 if (lttng_ust_get_type_sequence(field
->type
)->elem_type
->type
!= lttng_ust_type_integer
) {
426 ERR("Sequence nesting only supports integer types.");
430 load
->object_type
= OBJECT_TYPE_STRING
;
432 if (lttng_ust_get_type_sequence(field
->type
)->encoding
== lttng_ust_string_encoding_none
) {
433 load
->object_type
= OBJECT_TYPE_SEQUENCE
;
436 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
441 case lttng_ust_type_string
:
442 load
->object_type
= OBJECT_TYPE_STRING
;
444 case lttng_ust_type_float
:
445 load
->object_type
= OBJECT_TYPE_DOUBLE
;
447 case lttng_ust_type_dynamic
:
448 load
->object_type
= OBJECT_TYPE_DYNAMIC
;
451 ERR("Unknown type: %d", (int) field
->type
->type
);
457 static int specialize_context_lookup(struct lttng_ust_ctx
*ctx
,
458 struct bytecode_runtime
*runtime
,
459 struct load_op
*insn
,
460 struct vstack_load
*load
)
463 const struct lttng_ust_ctx_field
*ctx_field
;
464 const struct lttng_ust_event_field
*field
;
465 struct bytecode_get_index_data gid
;
468 idx
= specialize_context_lookup_name(ctx
, runtime
, insn
);
472 ctx_field
= &ctx
->fields
[idx
];
473 field
= ctx_field
->event_field
;
474 ret
= specialize_load_object(field
, load
, true);
477 /* Specialize each get_symbol into a get_index. */
478 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
479 memset(&gid
, 0, sizeof(gid
));
481 gid
.elem
.type
= load
->object_type
;
482 gid
.elem
.rev_bo
= load
->rev_bo
;
484 data_offset
= bytecode_push_data(runtime
, &gid
,
485 __alignof__(gid
), sizeof(gid
));
486 if (data_offset
< 0) {
489 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
493 static int specialize_app_context_lookup(struct lttng_ust_ctx
**pctx
,
494 struct bytecode_runtime
*runtime
,
495 struct load_op
*insn
,
496 struct vstack_load
*load
)
499 const char *orig_name
;
502 const struct lttng_ust_ctx_field
*ctx_field
;
503 const struct lttng_ust_event_field
*field
;
504 struct bytecode_get_index_data gid
;
507 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
508 orig_name
= runtime
->p
.bc
->bc
.data
+ runtime
->p
.bc
->bc
.reloc_offset
+ offset
;
509 name
= zmalloc(strlen(orig_name
) + strlen("$app.") + 1);
514 strcpy(name
, "$app.");
515 strcat(name
, orig_name
);
516 idx
= lttng_get_context_index(*pctx
, name
);
518 assert(lttng_context_is_app(name
));
519 ret
= lttng_ust_add_app_context_to_ctx_rcu(name
,
523 idx
= lttng_get_context_index(*pctx
, name
);
527 ctx_field
= &(*pctx
)->fields
[idx
];
528 field
= ctx_field
->event_field
;
529 ret
= specialize_load_object(field
, load
, true);
532 /* Specialize each get_symbol into a get_index. */
533 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
534 memset(&gid
, 0, sizeof(gid
));
536 gid
.elem
.type
= load
->object_type
;
537 gid
.elem
.rev_bo
= load
->rev_bo
;
539 data_offset
= bytecode_push_data(runtime
, &gid
,
540 __alignof__(gid
), sizeof(gid
));
541 if (data_offset
< 0) {
545 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
552 static int specialize_payload_lookup(const struct lttng_ust_event_desc
*event_desc
,
553 struct bytecode_runtime
*runtime
,
554 struct load_op
*insn
,
555 struct vstack_load
*load
)
559 unsigned int i
, nr_fields
;
561 uint32_t field_offset
= 0;
562 const struct lttng_ust_event_field
*field
;
564 struct bytecode_get_index_data gid
;
567 nr_fields
= event_desc
->nr_fields
;
568 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
569 name
= runtime
->p
.bc
->bc
.data
+ runtime
->p
.bc
->bc
.reloc_offset
+ offset
;
570 for (i
= 0; i
< nr_fields
; i
++) {
571 field
= event_desc
->fields
[i
];
572 if (field
->nofilter
) {
575 if (!strcmp(field
->name
, name
)) {
579 /* compute field offset on stack */
580 switch (field
->type
->type
) {
581 case lttng_ust_type_integer
:
582 case lttng_ust_type_enum
:
583 field_offset
+= sizeof(int64_t);
585 case lttng_ust_type_array
:
586 case lttng_ust_type_sequence
:
587 field_offset
+= sizeof(unsigned long);
588 field_offset
+= sizeof(void *);
590 case lttng_ust_type_string
:
591 field_offset
+= sizeof(void *);
593 case lttng_ust_type_float
:
594 field_offset
+= sizeof(double);
606 ret
= specialize_load_object(field
, load
, false);
610 /* Specialize each get_symbol into a get_index. */
611 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
612 memset(&gid
, 0, sizeof(gid
));
613 gid
.offset
= field_offset
;
614 gid
.elem
.type
= load
->object_type
;
615 gid
.elem
.rev_bo
= load
->rev_bo
;
617 data_offset
= bytecode_push_data(runtime
, &gid
,
618 __alignof__(gid
), sizeof(gid
));
619 if (data_offset
< 0) {
623 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
629 int lttng_bytecode_specialize(const struct lttng_ust_event_desc
*event_desc
,
630 struct bytecode_runtime
*bytecode
)
632 void *pc
, *next_pc
, *start_pc
;
634 struct vstack _stack
;
635 struct vstack
*stack
= &_stack
;
636 struct lttng_ust_ctx
**pctx
= bytecode
->p
.pctx
;
640 start_pc
= &bytecode
->code
[0];
641 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
643 switch (*(bytecode_opcode_t
*) pc
) {
644 case BYTECODE_OP_UNKNOWN
:
646 ERR("unknown bytecode op %u\n",
647 (unsigned int) *(bytecode_opcode_t
*) pc
);
651 case BYTECODE_OP_RETURN
:
652 if (vstack_ax(stack
)->type
== REG_S64
||
653 vstack_ax(stack
)->type
== REG_U64
)
654 *(bytecode_opcode_t
*) pc
= BYTECODE_OP_RETURN_S64
;
658 case BYTECODE_OP_RETURN_S64
:
659 if (vstack_ax(stack
)->type
!= REG_S64
&&
660 vstack_ax(stack
)->type
!= REG_U64
) {
661 ERR("Unexpected register type\n");
669 case BYTECODE_OP_MUL
:
670 case BYTECODE_OP_DIV
:
671 case BYTECODE_OP_MOD
:
672 case BYTECODE_OP_PLUS
:
673 case BYTECODE_OP_MINUS
:
674 ERR("unsupported bytecode op %u\n",
675 (unsigned int) *(bytecode_opcode_t
*) pc
);
681 struct binary_op
*insn
= (struct binary_op
*) pc
;
683 switch(vstack_ax(stack
)->type
) {
685 ERR("unknown register type\n");
690 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
692 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
693 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
695 insn
->op
= BYTECODE_OP_EQ_STRING
;
697 case REG_STAR_GLOB_STRING
:
698 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
700 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
704 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
706 if (vstack_bx(stack
)->type
== REG_S64
||
707 vstack_bx(stack
)->type
== REG_U64
)
708 insn
->op
= BYTECODE_OP_EQ_S64
;
710 insn
->op
= BYTECODE_OP_EQ_DOUBLE_S64
;
713 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
715 if (vstack_bx(stack
)->type
== REG_S64
||
716 vstack_bx(stack
)->type
== REG_U64
)
717 insn
->op
= BYTECODE_OP_EQ_S64_DOUBLE
;
719 insn
->op
= BYTECODE_OP_EQ_DOUBLE
;
722 break; /* Dynamic typing. */
725 if (vstack_pop(stack
)) {
729 vstack_ax(stack
)->type
= REG_S64
;
730 next_pc
+= sizeof(struct binary_op
);
736 struct binary_op
*insn
= (struct binary_op
*) pc
;
738 switch(vstack_ax(stack
)->type
) {
740 ERR("unknown register type\n");
745 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
747 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
748 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
750 insn
->op
= BYTECODE_OP_NE_STRING
;
752 case REG_STAR_GLOB_STRING
:
753 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
755 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
759 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
761 if (vstack_bx(stack
)->type
== REG_S64
||
762 vstack_bx(stack
)->type
== REG_U64
)
763 insn
->op
= BYTECODE_OP_NE_S64
;
765 insn
->op
= BYTECODE_OP_NE_DOUBLE_S64
;
768 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
770 if (vstack_bx(stack
)->type
== REG_S64
||
771 vstack_bx(stack
)->type
== REG_U64
)
772 insn
->op
= BYTECODE_OP_NE_S64_DOUBLE
;
774 insn
->op
= BYTECODE_OP_NE_DOUBLE
;
777 break; /* Dynamic typing. */
780 if (vstack_pop(stack
)) {
784 vstack_ax(stack
)->type
= REG_S64
;
785 next_pc
+= sizeof(struct binary_op
);
791 struct binary_op
*insn
= (struct binary_op
*) pc
;
793 switch(vstack_ax(stack
)->type
) {
795 ERR("unknown register type\n");
799 case REG_STAR_GLOB_STRING
:
800 ERR("invalid register type for > binary operator\n");
804 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
806 insn
->op
= BYTECODE_OP_GT_STRING
;
810 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
812 if (vstack_bx(stack
)->type
== REG_S64
||
813 vstack_bx(stack
)->type
== REG_U64
)
814 insn
->op
= BYTECODE_OP_GT_S64
;
816 insn
->op
= BYTECODE_OP_GT_DOUBLE_S64
;
819 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
821 if (vstack_bx(stack
)->type
== REG_S64
||
822 vstack_bx(stack
)->type
== REG_U64
)
823 insn
->op
= BYTECODE_OP_GT_S64_DOUBLE
;
825 insn
->op
= BYTECODE_OP_GT_DOUBLE
;
828 break; /* Dynamic typing. */
831 if (vstack_pop(stack
)) {
835 vstack_ax(stack
)->type
= REG_S64
;
836 next_pc
+= sizeof(struct binary_op
);
842 struct binary_op
*insn
= (struct binary_op
*) pc
;
844 switch(vstack_ax(stack
)->type
) {
846 ERR("unknown register type\n");
850 case REG_STAR_GLOB_STRING
:
851 ERR("invalid register type for < binary operator\n");
855 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
857 insn
->op
= BYTECODE_OP_LT_STRING
;
861 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
863 if (vstack_bx(stack
)->type
== REG_S64
||
864 vstack_bx(stack
)->type
== REG_U64
)
865 insn
->op
= BYTECODE_OP_LT_S64
;
867 insn
->op
= BYTECODE_OP_LT_DOUBLE_S64
;
870 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
872 if (vstack_bx(stack
)->type
== REG_S64
||
873 vstack_bx(stack
)->type
== REG_U64
)
874 insn
->op
= BYTECODE_OP_LT_S64_DOUBLE
;
876 insn
->op
= BYTECODE_OP_LT_DOUBLE
;
879 break; /* Dynamic typing. */
882 if (vstack_pop(stack
)) {
886 vstack_ax(stack
)->type
= REG_S64
;
887 next_pc
+= sizeof(struct binary_op
);
893 struct binary_op
*insn
= (struct binary_op
*) pc
;
895 switch(vstack_ax(stack
)->type
) {
897 ERR("unknown register type\n");
901 case REG_STAR_GLOB_STRING
:
902 ERR("invalid register type for >= binary operator\n");
906 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
908 insn
->op
= BYTECODE_OP_GE_STRING
;
912 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
914 if (vstack_bx(stack
)->type
== REG_S64
||
915 vstack_bx(stack
)->type
== REG_U64
)
916 insn
->op
= BYTECODE_OP_GE_S64
;
918 insn
->op
= BYTECODE_OP_GE_DOUBLE_S64
;
921 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
923 if (vstack_bx(stack
)->type
== REG_S64
||
924 vstack_bx(stack
)->type
== REG_U64
)
925 insn
->op
= BYTECODE_OP_GE_S64_DOUBLE
;
927 insn
->op
= BYTECODE_OP_GE_DOUBLE
;
930 break; /* Dynamic typing. */
933 if (vstack_pop(stack
)) {
937 vstack_ax(stack
)->type
= REG_U64
;
938 next_pc
+= sizeof(struct binary_op
);
943 struct binary_op
*insn
= (struct binary_op
*) pc
;
945 switch(vstack_ax(stack
)->type
) {
947 ERR("unknown register type\n");
951 case REG_STAR_GLOB_STRING
:
952 ERR("invalid register type for <= binary operator\n");
956 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
958 insn
->op
= BYTECODE_OP_LE_STRING
;
962 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
964 if (vstack_bx(stack
)->type
== REG_S64
||
965 vstack_bx(stack
)->type
== REG_U64
)
966 insn
->op
= BYTECODE_OP_LE_S64
;
968 insn
->op
= BYTECODE_OP_LE_DOUBLE_S64
;
971 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
973 if (vstack_bx(stack
)->type
== REG_S64
||
974 vstack_bx(stack
)->type
== REG_U64
)
975 insn
->op
= BYTECODE_OP_LE_S64_DOUBLE
;
977 insn
->op
= BYTECODE_OP_LE_DOUBLE
;
980 break; /* Dynamic typing. */
982 vstack_ax(stack
)->type
= REG_S64
;
983 next_pc
+= sizeof(struct binary_op
);
987 case BYTECODE_OP_EQ_STRING
:
988 case BYTECODE_OP_NE_STRING
:
989 case BYTECODE_OP_GT_STRING
:
990 case BYTECODE_OP_LT_STRING
:
991 case BYTECODE_OP_GE_STRING
:
992 case BYTECODE_OP_LE_STRING
:
993 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
994 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
995 case BYTECODE_OP_EQ_S64
:
996 case BYTECODE_OP_NE_S64
:
997 case BYTECODE_OP_GT_S64
:
998 case BYTECODE_OP_LT_S64
:
999 case BYTECODE_OP_GE_S64
:
1000 case BYTECODE_OP_LE_S64
:
1001 case BYTECODE_OP_EQ_DOUBLE
:
1002 case BYTECODE_OP_NE_DOUBLE
:
1003 case BYTECODE_OP_GT_DOUBLE
:
1004 case BYTECODE_OP_LT_DOUBLE
:
1005 case BYTECODE_OP_GE_DOUBLE
:
1006 case BYTECODE_OP_LE_DOUBLE
:
1007 case BYTECODE_OP_EQ_DOUBLE_S64
:
1008 case BYTECODE_OP_NE_DOUBLE_S64
:
1009 case BYTECODE_OP_GT_DOUBLE_S64
:
1010 case BYTECODE_OP_LT_DOUBLE_S64
:
1011 case BYTECODE_OP_GE_DOUBLE_S64
:
1012 case BYTECODE_OP_LE_DOUBLE_S64
:
1013 case BYTECODE_OP_EQ_S64_DOUBLE
:
1014 case BYTECODE_OP_NE_S64_DOUBLE
:
1015 case BYTECODE_OP_GT_S64_DOUBLE
:
1016 case BYTECODE_OP_LT_S64_DOUBLE
:
1017 case BYTECODE_OP_GE_S64_DOUBLE
:
1018 case BYTECODE_OP_LE_S64_DOUBLE
:
1021 if (vstack_pop(stack
)) {
1025 vstack_ax(stack
)->type
= REG_S64
;
1026 next_pc
+= sizeof(struct binary_op
);
1030 case BYTECODE_OP_BIT_RSHIFT
:
1031 case BYTECODE_OP_BIT_LSHIFT
:
1032 case BYTECODE_OP_BIT_AND
:
1033 case BYTECODE_OP_BIT_OR
:
1034 case BYTECODE_OP_BIT_XOR
:
1037 if (vstack_pop(stack
)) {
1041 vstack_ax(stack
)->type
= REG_S64
;
1042 next_pc
+= sizeof(struct binary_op
);
1047 case BYTECODE_OP_UNARY_PLUS
:
1049 struct unary_op
*insn
= (struct unary_op
*) pc
;
1051 switch(vstack_ax(stack
)->type
) {
1053 ERR("unknown register type\n");
1059 insn
->op
= BYTECODE_OP_UNARY_PLUS_S64
;
1062 insn
->op
= BYTECODE_OP_UNARY_PLUS_DOUBLE
;
1064 case REG_UNKNOWN
: /* Dynamic typing. */
1068 next_pc
+= sizeof(struct unary_op
);
1072 case BYTECODE_OP_UNARY_MINUS
:
1074 struct unary_op
*insn
= (struct unary_op
*) pc
;
1076 switch(vstack_ax(stack
)->type
) {
1078 ERR("unknown register type\n");
1084 insn
->op
= BYTECODE_OP_UNARY_MINUS_S64
;
1087 insn
->op
= BYTECODE_OP_UNARY_MINUS_DOUBLE
;
1089 case REG_UNKNOWN
: /* Dynamic typing. */
1093 next_pc
+= sizeof(struct unary_op
);
1097 case BYTECODE_OP_UNARY_NOT
:
1099 struct unary_op
*insn
= (struct unary_op
*) pc
;
1101 switch(vstack_ax(stack
)->type
) {
1103 ERR("unknown register type\n");
1109 insn
->op
= BYTECODE_OP_UNARY_NOT_S64
;
1112 insn
->op
= BYTECODE_OP_UNARY_NOT_DOUBLE
;
1114 case REG_UNKNOWN
: /* Dynamic typing. */
1118 next_pc
+= sizeof(struct unary_op
);
1122 case BYTECODE_OP_UNARY_BIT_NOT
:
1125 next_pc
+= sizeof(struct unary_op
);
1129 case BYTECODE_OP_UNARY_PLUS_S64
:
1130 case BYTECODE_OP_UNARY_MINUS_S64
:
1131 case BYTECODE_OP_UNARY_NOT_S64
:
1132 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
1133 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
1134 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
1137 next_pc
+= sizeof(struct unary_op
);
1142 case BYTECODE_OP_AND
:
1143 case BYTECODE_OP_OR
:
1145 /* Continue to next instruction */
1146 /* Pop 1 when jump not taken */
1147 if (vstack_pop(stack
)) {
1151 next_pc
+= sizeof(struct logical_op
);
1155 /* load field ref */
1156 case BYTECODE_OP_LOAD_FIELD_REF
:
1158 ERR("Unknown field ref type\n");
1162 /* get context ref */
1163 case BYTECODE_OP_GET_CONTEXT_REF
:
1165 if (vstack_push(stack
)) {
1169 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1170 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1173 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
1174 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
1175 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1177 if (vstack_push(stack
)) {
1181 vstack_ax(stack
)->type
= REG_STRING
;
1182 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1185 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
1186 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1188 if (vstack_push(stack
)) {
1192 vstack_ax(stack
)->type
= REG_S64
;
1193 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1196 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
1197 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1199 if (vstack_push(stack
)) {
1203 vstack_ax(stack
)->type
= REG_DOUBLE
;
1204 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1208 /* load from immediate operand */
1209 case BYTECODE_OP_LOAD_STRING
:
1211 struct load_op
*insn
= (struct load_op
*) pc
;
1213 if (vstack_push(stack
)) {
1217 vstack_ax(stack
)->type
= REG_STRING
;
1218 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1222 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1224 struct load_op
*insn
= (struct load_op
*) pc
;
1226 if (vstack_push(stack
)) {
1230 vstack_ax(stack
)->type
= REG_STAR_GLOB_STRING
;
1231 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1235 case BYTECODE_OP_LOAD_S64
:
1237 if (vstack_push(stack
)) {
1241 vstack_ax(stack
)->type
= REG_S64
;
1242 next_pc
+= sizeof(struct load_op
)
1243 + sizeof(struct literal_numeric
);
1247 case BYTECODE_OP_LOAD_DOUBLE
:
1249 if (vstack_push(stack
)) {
1253 vstack_ax(stack
)->type
= REG_DOUBLE
;
1254 next_pc
+= sizeof(struct load_op
)
1255 + sizeof(struct literal_double
);
1260 case BYTECODE_OP_CAST_TO_S64
:
1262 struct cast_op
*insn
= (struct cast_op
*) pc
;
1264 switch (vstack_ax(stack
)->type
) {
1266 ERR("unknown register type\n");
1271 case REG_STAR_GLOB_STRING
:
1272 ERR("Cast op can only be applied to numeric or floating point registers\n");
1276 insn
->op
= BYTECODE_OP_CAST_NOP
;
1279 insn
->op
= BYTECODE_OP_CAST_DOUBLE_TO_S64
;
1286 vstack_ax(stack
)->type
= REG_S64
;
1287 next_pc
+= sizeof(struct cast_op
);
1290 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1293 vstack_ax(stack
)->type
= REG_S64
;
1294 next_pc
+= sizeof(struct cast_op
);
1297 case BYTECODE_OP_CAST_NOP
:
1299 next_pc
+= sizeof(struct cast_op
);
1304 * Instructions for recursive traversal through composed types.
1306 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1308 if (vstack_push(stack
)) {
1312 vstack_ax(stack
)->type
= REG_PTR
;
1313 vstack_ax(stack
)->load
.type
= LOAD_ROOT_CONTEXT
;
1314 next_pc
+= sizeof(struct load_op
);
1317 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1319 if (vstack_push(stack
)) {
1323 vstack_ax(stack
)->type
= REG_PTR
;
1324 vstack_ax(stack
)->load
.type
= LOAD_ROOT_APP_CONTEXT
;
1325 next_pc
+= sizeof(struct load_op
);
1328 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1330 if (vstack_push(stack
)) {
1334 vstack_ax(stack
)->type
= REG_PTR
;
1335 vstack_ax(stack
)->load
.type
= LOAD_ROOT_PAYLOAD
;
1336 next_pc
+= sizeof(struct load_op
);
1340 case BYTECODE_OP_LOAD_FIELD
:
1342 struct load_op
*insn
= (struct load_op
*) pc
;
1344 assert(vstack_ax(stack
)->type
== REG_PTR
);
1346 ret
= specialize_load_field(vstack_ax(stack
), insn
);
1350 next_pc
+= sizeof(struct load_op
);
1354 case BYTECODE_OP_LOAD_FIELD_S8
:
1355 case BYTECODE_OP_LOAD_FIELD_S16
:
1356 case BYTECODE_OP_LOAD_FIELD_S32
:
1357 case BYTECODE_OP_LOAD_FIELD_S64
:
1360 vstack_ax(stack
)->type
= REG_S64
;
1361 next_pc
+= sizeof(struct load_op
);
1365 case BYTECODE_OP_LOAD_FIELD_U8
:
1366 case BYTECODE_OP_LOAD_FIELD_U16
:
1367 case BYTECODE_OP_LOAD_FIELD_U32
:
1368 case BYTECODE_OP_LOAD_FIELD_U64
:
1371 vstack_ax(stack
)->type
= REG_U64
;
1372 next_pc
+= sizeof(struct load_op
);
1376 case BYTECODE_OP_LOAD_FIELD_STRING
:
1377 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1380 vstack_ax(stack
)->type
= REG_STRING
;
1381 next_pc
+= sizeof(struct load_op
);
1385 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1388 vstack_ax(stack
)->type
= REG_DOUBLE
;
1389 next_pc
+= sizeof(struct load_op
);
1393 case BYTECODE_OP_GET_SYMBOL
:
1395 struct load_op
*insn
= (struct load_op
*) pc
;
1397 dbg_printf("op get symbol\n");
1398 switch (vstack_ax(stack
)->load
.type
) {
1400 ERR("Nested fields not implemented yet.");
1403 case LOAD_ROOT_CONTEXT
:
1404 /* Lookup context field. */
1405 ret
= specialize_context_lookup(*pctx
,
1407 &vstack_ax(stack
)->load
);
1411 case LOAD_ROOT_APP_CONTEXT
:
1412 /* Lookup app context field. */
1413 ret
= specialize_app_context_lookup(pctx
,
1415 &vstack_ax(stack
)->load
);
1419 case LOAD_ROOT_PAYLOAD
:
1420 /* Lookup event payload field. */
1421 ret
= specialize_payload_lookup(event_desc
,
1423 &vstack_ax(stack
)->load
);
1428 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1432 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1434 /* Always generated by specialize phase. */
1439 case BYTECODE_OP_GET_INDEX_U16
:
1441 struct load_op
*insn
= (struct load_op
*) pc
;
1442 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1444 dbg_printf("op get index u16\n");
1446 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1447 vstack_ax(stack
), sizeof(*index
));
1450 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1454 case BYTECODE_OP_GET_INDEX_U64
:
1456 struct load_op
*insn
= (struct load_op
*) pc
;
1457 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1459 dbg_printf("op get index u64\n");
1461 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1462 vstack_ax(stack
), sizeof(*index
));
1465 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);