2 * lttng-bytecode-interpreter.c
4 * LTTng UST bytecode interpreter.
6 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
31 #include <lttng/urcu/pointer.h>
32 #include <lttng/ust-endian.h>
33 #include <lttng/ust-events.h>
35 #include "lttng-bytecode.h"
36 #include "string-utils.h"
41 * -2: unknown escape char.
46 int parse_char(const char **p
)
66 * Returns SIZE_MAX if the string is null-terminated, or the number of
70 size_t get_str_or_seq_len(const struct estack_entry
*entry
)
72 return entry
->u
.s
.seq_len
;
76 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
79 const char *candidate
;
83 /* Find out which side is the pattern vs. the candidate. */
84 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
85 pattern
= estack_ax(stack
, top
)->u
.s
.str
;
86 pattern_len
= get_str_or_seq_len(estack_ax(stack
, top
));
87 candidate
= estack_bx(stack
, top
)->u
.s
.str
;
88 candidate_len
= get_str_or_seq_len(estack_bx(stack
, top
));
90 pattern
= estack_bx(stack
, top
)->u
.s
.str
;
91 pattern_len
= get_str_or_seq_len(estack_bx(stack
, top
));
92 candidate
= estack_ax(stack
, top
)->u
.s
.str
;
93 candidate_len
= get_str_or_seq_len(estack_ax(stack
, top
));
96 /* Perform the match. Returns 0 when the result is true. */
97 return !strutils_star_glob_match(pattern
, pattern_len
, candidate
,
102 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
104 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
111 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
>= estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
112 if (q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
115 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
116 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
117 ret
= parse_char(&q
);
124 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
125 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
126 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
127 ret
= parse_char(&p
);
133 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
134 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
135 ret
= parse_char(&p
);
138 } else if (ret
== -2) {
141 /* else compare both char */
143 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
144 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
145 ret
= parse_char(&q
);
148 } else if (ret
== -2) {
168 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data
,
169 const char *filter_stack_data
)
171 return LTTNG_INTERPRETER_DISCARD
;
174 uint64_t lttng_bytecode_capture_interpret_false(void *capture_data
,
175 const char *capture_stack_data
,
176 struct lttng_interpreter_output
*output
)
178 return LTTNG_INTERPRETER_DISCARD
;
181 #ifdef INTERPRETER_USE_SWITCH
184 * Fallback for compilers that do not support taking address of labels.
188 start_pc = &bytecode->data[0]; \
189 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
191 dbg_printf("Executing op %s (%u)\n", \
192 print_op((unsigned int) *(bytecode_opcode_t *) pc), \
193 (unsigned int) *(bytecode_opcode_t *) pc); \
194 switch (*(bytecode_opcode_t *) pc) {
196 #define OP(name) jump_target_##name: __attribute__((unused)); \
204 #define JUMP_TO(name) \
205 goto jump_target_##name
210 * Dispatch-table based interpreter.
214 start_pc = &bytecode->code[0]; \
215 pc = next_pc = start_pc; \
216 if (unlikely(pc - start_pc >= bytecode->len)) \
218 goto *dispatch[*(bytecode_opcode_t *) pc];
225 goto *dispatch[*(bytecode_opcode_t *) pc];
229 #define JUMP_TO(name) \
234 #define IS_INTEGER_REGISTER(reg_type) \
235 (reg_type == REG_U64 || reg_type == REG_S64)
237 static int context_get_index(struct lttng_ctx
*ctx
,
238 struct load_ptr
*ptr
,
242 struct lttng_ctx_field
*ctx_field
;
243 struct lttng_event_field
*field
;
244 struct lttng_ctx_value v
;
246 ctx_field
= &ctx
->fields
[idx
];
247 field
= &ctx_field
->event_field
;
248 ptr
->type
= LOAD_OBJECT
;
251 switch (field
->type
.atype
) {
253 ctx_field
->get_value(ctx_field
, &v
);
254 if (field
->type
.u
.integer
.signedness
) {
255 ptr
->object_type
= OBJECT_TYPE_S64
;
256 ptr
->u
.s64
= v
.u
.s64
;
257 ptr
->ptr
= &ptr
->u
.s64
;
259 ptr
->object_type
= OBJECT_TYPE_U64
;
260 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
261 ptr
->ptr
= &ptr
->u
.u64
;
264 case atype_enum
: /* Fall-through */
265 case atype_enum_nestable
:
267 const struct lttng_integer_type
*itype
;
269 if (field
->type
.atype
== atype_enum
) {
270 itype
= &field
->type
.u
.legacy
.basic
.enumeration
.container_type
;
272 itype
= &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
274 ctx_field
->get_value(ctx_field
, &v
);
275 if (itype
->signedness
) {
276 ptr
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
277 ptr
->u
.s64
= v
.u
.s64
;
278 ptr
->ptr
= &ptr
->u
.s64
;
280 ptr
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
281 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
282 ptr
->ptr
= &ptr
->u
.u64
;
287 if (field
->type
.u
.legacy
.array
.elem_type
.atype
!= atype_integer
) {
288 ERR("Array nesting only supports integer types.");
291 if (field
->type
.u
.legacy
.array
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
292 ERR("Only string arrays are supported for contexts.");
295 ptr
->object_type
= OBJECT_TYPE_STRING
;
296 ctx_field
->get_value(ctx_field
, &v
);
299 case atype_array_nestable
:
300 if (field
->type
.u
.array_nestable
.elem_type
->atype
!= atype_integer
) {
301 ERR("Array nesting only supports integer types.");
304 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
305 ERR("Only string arrays are supported for contexts.");
308 ptr
->object_type
= OBJECT_TYPE_STRING
;
309 ctx_field
->get_value(ctx_field
, &v
);
313 if (field
->type
.u
.legacy
.sequence
.elem_type
.atype
!= atype_integer
) {
314 ERR("Sequence nesting only supports integer types.");
317 if (field
->type
.u
.legacy
.sequence
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
318 ERR("Only string sequences are supported for contexts.");
321 ptr
->object_type
= OBJECT_TYPE_STRING
;
322 ctx_field
->get_value(ctx_field
, &v
);
325 case atype_sequence_nestable
:
326 if (field
->type
.u
.sequence_nestable
.elem_type
->atype
!= atype_integer
) {
327 ERR("Sequence nesting only supports integer types.");
330 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
331 ERR("Only string sequences are supported for contexts.");
334 ptr
->object_type
= OBJECT_TYPE_STRING
;
335 ctx_field
->get_value(ctx_field
, &v
);
339 ptr
->object_type
= OBJECT_TYPE_STRING
;
340 ctx_field
->get_value(ctx_field
, &v
);
344 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
345 ctx_field
->get_value(ctx_field
, &v
);
347 ptr
->ptr
= &ptr
->u
.d
;
350 ctx_field
->get_value(ctx_field
, &v
);
352 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
354 case LTTNG_UST_DYNAMIC_TYPE_U8
:
355 case LTTNG_UST_DYNAMIC_TYPE_U16
:
356 case LTTNG_UST_DYNAMIC_TYPE_U32
:
357 case LTTNG_UST_DYNAMIC_TYPE_U64
:
358 ptr
->object_type
= OBJECT_TYPE_U64
;
359 ptr
->u
.u64
= v
.u
.u64
;
360 ptr
->ptr
= &ptr
->u
.u64
;
361 dbg_printf("context get index dynamic u64 %" PRIi64
"\n", ptr
->u
.u64
);
363 case LTTNG_UST_DYNAMIC_TYPE_S8
:
364 case LTTNG_UST_DYNAMIC_TYPE_S16
:
365 case LTTNG_UST_DYNAMIC_TYPE_S32
:
366 case LTTNG_UST_DYNAMIC_TYPE_S64
:
367 ptr
->object_type
= OBJECT_TYPE_S64
;
368 ptr
->u
.s64
= v
.u
.s64
;
369 ptr
->ptr
= &ptr
->u
.s64
;
370 dbg_printf("context get index dynamic s64 %" PRIi64
"\n", ptr
->u
.s64
);
372 case LTTNG_UST_DYNAMIC_TYPE_FLOAT
:
373 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
374 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
376 ptr
->ptr
= &ptr
->u
.d
;
377 dbg_printf("context get index dynamic double %g\n", ptr
->u
.d
);
379 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
380 ptr
->object_type
= OBJECT_TYPE_STRING
;
382 dbg_printf("context get index dynamic string %s\n", (const char *) ptr
->ptr
);
385 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
390 ERR("Structure type cannot be loaded.");
393 ERR("Unknown type: %d", (int) field
->type
.atype
);
399 static int dynamic_get_index(struct lttng_ctx
*ctx
,
400 struct bytecode_runtime
*runtime
,
401 uint64_t index
, struct estack_entry
*stack_top
)
404 const struct bytecode_get_index_data
*gid
;
406 gid
= (const struct bytecode_get_index_data
*) &runtime
->data
[index
];
407 switch (stack_top
->u
.ptr
.type
) {
409 switch (stack_top
->u
.ptr
.object_type
) {
410 case OBJECT_TYPE_ARRAY
:
414 assert(gid
->offset
< gid
->array_len
);
415 /* Skip count (unsigned long) */
416 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
417 ptr
= ptr
+ gid
->offset
;
418 stack_top
->u
.ptr
.ptr
= ptr
;
419 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
420 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
421 assert(stack_top
->u
.ptr
.field
->type
.atype
== atype_array
||
422 stack_top
->u
.ptr
.field
->type
.atype
== atype_array_nestable
);
423 stack_top
->u
.ptr
.field
= NULL
;
426 case OBJECT_TYPE_SEQUENCE
:
431 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
432 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
433 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
437 ptr
= ptr
+ gid
->offset
;
438 stack_top
->u
.ptr
.ptr
= ptr
;
439 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
440 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
441 assert(stack_top
->u
.ptr
.field
->type
.atype
== atype_sequence
||
442 stack_top
->u
.ptr
.field
->type
.atype
== atype_sequence_nestable
);
443 stack_top
->u
.ptr
.field
= NULL
;
446 case OBJECT_TYPE_STRUCT
:
447 ERR("Nested structures are not supported yet.");
450 case OBJECT_TYPE_VARIANT
:
452 ERR("Unexpected get index type %d",
453 (int) stack_top
->u
.ptr
.object_type
);
458 case LOAD_ROOT_CONTEXT
:
459 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
461 ret
= context_get_index(ctx
,
469 case LOAD_ROOT_PAYLOAD
:
470 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
471 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
472 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
473 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
474 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
475 stack_top
->u
.ptr
.field
= gid
->field
;
476 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
480 stack_top
->type
= REG_PTR
;
488 static int dynamic_load_field(struct estack_entry
*stack_top
)
492 switch (stack_top
->u
.ptr
.type
) {
495 case LOAD_ROOT_CONTEXT
:
496 case LOAD_ROOT_APP_CONTEXT
:
497 case LOAD_ROOT_PAYLOAD
:
499 dbg_printf("Interpreter warning: cannot load root, missing field name.\n");
503 switch (stack_top
->u
.ptr
.object_type
) {
505 dbg_printf("op load field s8\n");
506 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
507 stack_top
->type
= REG_S64
;
509 case OBJECT_TYPE_S16
:
513 dbg_printf("op load field s16\n");
514 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
515 if (stack_top
->u
.ptr
.rev_bo
)
517 stack_top
->u
.v
= tmp
;
518 stack_top
->type
= REG_S64
;
521 case OBJECT_TYPE_S32
:
525 dbg_printf("op load field s32\n");
526 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
527 if (stack_top
->u
.ptr
.rev_bo
)
529 stack_top
->u
.v
= tmp
;
530 stack_top
->type
= REG_S64
;
533 case OBJECT_TYPE_S64
:
537 dbg_printf("op load field s64\n");
538 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
539 if (stack_top
->u
.ptr
.rev_bo
)
541 stack_top
->u
.v
= tmp
;
542 stack_top
->type
= REG_S64
;
545 case OBJECT_TYPE_SIGNED_ENUM
:
549 dbg_printf("op load field signed enumeration\n");
550 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
551 if (stack_top
->u
.ptr
.rev_bo
)
553 stack_top
->u
.v
= tmp
;
554 stack_top
->type
= REG_S64
;
558 dbg_printf("op load field u8\n");
559 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
560 stack_top
->type
= REG_U64
;
562 case OBJECT_TYPE_U16
:
566 dbg_printf("op load field u16\n");
567 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
568 if (stack_top
->u
.ptr
.rev_bo
)
570 stack_top
->u
.v
= tmp
;
571 stack_top
->type
= REG_U64
;
574 case OBJECT_TYPE_U32
:
578 dbg_printf("op load field u32\n");
579 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
580 if (stack_top
->u
.ptr
.rev_bo
)
582 stack_top
->u
.v
= tmp
;
583 stack_top
->type
= REG_U64
;
586 case OBJECT_TYPE_U64
:
590 dbg_printf("op load field u64\n");
591 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
592 if (stack_top
->u
.ptr
.rev_bo
)
594 stack_top
->u
.v
= tmp
;
595 stack_top
->type
= REG_U64
;
598 case OBJECT_TYPE_UNSIGNED_ENUM
:
602 dbg_printf("op load field unsigned enumeration\n");
603 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
604 if (stack_top
->u
.ptr
.rev_bo
)
606 stack_top
->u
.v
= tmp
;
607 stack_top
->type
= REG_U64
;
610 case OBJECT_TYPE_DOUBLE
:
611 memcpy(&stack_top
->u
.d
,
612 stack_top
->u
.ptr
.ptr
,
613 sizeof(struct literal_double
));
614 stack_top
->type
= REG_DOUBLE
;
616 case OBJECT_TYPE_STRING
:
620 dbg_printf("op load field string\n");
621 str
= (const char *) stack_top
->u
.ptr
.ptr
;
622 stack_top
->u
.s
.str
= str
;
623 if (unlikely(!stack_top
->u
.s
.str
)) {
624 dbg_printf("Interpreter warning: loading a NULL string.\n");
628 stack_top
->u
.s
.seq_len
= SIZE_MAX
;
629 stack_top
->u
.s
.literal_type
=
630 ESTACK_STRING_LITERAL_TYPE_NONE
;
631 stack_top
->type
= REG_STRING
;
634 case OBJECT_TYPE_STRING_SEQUENCE
:
638 dbg_printf("op load field string sequence\n");
639 ptr
= stack_top
->u
.ptr
.ptr
;
640 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
641 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
642 stack_top
->type
= REG_STRING
;
643 if (unlikely(!stack_top
->u
.s
.str
)) {
644 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
648 stack_top
->u
.s
.literal_type
=
649 ESTACK_STRING_LITERAL_TYPE_NONE
;
652 case OBJECT_TYPE_DYNAMIC
:
654 * Dynamic types in context are looked up
655 * by context get index.
659 case OBJECT_TYPE_SEQUENCE
:
660 case OBJECT_TYPE_ARRAY
:
661 case OBJECT_TYPE_STRUCT
:
662 case OBJECT_TYPE_VARIANT
:
663 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
674 int lttng_bytecode_interpret_format_output(struct estack_entry
*ax
,
675 struct lttng_interpreter_output
*output
)
682 output
->type
= LTTNG_INTERPRETER_TYPE_S64
;
683 output
->u
.s
= ax
->u
.v
;
686 output
->type
= LTTNG_INTERPRETER_TYPE_U64
;
687 output
->u
.u
= (uint64_t) ax
->u
.v
;
690 output
->type
= LTTNG_INTERPRETER_TYPE_DOUBLE
;
691 output
->u
.d
= ax
->u
.d
;
694 output
->type
= LTTNG_INTERPRETER_TYPE_STRING
;
695 output
->u
.str
.str
= ax
->u
.s
.str
;
696 output
->u
.str
.len
= ax
->u
.s
.seq_len
;
699 switch (ax
->u
.ptr
.object_type
) {
701 case OBJECT_TYPE_S16
:
702 case OBJECT_TYPE_S32
:
703 case OBJECT_TYPE_S64
:
705 case OBJECT_TYPE_U16
:
706 case OBJECT_TYPE_U32
:
707 case OBJECT_TYPE_U64
:
708 case OBJECT_TYPE_DOUBLE
:
709 case OBJECT_TYPE_STRING
:
710 case OBJECT_TYPE_STRING_SEQUENCE
:
711 ret
= dynamic_load_field(ax
);
714 /* Retry after loading ptr into stack top. */
716 case OBJECT_TYPE_SEQUENCE
:
717 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
718 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
719 output
->u
.sequence
.nr_elem
= *(unsigned long *) ax
->u
.ptr
.ptr
;
720 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.sequence_nestable
.elem_type
;
722 case OBJECT_TYPE_ARRAY
:
723 /* Skip count (unsigned long) */
724 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
725 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
726 output
->u
.sequence
.nr_elem
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.length
;
727 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.elem_type
;
729 case OBJECT_TYPE_SIGNED_ENUM
:
730 ret
= dynamic_load_field(ax
);
733 output
->type
= LTTNG_INTERPRETER_TYPE_SIGNED_ENUM
;
734 output
->u
.s
= ax
->u
.v
;
736 case OBJECT_TYPE_UNSIGNED_ENUM
:
737 ret
= dynamic_load_field(ax
);
740 output
->type
= LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM
;
741 output
->u
.u
= ax
->u
.v
;
743 case OBJECT_TYPE_STRUCT
:
744 case OBJECT_TYPE_VARIANT
:
750 case REG_STAR_GLOB_STRING
:
756 return LTTNG_INTERPRETER_RECORD_FLAG
;
760 * For `output` equal to NULL:
761 * Return 0 (discard), or raise the 0x1 flag (log event).
762 * Currently, other flags are kept for future extensions and have no
764 * For `output` not equal to NULL:
765 * Return 0 on success, negative error value on error.
768 uint64_t bytecode_interpret(void *interpreter_data
,
769 const char *interpreter_stack_data
,
770 struct lttng_interpreter_output
*output
)
772 struct bytecode_runtime
*bytecode
= interpreter_data
;
773 struct lttng_ctx
*ctx
= lttng_ust_rcu_dereference(*bytecode
->p
.pctx
);
774 void *pc
, *next_pc
, *start_pc
;
777 struct estack _stack
;
778 struct estack
*stack
= &_stack
;
779 register int64_t ax
= 0, bx
= 0;
780 register enum entry_type ax_t
= REG_UNKNOWN
, bx_t
= REG_UNKNOWN
;
781 register int top
= INTERPRETER_STACK_EMPTY
;
782 #ifndef INTERPRETER_USE_SWITCH
783 static void *dispatch
[NR_BYTECODE_OPS
] = {
784 [ BYTECODE_OP_UNKNOWN
] = &&LABEL_BYTECODE_OP_UNKNOWN
,
786 [ BYTECODE_OP_RETURN
] = &&LABEL_BYTECODE_OP_RETURN
,
789 [ BYTECODE_OP_MUL
] = &&LABEL_BYTECODE_OP_MUL
,
790 [ BYTECODE_OP_DIV
] = &&LABEL_BYTECODE_OP_DIV
,
791 [ BYTECODE_OP_MOD
] = &&LABEL_BYTECODE_OP_MOD
,
792 [ BYTECODE_OP_PLUS
] = &&LABEL_BYTECODE_OP_PLUS
,
793 [ BYTECODE_OP_MINUS
] = &&LABEL_BYTECODE_OP_MINUS
,
794 [ BYTECODE_OP_BIT_RSHIFT
] = &&LABEL_BYTECODE_OP_BIT_RSHIFT
,
795 [ BYTECODE_OP_BIT_LSHIFT
] = &&LABEL_BYTECODE_OP_BIT_LSHIFT
,
796 [ BYTECODE_OP_BIT_AND
] = &&LABEL_BYTECODE_OP_BIT_AND
,
797 [ BYTECODE_OP_BIT_OR
] = &&LABEL_BYTECODE_OP_BIT_OR
,
798 [ BYTECODE_OP_BIT_XOR
] = &&LABEL_BYTECODE_OP_BIT_XOR
,
800 /* binary comparators */
801 [ BYTECODE_OP_EQ
] = &&LABEL_BYTECODE_OP_EQ
,
802 [ BYTECODE_OP_NE
] = &&LABEL_BYTECODE_OP_NE
,
803 [ BYTECODE_OP_GT
] = &&LABEL_BYTECODE_OP_GT
,
804 [ BYTECODE_OP_LT
] = &&LABEL_BYTECODE_OP_LT
,
805 [ BYTECODE_OP_GE
] = &&LABEL_BYTECODE_OP_GE
,
806 [ BYTECODE_OP_LE
] = &&LABEL_BYTECODE_OP_LE
,
808 /* string binary comparator */
809 [ BYTECODE_OP_EQ_STRING
] = &&LABEL_BYTECODE_OP_EQ_STRING
,
810 [ BYTECODE_OP_NE_STRING
] = &&LABEL_BYTECODE_OP_NE_STRING
,
811 [ BYTECODE_OP_GT_STRING
] = &&LABEL_BYTECODE_OP_GT_STRING
,
812 [ BYTECODE_OP_LT_STRING
] = &&LABEL_BYTECODE_OP_LT_STRING
,
813 [ BYTECODE_OP_GE_STRING
] = &&LABEL_BYTECODE_OP_GE_STRING
,
814 [ BYTECODE_OP_LE_STRING
] = &&LABEL_BYTECODE_OP_LE_STRING
,
816 /* globbing pattern binary comparator */
817 [ BYTECODE_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING
,
818 [ BYTECODE_OP_NE_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING
,
820 /* s64 binary comparator */
821 [ BYTECODE_OP_EQ_S64
] = &&LABEL_BYTECODE_OP_EQ_S64
,
822 [ BYTECODE_OP_NE_S64
] = &&LABEL_BYTECODE_OP_NE_S64
,
823 [ BYTECODE_OP_GT_S64
] = &&LABEL_BYTECODE_OP_GT_S64
,
824 [ BYTECODE_OP_LT_S64
] = &&LABEL_BYTECODE_OP_LT_S64
,
825 [ BYTECODE_OP_GE_S64
] = &&LABEL_BYTECODE_OP_GE_S64
,
826 [ BYTECODE_OP_LE_S64
] = &&LABEL_BYTECODE_OP_LE_S64
,
828 /* double binary comparator */
829 [ BYTECODE_OP_EQ_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE
,
830 [ BYTECODE_OP_NE_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_DOUBLE
,
831 [ BYTECODE_OP_GT_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_DOUBLE
,
832 [ BYTECODE_OP_LT_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_DOUBLE
,
833 [ BYTECODE_OP_GE_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_DOUBLE
,
834 [ BYTECODE_OP_LE_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_DOUBLE
,
836 /* Mixed S64-double binary comparators */
837 [ BYTECODE_OP_EQ_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64
,
838 [ BYTECODE_OP_NE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64
,
839 [ BYTECODE_OP_GT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64
,
840 [ BYTECODE_OP_LT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64
,
841 [ BYTECODE_OP_GE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64
,
842 [ BYTECODE_OP_LE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64
,
844 [ BYTECODE_OP_EQ_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE
,
845 [ BYTECODE_OP_NE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE
,
846 [ BYTECODE_OP_GT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE
,
847 [ BYTECODE_OP_LT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE
,
848 [ BYTECODE_OP_GE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE
,
849 [ BYTECODE_OP_LE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE
,
852 [ BYTECODE_OP_UNARY_PLUS
] = &&LABEL_BYTECODE_OP_UNARY_PLUS
,
853 [ BYTECODE_OP_UNARY_MINUS
] = &&LABEL_BYTECODE_OP_UNARY_MINUS
,
854 [ BYTECODE_OP_UNARY_NOT
] = &&LABEL_BYTECODE_OP_UNARY_NOT
,
855 [ BYTECODE_OP_UNARY_PLUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64
,
856 [ BYTECODE_OP_UNARY_MINUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64
,
857 [ BYTECODE_OP_UNARY_NOT_S64
] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64
,
858 [ BYTECODE_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE
,
859 [ BYTECODE_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE
,
860 [ BYTECODE_OP_UNARY_NOT_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE
,
863 [ BYTECODE_OP_AND
] = &&LABEL_BYTECODE_OP_AND
,
864 [ BYTECODE_OP_OR
] = &&LABEL_BYTECODE_OP_OR
,
867 [ BYTECODE_OP_LOAD_FIELD_REF
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF
,
868 [ BYTECODE_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING
,
869 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
,
870 [ BYTECODE_OP_LOAD_FIELD_REF_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64
,
871 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
,
873 /* load from immediate operand */
874 [ BYTECODE_OP_LOAD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STRING
,
875 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING
,
876 [ BYTECODE_OP_LOAD_S64
] = &&LABEL_BYTECODE_OP_LOAD_S64
,
877 [ BYTECODE_OP_LOAD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE
,
880 [ BYTECODE_OP_CAST_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_TO_S64
,
881 [ BYTECODE_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64
,
882 [ BYTECODE_OP_CAST_NOP
] = &&LABEL_BYTECODE_OP_CAST_NOP
,
884 /* get context ref */
885 [ BYTECODE_OP_GET_CONTEXT_REF
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF
,
886 [ BYTECODE_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING
,
887 [ BYTECODE_OP_GET_CONTEXT_REF_S64
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64
,
888 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
,
890 /* Instructions for recursive traversal through composed types. */
891 [ BYTECODE_OP_GET_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT
,
892 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT
,
893 [ BYTECODE_OP_GET_PAYLOAD_ROOT
] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT
,
895 [ BYTECODE_OP_GET_SYMBOL
] = &&LABEL_BYTECODE_OP_GET_SYMBOL
,
896 [ BYTECODE_OP_GET_SYMBOL_FIELD
] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD
,
897 [ BYTECODE_OP_GET_INDEX_U16
] = &&LABEL_BYTECODE_OP_GET_INDEX_U16
,
898 [ BYTECODE_OP_GET_INDEX_U64
] = &&LABEL_BYTECODE_OP_GET_INDEX_U64
,
900 [ BYTECODE_OP_LOAD_FIELD
] = &&LABEL_BYTECODE_OP_LOAD_FIELD
,
901 [ BYTECODE_OP_LOAD_FIELD_S8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8
,
902 [ BYTECODE_OP_LOAD_FIELD_S16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16
,
903 [ BYTECODE_OP_LOAD_FIELD_S32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32
,
904 [ BYTECODE_OP_LOAD_FIELD_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64
,
905 [ BYTECODE_OP_LOAD_FIELD_U8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8
,
906 [ BYTECODE_OP_LOAD_FIELD_U16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16
,
907 [ BYTECODE_OP_LOAD_FIELD_U32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32
,
908 [ BYTECODE_OP_LOAD_FIELD_U64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64
,
909 [ BYTECODE_OP_LOAD_FIELD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING
,
910 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE
,
911 [ BYTECODE_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE
,
913 [ BYTECODE_OP_UNARY_BIT_NOT
] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT
,
915 [ BYTECODE_OP_RETURN_S64
] = &&LABEL_BYTECODE_OP_RETURN_S64
,
917 #endif /* #ifndef INTERPRETER_USE_SWITCH */
921 OP(BYTECODE_OP_UNKNOWN
):
922 OP(BYTECODE_OP_LOAD_FIELD_REF
):
923 #ifdef INTERPRETER_USE_SWITCH
925 #endif /* INTERPRETER_USE_SWITCH */
926 ERR("unknown bytecode op %u",
927 (unsigned int) *(bytecode_opcode_t
*) pc
);
931 OP(BYTECODE_OP_RETURN
):
932 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
933 /* Handle dynamic typing. */
934 switch (estack_ax_t
) {
937 retval
= !!estack_ax_v
;
948 case REG_STAR_GLOB_STRING
:
957 OP(BYTECODE_OP_RETURN_S64
):
958 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
959 retval
= !!estack_ax_v
;
967 OP(BYTECODE_OP_PLUS
):
968 OP(BYTECODE_OP_MINUS
):
969 ERR("unsupported bytecode op %u",
970 (unsigned int) *(bytecode_opcode_t
*) pc
);
976 /* Dynamic typing. */
977 switch (estack_ax_t
) {
978 case REG_S64
: /* Fall-through */
980 switch (estack_bx_t
) {
981 case REG_S64
: /* Fall-through */
983 JUMP_TO(BYTECODE_OP_EQ_S64
);
985 JUMP_TO(BYTECODE_OP_EQ_DOUBLE_S64
);
986 case REG_STRING
: /* Fall-through */
987 case REG_STAR_GLOB_STRING
:
991 ERR("Unknown interpreter register type (%d)",
998 switch (estack_bx_t
) {
999 case REG_S64
: /* Fall-through */
1001 JUMP_TO(BYTECODE_OP_EQ_S64_DOUBLE
);
1003 JUMP_TO(BYTECODE_OP_EQ_DOUBLE
);
1004 case REG_STRING
: /* Fall-through */
1005 case REG_STAR_GLOB_STRING
:
1009 ERR("Unknown interpreter register type (%d)",
1016 switch (estack_bx_t
) {
1017 case REG_S64
: /* Fall-through */
1018 case REG_U64
: /* Fall-through */
1023 JUMP_TO(BYTECODE_OP_EQ_STRING
);
1024 case REG_STAR_GLOB_STRING
:
1025 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
1027 ERR("Unknown interpreter register type (%d)",
1033 case REG_STAR_GLOB_STRING
:
1034 switch (estack_bx_t
) {
1035 case REG_S64
: /* Fall-through */
1036 case REG_U64
: /* Fall-through */
1041 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
1042 case REG_STAR_GLOB_STRING
:
1046 ERR("Unknown interpreter register type (%d)",
1053 ERR("Unknown interpreter register type (%d)",
1061 /* Dynamic typing. */
1062 switch (estack_ax_t
) {
1063 case REG_S64
: /* Fall-through */
1065 switch (estack_bx_t
) {
1066 case REG_S64
: /* Fall-through */
1068 JUMP_TO(BYTECODE_OP_NE_S64
);
1070 JUMP_TO(BYTECODE_OP_NE_DOUBLE_S64
);
1071 case REG_STRING
: /* Fall-through */
1072 case REG_STAR_GLOB_STRING
:
1076 ERR("Unknown interpreter register type (%d)",
1083 switch (estack_bx_t
) {
1084 case REG_S64
: /* Fall-through */
1086 JUMP_TO(BYTECODE_OP_NE_S64_DOUBLE
);
1088 JUMP_TO(BYTECODE_OP_NE_DOUBLE
);
1089 case REG_STRING
: /* Fall-through */
1090 case REG_STAR_GLOB_STRING
:
1094 ERR("Unknown interpreter register type (%d)",
1101 switch (estack_bx_t
) {
1102 case REG_S64
: /* Fall-through */
1108 JUMP_TO(BYTECODE_OP_NE_STRING
);
1109 case REG_STAR_GLOB_STRING
:
1110 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1112 ERR("Unknown interpreter register type (%d)",
1118 case REG_STAR_GLOB_STRING
:
1119 switch (estack_bx_t
) {
1120 case REG_S64
: /* Fall-through */
1126 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1127 case REG_STAR_GLOB_STRING
:
1131 ERR("Unknown interpreter register type (%d)",
1138 ERR("Unknown interpreter register type (%d)",
1146 /* Dynamic typing. */
1147 switch (estack_ax_t
) {
1148 case REG_S64
: /* Fall-through */
1150 switch (estack_bx_t
) {
1151 case REG_S64
: /* Fall-through */
1153 JUMP_TO(BYTECODE_OP_GT_S64
);
1155 JUMP_TO(BYTECODE_OP_GT_DOUBLE_S64
);
1156 case REG_STRING
: /* Fall-through */
1157 case REG_STAR_GLOB_STRING
:
1161 ERR("Unknown interpreter register type (%d)",
1168 switch (estack_bx_t
) {
1169 case REG_S64
: /* Fall-through */
1171 JUMP_TO(BYTECODE_OP_GT_S64_DOUBLE
);
1173 JUMP_TO(BYTECODE_OP_GT_DOUBLE
);
1174 case REG_STRING
: /* Fall-through */
1175 case REG_STAR_GLOB_STRING
:
1179 ERR("Unknown interpreter register type (%d)",
1186 switch (estack_bx_t
) {
1187 case REG_S64
: /* Fall-through */
1188 case REG_U64
: /* Fall-through */
1189 case REG_DOUBLE
: /* Fall-through */
1190 case REG_STAR_GLOB_STRING
:
1194 JUMP_TO(BYTECODE_OP_GT_STRING
);
1196 ERR("Unknown interpreter register type (%d)",
1203 ERR("Unknown interpreter register type (%d)",
1211 /* Dynamic typing. */
1212 switch (estack_ax_t
) {
1213 case REG_S64
: /* Fall-through */
1215 switch (estack_bx_t
) {
1216 case REG_S64
: /* Fall-through */
1218 JUMP_TO(BYTECODE_OP_LT_S64
);
1220 JUMP_TO(BYTECODE_OP_LT_DOUBLE_S64
);
1221 case REG_STRING
: /* Fall-through */
1222 case REG_STAR_GLOB_STRING
:
1226 ERR("Unknown interpreter register type (%d)",
1233 switch (estack_bx_t
) {
1234 case REG_S64
: /* Fall-through */
1236 JUMP_TO(BYTECODE_OP_LT_S64_DOUBLE
);
1238 JUMP_TO(BYTECODE_OP_LT_DOUBLE
);
1239 case REG_STRING
: /* Fall-through */
1240 case REG_STAR_GLOB_STRING
:
1244 ERR("Unknown interpreter register type (%d)",
1251 switch (estack_bx_t
) {
1252 case REG_S64
: /* Fall-through */
1253 case REG_U64
: /* Fall-through */
1254 case REG_DOUBLE
: /* Fall-through */
1255 case REG_STAR_GLOB_STRING
:
1259 JUMP_TO(BYTECODE_OP_LT_STRING
);
1261 ERR("Unknown interpreter register type (%d)",
1268 ERR("Unknown interpreter register type (%d)",
1276 /* Dynamic typing. */
1277 switch (estack_ax_t
) {
1278 case REG_S64
: /* Fall-through */
1280 switch (estack_bx_t
) {
1281 case REG_S64
: /* Fall-through */
1283 JUMP_TO(BYTECODE_OP_GE_S64
);
1285 JUMP_TO(BYTECODE_OP_GE_DOUBLE_S64
);
1286 case REG_STRING
: /* Fall-through */
1287 case REG_STAR_GLOB_STRING
:
1291 ERR("Unknown interpreter register type (%d)",
1298 switch (estack_bx_t
) {
1299 case REG_S64
: /* Fall-through */
1301 JUMP_TO(BYTECODE_OP_GE_S64_DOUBLE
);
1303 JUMP_TO(BYTECODE_OP_GE_DOUBLE
);
1304 case REG_STRING
: /* Fall-through */
1305 case REG_STAR_GLOB_STRING
:
1309 ERR("Unknown interpreter register type (%d)",
1316 switch (estack_bx_t
) {
1317 case REG_S64
: /* Fall-through */
1318 case REG_U64
: /* Fall-through */
1319 case REG_DOUBLE
: /* Fall-through */
1320 case REG_STAR_GLOB_STRING
:
1324 JUMP_TO(BYTECODE_OP_GE_STRING
);
1326 ERR("Unknown interpreter register type (%d)",
1333 ERR("Unknown interpreter register type (%d)",
1341 /* Dynamic typing. */
1342 switch (estack_ax_t
) {
1343 case REG_S64
: /* Fall-through */
1345 switch (estack_bx_t
) {
1346 case REG_S64
: /* Fall-through */
1348 JUMP_TO(BYTECODE_OP_LE_S64
);
1350 JUMP_TO(BYTECODE_OP_LE_DOUBLE_S64
);
1351 case REG_STRING
: /* Fall-through */
1352 case REG_STAR_GLOB_STRING
:
1356 ERR("Unknown interpreter register type (%d)",
1363 switch (estack_bx_t
) {
1364 case REG_S64
: /* Fall-through */
1366 JUMP_TO(BYTECODE_OP_LE_S64_DOUBLE
);
1368 JUMP_TO(BYTECODE_OP_LE_DOUBLE
);
1369 case REG_STRING
: /* Fall-through */
1370 case REG_STAR_GLOB_STRING
:
1374 ERR("Unknown interpreter register type (%d)",
1381 switch (estack_bx_t
) {
1382 case REG_S64
: /* Fall-through */
1383 case REG_U64
: /* Fall-through */
1384 case REG_DOUBLE
: /* Fall-through */
1385 case REG_STAR_GLOB_STRING
:
1389 JUMP_TO(BYTECODE_OP_LE_STRING
);
1391 ERR("Unknown interpreter register type (%d)",
1398 ERR("Unknown interpreter register type (%d)",
1405 OP(BYTECODE_OP_EQ_STRING
):
1409 res
= (stack_strcmp(stack
, top
, "==") == 0);
1410 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1412 estack_ax_t
= REG_S64
;
1413 next_pc
+= sizeof(struct binary_op
);
1416 OP(BYTECODE_OP_NE_STRING
):
1420 res
= (stack_strcmp(stack
, top
, "!=") != 0);
1421 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1423 estack_ax_t
= REG_S64
;
1424 next_pc
+= sizeof(struct binary_op
);
1427 OP(BYTECODE_OP_GT_STRING
):
1431 res
= (stack_strcmp(stack
, top
, ">") > 0);
1432 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1434 estack_ax_t
= REG_S64
;
1435 next_pc
+= sizeof(struct binary_op
);
1438 OP(BYTECODE_OP_LT_STRING
):
1442 res
= (stack_strcmp(stack
, top
, "<") < 0);
1443 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1445 estack_ax_t
= REG_S64
;
1446 next_pc
+= sizeof(struct binary_op
);
1449 OP(BYTECODE_OP_GE_STRING
):
1453 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
1454 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1456 estack_ax_t
= REG_S64
;
1457 next_pc
+= sizeof(struct binary_op
);
1460 OP(BYTECODE_OP_LE_STRING
):
1464 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
1465 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1467 estack_ax_t
= REG_S64
;
1468 next_pc
+= sizeof(struct binary_op
);
1472 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING
):
1476 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
1477 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1479 estack_ax_t
= REG_S64
;
1480 next_pc
+= sizeof(struct binary_op
);
1483 OP(BYTECODE_OP_NE_STAR_GLOB_STRING
):
1487 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
1488 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1490 estack_ax_t
= REG_S64
;
1491 next_pc
+= sizeof(struct binary_op
);
1495 OP(BYTECODE_OP_EQ_S64
):
1499 res
= (estack_bx_v
== estack_ax_v
);
1500 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1502 estack_ax_t
= REG_S64
;
1503 next_pc
+= sizeof(struct binary_op
);
1506 OP(BYTECODE_OP_NE_S64
):
1510 res
= (estack_bx_v
!= estack_ax_v
);
1511 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1513 estack_ax_t
= REG_S64
;
1514 next_pc
+= sizeof(struct binary_op
);
1517 OP(BYTECODE_OP_GT_S64
):
1521 res
= (estack_bx_v
> estack_ax_v
);
1522 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1524 estack_ax_t
= REG_S64
;
1525 next_pc
+= sizeof(struct binary_op
);
1528 OP(BYTECODE_OP_LT_S64
):
1532 res
= (estack_bx_v
< estack_ax_v
);
1533 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1535 estack_ax_t
= REG_S64
;
1536 next_pc
+= sizeof(struct binary_op
);
1539 OP(BYTECODE_OP_GE_S64
):
1543 res
= (estack_bx_v
>= estack_ax_v
);
1544 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1546 estack_ax_t
= REG_S64
;
1547 next_pc
+= sizeof(struct binary_op
);
1550 OP(BYTECODE_OP_LE_S64
):
1554 res
= (estack_bx_v
<= estack_ax_v
);
1555 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1557 estack_ax_t
= REG_S64
;
1558 next_pc
+= sizeof(struct binary_op
);
1562 OP(BYTECODE_OP_EQ_DOUBLE
):
1566 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
1567 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1569 estack_ax_t
= REG_S64
;
1570 next_pc
+= sizeof(struct binary_op
);
1573 OP(BYTECODE_OP_NE_DOUBLE
):
1577 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
1578 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1580 estack_ax_t
= REG_S64
;
1581 next_pc
+= sizeof(struct binary_op
);
1584 OP(BYTECODE_OP_GT_DOUBLE
):
1588 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
1589 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1591 estack_ax_t
= REG_S64
;
1592 next_pc
+= sizeof(struct binary_op
);
1595 OP(BYTECODE_OP_LT_DOUBLE
):
1599 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
1600 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1602 estack_ax_t
= REG_S64
;
1603 next_pc
+= sizeof(struct binary_op
);
1606 OP(BYTECODE_OP_GE_DOUBLE
):
1610 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
1611 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1613 estack_ax_t
= REG_S64
;
1614 next_pc
+= sizeof(struct binary_op
);
1617 OP(BYTECODE_OP_LE_DOUBLE
):
1621 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
1622 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1624 estack_ax_t
= REG_S64
;
1625 next_pc
+= sizeof(struct binary_op
);
1629 /* Mixed S64-double binary comparators */
1630 OP(BYTECODE_OP_EQ_DOUBLE_S64
):
1634 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
1635 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1637 estack_ax_t
= REG_S64
;
1638 next_pc
+= sizeof(struct binary_op
);
1641 OP(BYTECODE_OP_NE_DOUBLE_S64
):
1645 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
1646 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1648 estack_ax_t
= REG_S64
;
1649 next_pc
+= sizeof(struct binary_op
);
1652 OP(BYTECODE_OP_GT_DOUBLE_S64
):
1656 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
1657 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1659 estack_ax_t
= REG_S64
;
1660 next_pc
+= sizeof(struct binary_op
);
1663 OP(BYTECODE_OP_LT_DOUBLE_S64
):
1667 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
1668 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1670 estack_ax_t
= REG_S64
;
1671 next_pc
+= sizeof(struct binary_op
);
1674 OP(BYTECODE_OP_GE_DOUBLE_S64
):
1678 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
1679 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1681 estack_ax_t
= REG_S64
;
1682 next_pc
+= sizeof(struct binary_op
);
1685 OP(BYTECODE_OP_LE_DOUBLE_S64
):
1689 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
1690 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1692 estack_ax_t
= REG_S64
;
1693 next_pc
+= sizeof(struct binary_op
);
1697 OP(BYTECODE_OP_EQ_S64_DOUBLE
):
1701 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
1702 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1704 estack_ax_t
= REG_S64
;
1705 next_pc
+= sizeof(struct binary_op
);
1708 OP(BYTECODE_OP_NE_S64_DOUBLE
):
1712 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
1713 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1715 estack_ax_t
= REG_S64
;
1716 next_pc
+= sizeof(struct binary_op
);
1719 OP(BYTECODE_OP_GT_S64_DOUBLE
):
1723 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
1724 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1726 estack_ax_t
= REG_S64
;
1727 next_pc
+= sizeof(struct binary_op
);
1730 OP(BYTECODE_OP_LT_S64_DOUBLE
):
1734 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
1735 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1737 estack_ax_t
= REG_S64
;
1738 next_pc
+= sizeof(struct binary_op
);
1741 OP(BYTECODE_OP_GE_S64_DOUBLE
):
1745 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
1746 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1748 estack_ax_t
= REG_S64
;
1749 next_pc
+= sizeof(struct binary_op
);
1752 OP(BYTECODE_OP_LE_S64_DOUBLE
):
1756 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
1757 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1759 estack_ax_t
= REG_S64
;
1760 next_pc
+= sizeof(struct binary_op
);
1763 OP(BYTECODE_OP_BIT_RSHIFT
):
1767 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1772 /* Catch undefined behavior. */
1773 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1777 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1778 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1780 estack_ax_t
= REG_U64
;
1781 next_pc
+= sizeof(struct binary_op
);
1784 OP(BYTECODE_OP_BIT_LSHIFT
):
1788 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1793 /* Catch undefined behavior. */
1794 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1798 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1799 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1801 estack_ax_t
= REG_U64
;
1802 next_pc
+= sizeof(struct binary_op
);
1805 OP(BYTECODE_OP_BIT_AND
):
1809 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1814 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1815 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1817 estack_ax_t
= REG_U64
;
1818 next_pc
+= sizeof(struct binary_op
);
1821 OP(BYTECODE_OP_BIT_OR
):
1825 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1830 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1831 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1833 estack_ax_t
= REG_U64
;
1834 next_pc
+= sizeof(struct binary_op
);
1837 OP(BYTECODE_OP_BIT_XOR
):
1841 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1846 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1847 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1849 estack_ax_t
= REG_U64
;
1850 next_pc
+= sizeof(struct binary_op
);
1855 OP(BYTECODE_OP_UNARY_PLUS
):
1857 /* Dynamic typing. */
1858 switch (estack_ax_t
) {
1859 case REG_S64
: /* Fall-through. */
1861 JUMP_TO(BYTECODE_OP_UNARY_PLUS_S64
);
1863 JUMP_TO(BYTECODE_OP_UNARY_PLUS_DOUBLE
);
1864 case REG_STRING
: /* Fall-through */
1865 case REG_STAR_GLOB_STRING
:
1869 ERR("Unknown interpreter register type (%d)",
1875 OP(BYTECODE_OP_UNARY_MINUS
):
1877 /* Dynamic typing. */
1878 switch (estack_ax_t
) {
1879 case REG_S64
: /* Fall-through. */
1881 JUMP_TO(BYTECODE_OP_UNARY_MINUS_S64
);
1883 JUMP_TO(BYTECODE_OP_UNARY_MINUS_DOUBLE
);
1884 case REG_STRING
: /* Fall-through */
1885 case REG_STAR_GLOB_STRING
:
1889 ERR("Unknown interpreter register type (%d)",
1895 OP(BYTECODE_OP_UNARY_NOT
):
1897 /* Dynamic typing. */
1898 switch (estack_ax_t
) {
1899 case REG_S64
: /* Fall-through. */
1901 JUMP_TO(BYTECODE_OP_UNARY_NOT_S64
);
1903 JUMP_TO(BYTECODE_OP_UNARY_NOT_DOUBLE
);
1904 case REG_STRING
: /* Fall-through */
1905 case REG_STAR_GLOB_STRING
:
1909 ERR("Unknown interpreter register type (%d)",
1914 next_pc
+= sizeof(struct unary_op
);
1918 OP(BYTECODE_OP_UNARY_BIT_NOT
):
1920 /* Dynamic typing. */
1921 if (!IS_INTEGER_REGISTER(estack_ax_t
)) {
1926 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1927 estack_ax_t
= REG_U64
;
1928 next_pc
+= sizeof(struct unary_op
);
1932 OP(BYTECODE_OP_UNARY_PLUS_S64
):
1933 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE
):
1935 next_pc
+= sizeof(struct unary_op
);
1938 OP(BYTECODE_OP_UNARY_MINUS_S64
):
1940 estack_ax_v
= -estack_ax_v
;
1941 next_pc
+= sizeof(struct unary_op
);
1944 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE
):
1946 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
1947 next_pc
+= sizeof(struct unary_op
);
1950 OP(BYTECODE_OP_UNARY_NOT_S64
):
1952 estack_ax_v
= !estack_ax_v
;
1953 estack_ax_t
= REG_S64
;
1954 next_pc
+= sizeof(struct unary_op
);
1957 OP(BYTECODE_OP_UNARY_NOT_DOUBLE
):
1959 estack_ax_v
= !estack_ax(stack
, top
)->u
.d
;
1960 estack_ax_t
= REG_S64
;
1961 next_pc
+= sizeof(struct unary_op
);
1966 OP(BYTECODE_OP_AND
):
1968 struct logical_op
*insn
= (struct logical_op
*) pc
;
1970 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1974 /* If AX is 0, skip and evaluate to 0 */
1975 if (unlikely(estack_ax_v
== 0)) {
1976 dbg_printf("Jumping to bytecode offset %u\n",
1977 (unsigned int) insn
->skip_offset
);
1978 next_pc
= start_pc
+ insn
->skip_offset
;
1980 /* Pop 1 when jump not taken */
1981 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1982 next_pc
+= sizeof(struct logical_op
);
1988 struct logical_op
*insn
= (struct logical_op
*) pc
;
1990 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1994 /* If AX is nonzero, skip and evaluate to 1 */
1995 if (unlikely(estack_ax_v
!= 0)) {
1997 dbg_printf("Jumping to bytecode offset %u\n",
1998 (unsigned int) insn
->skip_offset
);
1999 next_pc
= start_pc
+ insn
->skip_offset
;
2001 /* Pop 1 when jump not taken */
2002 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2003 next_pc
+= sizeof(struct logical_op
);
2009 /* load field ref */
2010 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING
):
2012 struct load_op
*insn
= (struct load_op
*) pc
;
2013 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2015 dbg_printf("load field ref offset %u type string\n",
2017 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2018 estack_ax(stack
, top
)->u
.s
.str
=
2019 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
2020 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2021 dbg_printf("Interpreter warning: loading a NULL string.\n");
2025 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2026 estack_ax(stack
, top
)->u
.s
.literal_type
=
2027 ESTACK_STRING_LITERAL_TYPE_NONE
;
2028 estack_ax_t
= REG_STRING
;
2029 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2030 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2034 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
):
2036 struct load_op
*insn
= (struct load_op
*) pc
;
2037 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2039 dbg_printf("load field ref offset %u type sequence\n",
2041 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2042 estack_ax(stack
, top
)->u
.s
.seq_len
=
2043 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
2044 estack_ax(stack
, top
)->u
.s
.str
=
2045 *(const char **) (&interpreter_stack_data
[ref
->offset
2046 + sizeof(unsigned long)]);
2047 estack_ax_t
= REG_STRING
;
2048 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2049 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2053 estack_ax(stack
, top
)->u
.s
.literal_type
=
2054 ESTACK_STRING_LITERAL_TYPE_NONE
;
2055 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2059 OP(BYTECODE_OP_LOAD_FIELD_REF_S64
):
2061 struct load_op
*insn
= (struct load_op
*) pc
;
2062 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2064 dbg_printf("load field ref offset %u type s64\n",
2066 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2068 ((struct literal_numeric
*) &interpreter_stack_data
[ref
->offset
])->v
;
2069 estack_ax_t
= REG_S64
;
2070 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
2071 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2075 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
):
2077 struct load_op
*insn
= (struct load_op
*) pc
;
2078 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2080 dbg_printf("load field ref offset %u type double\n",
2082 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2083 memcpy(&estack_ax(stack
, top
)->u
.d
, &interpreter_stack_data
[ref
->offset
],
2084 sizeof(struct literal_double
));
2085 estack_ax_t
= REG_DOUBLE
;
2086 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
2087 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2091 /* load from immediate operand */
2092 OP(BYTECODE_OP_LOAD_STRING
):
2094 struct load_op
*insn
= (struct load_op
*) pc
;
2096 dbg_printf("load string %s\n", insn
->data
);
2097 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2098 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2099 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2100 estack_ax(stack
, top
)->u
.s
.literal_type
=
2101 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
2102 estack_ax_t
= REG_STRING
;
2103 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2107 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING
):
2109 struct load_op
*insn
= (struct load_op
*) pc
;
2111 dbg_printf("load globbing pattern %s\n", insn
->data
);
2112 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2113 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2114 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2115 estack_ax(stack
, top
)->u
.s
.literal_type
=
2116 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
2117 estack_ax_t
= REG_STAR_GLOB_STRING
;
2118 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2122 OP(BYTECODE_OP_LOAD_S64
):
2124 struct load_op
*insn
= (struct load_op
*) pc
;
2126 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2127 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
2128 estack_ax_t
= REG_S64
;
2129 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
2130 next_pc
+= sizeof(struct load_op
)
2131 + sizeof(struct literal_numeric
);
2135 OP(BYTECODE_OP_LOAD_DOUBLE
):
2137 struct load_op
*insn
= (struct load_op
*) pc
;
2139 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2140 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
2141 sizeof(struct literal_double
));
2142 estack_ax_t
= REG_DOUBLE
;
2143 dbg_printf("load double %g\n", estack_ax(stack
, top
)->u
.d
);
2144 next_pc
+= sizeof(struct load_op
)
2145 + sizeof(struct literal_double
);
2150 OP(BYTECODE_OP_CAST_TO_S64
):
2152 /* Dynamic typing. */
2153 switch (estack_ax_t
) {
2155 JUMP_TO(BYTECODE_OP_CAST_NOP
);
2157 JUMP_TO(BYTECODE_OP_CAST_DOUBLE_TO_S64
);
2159 estack_ax_t
= REG_S64
;
2160 next_pc
+= sizeof(struct cast_op
);
2161 case REG_STRING
: /* Fall-through */
2162 case REG_STAR_GLOB_STRING
:
2166 ERR("Unknown interpreter register type (%d)",
2173 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64
):
2175 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
2176 estack_ax_t
= REG_S64
;
2177 next_pc
+= sizeof(struct cast_op
);
2181 OP(BYTECODE_OP_CAST_NOP
):
2183 next_pc
+= sizeof(struct cast_op
);
2187 /* get context ref */
2188 OP(BYTECODE_OP_GET_CONTEXT_REF
):
2190 struct load_op
*insn
= (struct load_op
*) pc
;
2191 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2192 struct lttng_ctx_field
*ctx_field
;
2193 struct lttng_ctx_value v
;
2195 dbg_printf("get context ref offset %u type dynamic\n",
2197 ctx_field
= &ctx
->fields
[ref
->offset
];
2198 ctx_field
->get_value(ctx_field
, &v
);
2199 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2201 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
2204 case LTTNG_UST_DYNAMIC_TYPE_S64
:
2205 estack_ax_v
= v
.u
.s64
;
2206 estack_ax_t
= REG_S64
;
2207 dbg_printf("ref get context dynamic s64 %" PRIi64
"\n", estack_ax_v
);
2209 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
2210 estack_ax(stack
, top
)->u
.d
= v
.u
.d
;
2211 estack_ax_t
= REG_DOUBLE
;
2212 dbg_printf("ref get context dynamic double %g\n", estack_ax(stack
, top
)->u
.d
);
2214 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
2215 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2216 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2217 dbg_printf("Interpreter warning: loading a NULL string.\n");
2221 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2222 estack_ax(stack
, top
)->u
.s
.literal_type
=
2223 ESTACK_STRING_LITERAL_TYPE_NONE
;
2224 dbg_printf("ref get context dynamic string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2225 estack_ax_t
= REG_STRING
;
2228 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
2232 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2236 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING
):
2238 struct load_op
*insn
= (struct load_op
*) pc
;
2239 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2240 struct lttng_ctx_field
*ctx_field
;
2241 struct lttng_ctx_value v
;
2243 dbg_printf("get context ref offset %u type string\n",
2245 ctx_field
= &ctx
->fields
[ref
->offset
];
2246 ctx_field
->get_value(ctx_field
, &v
);
2247 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2248 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2249 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2250 dbg_printf("Interpreter warning: loading a NULL string.\n");
2254 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2255 estack_ax(stack
, top
)->u
.s
.literal_type
=
2256 ESTACK_STRING_LITERAL_TYPE_NONE
;
2257 estack_ax_t
= REG_STRING
;
2258 dbg_printf("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2259 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2263 OP(BYTECODE_OP_GET_CONTEXT_REF_S64
):
2265 struct load_op
*insn
= (struct load_op
*) pc
;
2266 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2267 struct lttng_ctx_field
*ctx_field
;
2268 struct lttng_ctx_value v
;
2270 dbg_printf("get context ref offset %u type s64\n",
2272 ctx_field
= &ctx
->fields
[ref
->offset
];
2273 ctx_field
->get_value(ctx_field
, &v
);
2274 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2275 estack_ax_v
= v
.u
.s64
;
2276 estack_ax_t
= REG_S64
;
2277 dbg_printf("ref get context s64 %" PRIi64
"\n", estack_ax_v
);
2278 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2282 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
):
2284 struct load_op
*insn
= (struct load_op
*) pc
;
2285 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2286 struct lttng_ctx_field
*ctx_field
;
2287 struct lttng_ctx_value v
;
2289 dbg_printf("get context ref offset %u type double\n",
2291 ctx_field
= &ctx
->fields
[ref
->offset
];
2292 ctx_field
->get_value(ctx_field
, &v
);
2293 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2294 memcpy(&estack_ax(stack
, top
)->u
.d
, &v
.u
.d
, sizeof(struct literal_double
));
2295 estack_ax_t
= REG_DOUBLE
;
2296 dbg_printf("ref get context double %g\n", estack_ax(stack
, top
)->u
.d
);
2297 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2301 OP(BYTECODE_OP_GET_CONTEXT_ROOT
):
2303 dbg_printf("op get context root\n");
2304 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2305 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
2306 /* "field" only needed for variants. */
2307 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2308 estack_ax_t
= REG_PTR
;
2309 next_pc
+= sizeof(struct load_op
);
2313 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT
):
2315 dbg_printf("op get app context root\n");
2316 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2317 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_APP_CONTEXT
;
2318 /* "field" only needed for variants. */
2319 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2320 estack_ax_t
= REG_PTR
;
2321 next_pc
+= sizeof(struct load_op
);
2325 OP(BYTECODE_OP_GET_PAYLOAD_ROOT
):
2327 dbg_printf("op get app payload root\n");
2328 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2329 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
2330 estack_ax(stack
, top
)->u
.ptr
.ptr
= interpreter_stack_data
;
2331 /* "field" only needed for variants. */
2332 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2333 estack_ax_t
= REG_PTR
;
2334 next_pc
+= sizeof(struct load_op
);
2338 OP(BYTECODE_OP_GET_SYMBOL
):
2340 dbg_printf("op get symbol\n");
2341 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
2343 ERR("Nested fields not implemented yet.");
2346 case LOAD_ROOT_CONTEXT
:
2347 case LOAD_ROOT_APP_CONTEXT
:
2348 case LOAD_ROOT_PAYLOAD
:
2350 * symbol lookup is performed by
2356 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
2360 OP(BYTECODE_OP_GET_SYMBOL_FIELD
):
2363 * Used for first variant encountered in a
2364 * traversal. Variants are not implemented yet.
2370 OP(BYTECODE_OP_GET_INDEX_U16
):
2372 struct load_op
*insn
= (struct load_op
*) pc
;
2373 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
2375 dbg_printf("op get index u16\n");
2376 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2379 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2380 estack_ax_t
= estack_ax(stack
, top
)->type
;
2381 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
2385 OP(BYTECODE_OP_GET_INDEX_U64
):
2387 struct load_op
*insn
= (struct load_op
*) pc
;
2388 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
2390 dbg_printf("op get index u64\n");
2391 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2394 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2395 estack_ax_t
= estack_ax(stack
, top
)->type
;
2396 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
2400 OP(BYTECODE_OP_LOAD_FIELD
):
2402 dbg_printf("op load field\n");
2403 ret
= dynamic_load_field(estack_ax(stack
, top
));
2406 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2407 estack_ax_t
= estack_ax(stack
, top
)->type
;
2408 next_pc
+= sizeof(struct load_op
);
2412 OP(BYTECODE_OP_LOAD_FIELD_S8
):
2414 dbg_printf("op load field s8\n");
2416 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2417 estack_ax_t
= REG_S64
;
2418 next_pc
+= sizeof(struct load_op
);
2421 OP(BYTECODE_OP_LOAD_FIELD_S16
):
2423 dbg_printf("op load field s16\n");
2425 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2426 estack_ax_t
= REG_S64
;
2427 next_pc
+= sizeof(struct load_op
);
2430 OP(BYTECODE_OP_LOAD_FIELD_S32
):
2432 dbg_printf("op load field s32\n");
2434 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2435 estack_ax_t
= REG_S64
;
2436 next_pc
+= sizeof(struct load_op
);
2439 OP(BYTECODE_OP_LOAD_FIELD_S64
):
2441 dbg_printf("op load field s64\n");
2443 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2444 estack_ax_t
= REG_S64
;
2445 next_pc
+= sizeof(struct load_op
);
2448 OP(BYTECODE_OP_LOAD_FIELD_U8
):
2450 dbg_printf("op load field u8\n");
2452 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2453 estack_ax_t
= REG_U64
;
2454 next_pc
+= sizeof(struct load_op
);
2457 OP(BYTECODE_OP_LOAD_FIELD_U16
):
2459 dbg_printf("op load field u16\n");
2461 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2462 estack_ax_t
= REG_U64
;
2463 next_pc
+= sizeof(struct load_op
);
2466 OP(BYTECODE_OP_LOAD_FIELD_U32
):
2468 dbg_printf("op load field u32\n");
2470 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2471 estack_ax_t
= REG_U64
;
2472 next_pc
+= sizeof(struct load_op
);
2475 OP(BYTECODE_OP_LOAD_FIELD_U64
):
2477 dbg_printf("op load field u64\n");
2479 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2480 estack_ax_t
= REG_U64
;
2481 next_pc
+= sizeof(struct load_op
);
2484 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE
):
2486 dbg_printf("op load field double\n");
2488 memcpy(&estack_ax(stack
, top
)->u
.d
,
2489 estack_ax(stack
, top
)->u
.ptr
.ptr
,
2490 sizeof(struct literal_double
));
2491 estack_ax(stack
, top
)->type
= REG_DOUBLE
;
2492 next_pc
+= sizeof(struct load_op
);
2496 OP(BYTECODE_OP_LOAD_FIELD_STRING
):
2500 dbg_printf("op load field string\n");
2501 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2502 estack_ax(stack
, top
)->u
.s
.str
= str
;
2503 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2504 dbg_printf("Interpreter warning: loading a NULL string.\n");
2508 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2509 estack_ax(stack
, top
)->u
.s
.literal_type
=
2510 ESTACK_STRING_LITERAL_TYPE_NONE
;
2511 estack_ax(stack
, top
)->type
= REG_STRING
;
2512 next_pc
+= sizeof(struct load_op
);
2516 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE
):
2520 dbg_printf("op load field string sequence\n");
2521 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
2522 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
2523 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
2524 estack_ax(stack
, top
)->type
= REG_STRING
;
2525 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2526 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2530 estack_ax(stack
, top
)->u
.s
.literal_type
=
2531 ESTACK_STRING_LITERAL_TYPE_NONE
;
2532 next_pc
+= sizeof(struct load_op
);
2538 /* Return _DISCARD on error. */
2540 return LTTNG_INTERPRETER_DISCARD
;
2543 return lttng_bytecode_interpret_format_output(estack_ax(stack
, top
),
/*
 * Entry point for filter bytecode interpretation.
 *
 * Thin wrapper around bytecode_interpret(): forwards the filter's
 * runtime data and the captured stack data, passing NULL for the
 * interpreter output structure (no capture output is produced for
 * filters).
 *
 * Returns the interpreter result as a uint64_t, as produced by
 * bytecode_interpret().
 */
uint64_t lttng_bytecode_filter_interpret(void *filter_data,
		const char *filter_stack_data)
{
	return bytecode_interpret(filter_data, filter_stack_data, NULL);
}
/*
 * Entry point for capture bytecode interpretation.
 *
 * Thin wrapper around bytecode_interpret(): forwards the capture's
 * runtime data and the captured stack data, along with the caller's
 * interpreter output structure so the interpreted value can be
 * reported back through @output.
 *
 * Returns the interpreter result as a uint64_t, as produced by
 * bytecode_interpret().
 */
uint64_t lttng_bytecode_capture_interpret(void *capture_data,
		const char *capture_stack_data,
		struct lttng_interpreter_output *output)
{
	return bytecode_interpret(capture_data, capture_stack_data,
			(struct lttng_interpreter_output *) output);
}