/*
 * lttng-bytecode-interpreter.c
 *
 * LTTng UST bytecode interpreter.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
30 #include <urcu-pointer.h>
33 #include <lttng/ust-endian.h>
34 #include <lttng/ust-events.h>
36 #include "lttng-bytecode.h"
37 #include "string-utils.h"
/*
 * Parse one character of a pattern string, advancing *p past any escape
 * prefix. Return codes:
 * -1: wildcard found.
 * -2: unknown escape char.
 *  0: normal char.
 */
static
int parse_char(const char **p)
{
	switch (**p) {
	case '\\':
		/* Skip the backslash; classify the escaped character. */
		(*p)++;
		switch (**p) {
		case '\\':
		case '*':
			/* Escaped backslash or star: treat as a literal char. */
			return 0;
		default:
			return -2;
		}
	case '*':
		return -1;
	default:
		return 0;
	}
}
67 * Returns SIZE_MAX if the string is null-terminated, or the number of
71 size_t get_str_or_seq_len(const struct estack_entry
*entry
)
73 return entry
->u
.s
.seq_len
;
77 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
80 const char *candidate
;
84 /* Find out which side is the pattern vs. the candidate. */
85 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
86 pattern
= estack_ax(stack
, top
)->u
.s
.str
;
87 pattern_len
= get_str_or_seq_len(estack_ax(stack
, top
));
88 candidate
= estack_bx(stack
, top
)->u
.s
.str
;
89 candidate_len
= get_str_or_seq_len(estack_bx(stack
, top
));
91 pattern
= estack_bx(stack
, top
)->u
.s
.str
;
92 pattern_len
= get_str_or_seq_len(estack_bx(stack
, top
));
93 candidate
= estack_ax(stack
, top
)->u
.s
.str
;
94 candidate_len
= get_str_or_seq_len(estack_ax(stack
, top
));
97 /* Perform the match. Returns 0 when the result is true. */
98 return !strutils_star_glob_match(pattern
, pattern_len
, candidate
,
103 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
105 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
112 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
>= estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
113 if (q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
116 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
117 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
118 ret
= parse_char(&q
);
125 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
126 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
127 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
128 ret
= parse_char(&p
);
134 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
135 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
136 ret
= parse_char(&p
);
139 } else if (ret
== -2) {
142 /* else compare both char */
144 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
145 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
146 ret
= parse_char(&q
);
149 } else if (ret
== -2) {
169 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data
,
170 const char *filter_stack_data
)
172 return LTTNG_INTERPRETER_DISCARD
;
175 uint64_t lttng_bytecode_capture_interpret_false(void *capture_data
,
176 const char *capture_stack_data
,
177 struct lttng_interpreter_output
*output
)
179 return LTTNG_INTERPRETER_DISCARD
;
182 #ifdef INTERPRETER_USE_SWITCH
185 * Fallback for compilers that do not support taking address of labels.
189 start_pc = &bytecode->data[0]; \
190 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
192 dbg_printf("Executing op %s (%u)\n", \
193 print_op((unsigned int) *(bytecode_opcode_t *) pc), \
194 (unsigned int) *(bytecode_opcode_t *) pc); \
195 switch (*(bytecode_opcode_t *) pc) {
197 #define OP(name) jump_target_##name: __attribute__((unused)); \
205 #define JUMP_TO(name) \
206 goto jump_target_##name
211 * Dispatch-table based interpreter.
215 start_pc = &bytecode->code[0]; \
216 pc = next_pc = start_pc; \
217 if (unlikely(pc - start_pc >= bytecode->len)) \
219 goto *dispatch[*(bytecode_opcode_t *) pc];
226 goto *dispatch[*(bytecode_opcode_t *) pc];
230 #define JUMP_TO(name) \
235 #define IS_INTEGER_REGISTER(reg_type) \
236 (reg_type == REG_U64 || reg_type == REG_S64)
238 static int context_get_index(struct lttng_ctx
*ctx
,
239 struct load_ptr
*ptr
,
243 struct lttng_ctx_field
*ctx_field
;
244 struct lttng_event_field
*field
;
245 struct lttng_ctx_value v
;
247 ctx_field
= &ctx
->fields
[idx
];
248 field
= &ctx_field
->event_field
;
249 ptr
->type
= LOAD_OBJECT
;
252 switch (field
->type
.atype
) {
254 ctx_field
->get_value(ctx_field
, &v
);
255 if (field
->type
.u
.integer
.signedness
) {
256 ptr
->object_type
= OBJECT_TYPE_S64
;
257 ptr
->u
.s64
= v
.u
.s64
;
258 ptr
->ptr
= &ptr
->u
.s64
;
260 ptr
->object_type
= OBJECT_TYPE_U64
;
261 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
262 ptr
->ptr
= &ptr
->u
.u64
;
265 case atype_enum
: /* Fall-through */
266 case atype_enum_nestable
:
268 const struct lttng_integer_type
*itype
;
270 if (field
->type
.atype
== atype_enum
) {
271 itype
= &field
->type
.u
.legacy
.basic
.enumeration
.container_type
;
273 itype
= &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
275 ctx_field
->get_value(ctx_field
, &v
);
276 if (itype
->signedness
) {
277 ptr
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
278 ptr
->u
.s64
= v
.u
.s64
;
279 ptr
->ptr
= &ptr
->u
.s64
;
281 ptr
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
282 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
283 ptr
->ptr
= &ptr
->u
.u64
;
288 if (field
->type
.u
.legacy
.array
.elem_type
.atype
!= atype_integer
) {
289 ERR("Array nesting only supports integer types.");
292 if (field
->type
.u
.legacy
.array
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
293 ERR("Only string arrays are supported for contexts.");
296 ptr
->object_type
= OBJECT_TYPE_STRING
;
297 ctx_field
->get_value(ctx_field
, &v
);
300 case atype_array_nestable
:
301 if (field
->type
.u
.array_nestable
.elem_type
->atype
!= atype_integer
) {
302 ERR("Array nesting only supports integer types.");
305 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
306 ERR("Only string arrays are supported for contexts.");
309 ptr
->object_type
= OBJECT_TYPE_STRING
;
310 ctx_field
->get_value(ctx_field
, &v
);
314 if (field
->type
.u
.legacy
.sequence
.elem_type
.atype
!= atype_integer
) {
315 ERR("Sequence nesting only supports integer types.");
318 if (field
->type
.u
.legacy
.sequence
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
319 ERR("Only string sequences are supported for contexts.");
322 ptr
->object_type
= OBJECT_TYPE_STRING
;
323 ctx_field
->get_value(ctx_field
, &v
);
326 case atype_sequence_nestable
:
327 if (field
->type
.u
.sequence_nestable
.elem_type
->atype
!= atype_integer
) {
328 ERR("Sequence nesting only supports integer types.");
331 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
332 ERR("Only string sequences are supported for contexts.");
335 ptr
->object_type
= OBJECT_TYPE_STRING
;
336 ctx_field
->get_value(ctx_field
, &v
);
340 ptr
->object_type
= OBJECT_TYPE_STRING
;
341 ctx_field
->get_value(ctx_field
, &v
);
345 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
346 ctx_field
->get_value(ctx_field
, &v
);
348 ptr
->ptr
= &ptr
->u
.d
;
351 ctx_field
->get_value(ctx_field
, &v
);
353 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
355 case LTTNG_UST_DYNAMIC_TYPE_U8
:
356 case LTTNG_UST_DYNAMIC_TYPE_U16
:
357 case LTTNG_UST_DYNAMIC_TYPE_U32
:
358 case LTTNG_UST_DYNAMIC_TYPE_U64
:
359 ptr
->object_type
= OBJECT_TYPE_U64
;
360 ptr
->u
.u64
= v
.u
.u64
;
361 ptr
->ptr
= &ptr
->u
.u64
;
362 dbg_printf("context get index dynamic u64 %" PRIi64
"\n", ptr
->u
.u64
);
364 case LTTNG_UST_DYNAMIC_TYPE_S8
:
365 case LTTNG_UST_DYNAMIC_TYPE_S16
:
366 case LTTNG_UST_DYNAMIC_TYPE_S32
:
367 case LTTNG_UST_DYNAMIC_TYPE_S64
:
368 ptr
->object_type
= OBJECT_TYPE_S64
;
369 ptr
->u
.s64
= v
.u
.s64
;
370 ptr
->ptr
= &ptr
->u
.s64
;
371 dbg_printf("context get index dynamic s64 %" PRIi64
"\n", ptr
->u
.s64
);
373 case LTTNG_UST_DYNAMIC_TYPE_FLOAT
:
374 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
375 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
377 ptr
->ptr
= &ptr
->u
.d
;
378 dbg_printf("context get index dynamic double %g\n", ptr
->u
.d
);
380 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
381 ptr
->object_type
= OBJECT_TYPE_STRING
;
383 dbg_printf("context get index dynamic string %s\n", (const char *) ptr
->ptr
);
386 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
391 ERR("Structure type cannot be loaded.");
394 ERR("Unknown type: %d", (int) field
->type
.atype
);
400 static int dynamic_get_index(struct lttng_ctx
*ctx
,
401 struct bytecode_runtime
*runtime
,
402 uint64_t index
, struct estack_entry
*stack_top
)
405 const struct bytecode_get_index_data
*gid
;
407 gid
= (const struct bytecode_get_index_data
*) &runtime
->data
[index
];
408 switch (stack_top
->u
.ptr
.type
) {
410 switch (stack_top
->u
.ptr
.object_type
) {
411 case OBJECT_TYPE_ARRAY
:
415 assert(gid
->offset
< gid
->array_len
);
416 /* Skip count (unsigned long) */
417 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
418 ptr
= ptr
+ gid
->offset
;
419 stack_top
->u
.ptr
.ptr
= ptr
;
420 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
421 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
422 assert(stack_top
->u
.ptr
.field
->type
.atype
== atype_array
||
423 stack_top
->u
.ptr
.field
->type
.atype
== atype_array_nestable
);
424 stack_top
->u
.ptr
.field
= NULL
;
427 case OBJECT_TYPE_SEQUENCE
:
432 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
433 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
434 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
438 ptr
= ptr
+ gid
->offset
;
439 stack_top
->u
.ptr
.ptr
= ptr
;
440 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
441 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
442 assert(stack_top
->u
.ptr
.field
->type
.atype
== atype_sequence
||
443 stack_top
->u
.ptr
.field
->type
.atype
== atype_sequence_nestable
);
444 stack_top
->u
.ptr
.field
= NULL
;
447 case OBJECT_TYPE_STRUCT
:
448 ERR("Nested structures are not supported yet.");
451 case OBJECT_TYPE_VARIANT
:
453 ERR("Unexpected get index type %d",
454 (int) stack_top
->u
.ptr
.object_type
);
459 case LOAD_ROOT_CONTEXT
:
460 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
462 ret
= context_get_index(ctx
,
470 case LOAD_ROOT_PAYLOAD
:
471 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
472 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
473 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
474 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
475 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
476 stack_top
->u
.ptr
.field
= gid
->field
;
477 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
481 stack_top
->type
= REG_PTR
;
489 static int dynamic_load_field(struct estack_entry
*stack_top
)
493 switch (stack_top
->u
.ptr
.type
) {
496 case LOAD_ROOT_CONTEXT
:
497 case LOAD_ROOT_APP_CONTEXT
:
498 case LOAD_ROOT_PAYLOAD
:
500 dbg_printf("Interpreter warning: cannot load root, missing field name.\n");
504 switch (stack_top
->u
.ptr
.object_type
) {
506 dbg_printf("op load field s8\n");
507 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
508 stack_top
->type
= REG_S64
;
510 case OBJECT_TYPE_S16
:
514 dbg_printf("op load field s16\n");
515 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
516 if (stack_top
->u
.ptr
.rev_bo
)
518 stack_top
->u
.v
= tmp
;
519 stack_top
->type
= REG_S64
;
522 case OBJECT_TYPE_S32
:
526 dbg_printf("op load field s32\n");
527 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
528 if (stack_top
->u
.ptr
.rev_bo
)
530 stack_top
->u
.v
= tmp
;
531 stack_top
->type
= REG_S64
;
534 case OBJECT_TYPE_S64
:
538 dbg_printf("op load field s64\n");
539 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
540 if (stack_top
->u
.ptr
.rev_bo
)
542 stack_top
->u
.v
= tmp
;
543 stack_top
->type
= REG_S64
;
546 case OBJECT_TYPE_SIGNED_ENUM
:
550 dbg_printf("op load field signed enumeration\n");
551 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
552 if (stack_top
->u
.ptr
.rev_bo
)
554 stack_top
->u
.v
= tmp
;
555 stack_top
->type
= REG_S64
;
559 dbg_printf("op load field u8\n");
560 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
561 stack_top
->type
= REG_U64
;
563 case OBJECT_TYPE_U16
:
567 dbg_printf("op load field u16\n");
568 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
569 if (stack_top
->u
.ptr
.rev_bo
)
571 stack_top
->u
.v
= tmp
;
572 stack_top
->type
= REG_U64
;
575 case OBJECT_TYPE_U32
:
579 dbg_printf("op load field u32\n");
580 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
581 if (stack_top
->u
.ptr
.rev_bo
)
583 stack_top
->u
.v
= tmp
;
584 stack_top
->type
= REG_U64
;
587 case OBJECT_TYPE_U64
:
591 dbg_printf("op load field u64\n");
592 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
593 if (stack_top
->u
.ptr
.rev_bo
)
595 stack_top
->u
.v
= tmp
;
596 stack_top
->type
= REG_U64
;
599 case OBJECT_TYPE_UNSIGNED_ENUM
:
603 dbg_printf("op load field unsigned enumeration\n");
604 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
605 if (stack_top
->u
.ptr
.rev_bo
)
607 stack_top
->u
.v
= tmp
;
608 stack_top
->type
= REG_U64
;
611 case OBJECT_TYPE_DOUBLE
:
612 memcpy(&stack_top
->u
.d
,
613 stack_top
->u
.ptr
.ptr
,
614 sizeof(struct literal_double
));
615 stack_top
->type
= REG_DOUBLE
;
617 case OBJECT_TYPE_STRING
:
621 dbg_printf("op load field string\n");
622 str
= (const char *) stack_top
->u
.ptr
.ptr
;
623 stack_top
->u
.s
.str
= str
;
624 if (unlikely(!stack_top
->u
.s
.str
)) {
625 dbg_printf("Interpreter warning: loading a NULL string.\n");
629 stack_top
->u
.s
.seq_len
= SIZE_MAX
;
630 stack_top
->u
.s
.literal_type
=
631 ESTACK_STRING_LITERAL_TYPE_NONE
;
632 stack_top
->type
= REG_STRING
;
635 case OBJECT_TYPE_STRING_SEQUENCE
:
639 dbg_printf("op load field string sequence\n");
640 ptr
= stack_top
->u
.ptr
.ptr
;
641 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
642 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
643 stack_top
->type
= REG_STRING
;
644 if (unlikely(!stack_top
->u
.s
.str
)) {
645 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
649 stack_top
->u
.s
.literal_type
=
650 ESTACK_STRING_LITERAL_TYPE_NONE
;
653 case OBJECT_TYPE_DYNAMIC
:
655 * Dynamic types in context are looked up
656 * by context get index.
660 case OBJECT_TYPE_SEQUENCE
:
661 case OBJECT_TYPE_ARRAY
:
662 case OBJECT_TYPE_STRUCT
:
663 case OBJECT_TYPE_VARIANT
:
664 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
675 int lttng_bytecode_interpret_format_output(struct estack_entry
*ax
,
676 struct lttng_interpreter_output
*output
)
683 output
->type
= LTTNG_INTERPRETER_TYPE_S64
;
684 output
->u
.s
= ax
->u
.v
;
687 output
->type
= LTTNG_INTERPRETER_TYPE_U64
;
688 output
->u
.u
= (uint64_t) ax
->u
.v
;
691 output
->type
= LTTNG_INTERPRETER_TYPE_DOUBLE
;
692 output
->u
.d
= ax
->u
.d
;
695 output
->type
= LTTNG_INTERPRETER_TYPE_STRING
;
696 output
->u
.str
.str
= ax
->u
.s
.str
;
697 output
->u
.str
.len
= ax
->u
.s
.seq_len
;
700 switch (ax
->u
.ptr
.object_type
) {
702 case OBJECT_TYPE_S16
:
703 case OBJECT_TYPE_S32
:
704 case OBJECT_TYPE_S64
:
706 case OBJECT_TYPE_U16
:
707 case OBJECT_TYPE_U32
:
708 case OBJECT_TYPE_U64
:
709 case OBJECT_TYPE_DOUBLE
:
710 case OBJECT_TYPE_STRING
:
711 case OBJECT_TYPE_STRING_SEQUENCE
:
712 ret
= dynamic_load_field(ax
);
715 /* Retry after loading ptr into stack top. */
717 case OBJECT_TYPE_SEQUENCE
:
718 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
719 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
720 output
->u
.sequence
.nr_elem
= *(unsigned long *) ax
->u
.ptr
.ptr
;
721 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.sequence_nestable
.elem_type
;
723 case OBJECT_TYPE_ARRAY
:
724 /* Skip count (unsigned long) */
725 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
726 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
727 output
->u
.sequence
.nr_elem
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.length
;
728 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.elem_type
;
730 case OBJECT_TYPE_SIGNED_ENUM
:
731 ret
= dynamic_load_field(ax
);
734 output
->type
= LTTNG_INTERPRETER_TYPE_SIGNED_ENUM
;
735 output
->u
.s
= ax
->u
.v
;
737 case OBJECT_TYPE_UNSIGNED_ENUM
:
738 ret
= dynamic_load_field(ax
);
741 output
->type
= LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM
;
742 output
->u
.u
= ax
->u
.v
;
744 case OBJECT_TYPE_STRUCT
:
745 case OBJECT_TYPE_VARIANT
:
751 case REG_STAR_GLOB_STRING
:
757 return LTTNG_INTERPRETER_RECORD_FLAG
;
761 * For `output` equal to NULL:
762 * Return 0 (discard), or raise the 0x1 flag (log event).
763 * Currently, other flags are kept for future extensions and have no
765 * For `output` not equal to NULL:
766 * Return 0 on success, negative error value on error.
769 uint64_t bytecode_interpret(void *interpreter_data
,
770 const char *interpreter_stack_data
,
771 struct lttng_interpreter_output
*output
)
773 struct bytecode_runtime
*bytecode
= interpreter_data
;
774 struct lttng_ctx
*ctx
= rcu_dereference(*bytecode
->p
.pctx
);
775 void *pc
, *next_pc
, *start_pc
;
778 struct estack _stack
;
779 struct estack
*stack
= &_stack
;
780 register int64_t ax
= 0, bx
= 0;
781 register enum entry_type ax_t
= REG_UNKNOWN
, bx_t
= REG_UNKNOWN
;
782 register int top
= INTERPRETER_STACK_EMPTY
;
783 #ifndef INTERPRETER_USE_SWITCH
784 static void *dispatch
[NR_BYTECODE_OPS
] = {
785 [ BYTECODE_OP_UNKNOWN
] = &&LABEL_BYTECODE_OP_UNKNOWN
,
787 [ BYTECODE_OP_RETURN
] = &&LABEL_BYTECODE_OP_RETURN
,
790 [ BYTECODE_OP_MUL
] = &&LABEL_BYTECODE_OP_MUL
,
791 [ BYTECODE_OP_DIV
] = &&LABEL_BYTECODE_OP_DIV
,
792 [ BYTECODE_OP_MOD
] = &&LABEL_BYTECODE_OP_MOD
,
793 [ BYTECODE_OP_PLUS
] = &&LABEL_BYTECODE_OP_PLUS
,
794 [ BYTECODE_OP_MINUS
] = &&LABEL_BYTECODE_OP_MINUS
,
795 [ BYTECODE_OP_BIT_RSHIFT
] = &&LABEL_BYTECODE_OP_BIT_RSHIFT
,
796 [ BYTECODE_OP_BIT_LSHIFT
] = &&LABEL_BYTECODE_OP_BIT_LSHIFT
,
797 [ BYTECODE_OP_BIT_AND
] = &&LABEL_BYTECODE_OP_BIT_AND
,
798 [ BYTECODE_OP_BIT_OR
] = &&LABEL_BYTECODE_OP_BIT_OR
,
799 [ BYTECODE_OP_BIT_XOR
] = &&LABEL_BYTECODE_OP_BIT_XOR
,
801 /* binary comparators */
802 [ BYTECODE_OP_EQ
] = &&LABEL_BYTECODE_OP_EQ
,
803 [ BYTECODE_OP_NE
] = &&LABEL_BYTECODE_OP_NE
,
804 [ BYTECODE_OP_GT
] = &&LABEL_BYTECODE_OP_GT
,
805 [ BYTECODE_OP_LT
] = &&LABEL_BYTECODE_OP_LT
,
806 [ BYTECODE_OP_GE
] = &&LABEL_BYTECODE_OP_GE
,
807 [ BYTECODE_OP_LE
] = &&LABEL_BYTECODE_OP_LE
,
809 /* string binary comparator */
810 [ BYTECODE_OP_EQ_STRING
] = &&LABEL_BYTECODE_OP_EQ_STRING
,
811 [ BYTECODE_OP_NE_STRING
] = &&LABEL_BYTECODE_OP_NE_STRING
,
812 [ BYTECODE_OP_GT_STRING
] = &&LABEL_BYTECODE_OP_GT_STRING
,
813 [ BYTECODE_OP_LT_STRING
] = &&LABEL_BYTECODE_OP_LT_STRING
,
814 [ BYTECODE_OP_GE_STRING
] = &&LABEL_BYTECODE_OP_GE_STRING
,
815 [ BYTECODE_OP_LE_STRING
] = &&LABEL_BYTECODE_OP_LE_STRING
,
817 /* globbing pattern binary comparator */
818 [ BYTECODE_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING
,
819 [ BYTECODE_OP_NE_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING
,
821 /* s64 binary comparator */
822 [ BYTECODE_OP_EQ_S64
] = &&LABEL_BYTECODE_OP_EQ_S64
,
823 [ BYTECODE_OP_NE_S64
] = &&LABEL_BYTECODE_OP_NE_S64
,
824 [ BYTECODE_OP_GT_S64
] = &&LABEL_BYTECODE_OP_GT_S64
,
825 [ BYTECODE_OP_LT_S64
] = &&LABEL_BYTECODE_OP_LT_S64
,
826 [ BYTECODE_OP_GE_S64
] = &&LABEL_BYTECODE_OP_GE_S64
,
827 [ BYTECODE_OP_LE_S64
] = &&LABEL_BYTECODE_OP_LE_S64
,
829 /* double binary comparator */
830 [ BYTECODE_OP_EQ_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE
,
831 [ BYTECODE_OP_NE_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_DOUBLE
,
832 [ BYTECODE_OP_GT_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_DOUBLE
,
833 [ BYTECODE_OP_LT_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_DOUBLE
,
834 [ BYTECODE_OP_GE_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_DOUBLE
,
835 [ BYTECODE_OP_LE_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_DOUBLE
,
837 /* Mixed S64-double binary comparators */
838 [ BYTECODE_OP_EQ_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64
,
839 [ BYTECODE_OP_NE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64
,
840 [ BYTECODE_OP_GT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64
,
841 [ BYTECODE_OP_LT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64
,
842 [ BYTECODE_OP_GE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64
,
843 [ BYTECODE_OP_LE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64
,
845 [ BYTECODE_OP_EQ_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE
,
846 [ BYTECODE_OP_NE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE
,
847 [ BYTECODE_OP_GT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE
,
848 [ BYTECODE_OP_LT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE
,
849 [ BYTECODE_OP_GE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE
,
850 [ BYTECODE_OP_LE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE
,
853 [ BYTECODE_OP_UNARY_PLUS
] = &&LABEL_BYTECODE_OP_UNARY_PLUS
,
854 [ BYTECODE_OP_UNARY_MINUS
] = &&LABEL_BYTECODE_OP_UNARY_MINUS
,
855 [ BYTECODE_OP_UNARY_NOT
] = &&LABEL_BYTECODE_OP_UNARY_NOT
,
856 [ BYTECODE_OP_UNARY_PLUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64
,
857 [ BYTECODE_OP_UNARY_MINUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64
,
858 [ BYTECODE_OP_UNARY_NOT_S64
] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64
,
859 [ BYTECODE_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE
,
860 [ BYTECODE_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE
,
861 [ BYTECODE_OP_UNARY_NOT_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE
,
864 [ BYTECODE_OP_AND
] = &&LABEL_BYTECODE_OP_AND
,
865 [ BYTECODE_OP_OR
] = &&LABEL_BYTECODE_OP_OR
,
868 [ BYTECODE_OP_LOAD_FIELD_REF
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF
,
869 [ BYTECODE_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING
,
870 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
,
871 [ BYTECODE_OP_LOAD_FIELD_REF_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64
,
872 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
,
874 /* load from immediate operand */
875 [ BYTECODE_OP_LOAD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STRING
,
876 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING
,
877 [ BYTECODE_OP_LOAD_S64
] = &&LABEL_BYTECODE_OP_LOAD_S64
,
878 [ BYTECODE_OP_LOAD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE
,
881 [ BYTECODE_OP_CAST_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_TO_S64
,
882 [ BYTECODE_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64
,
883 [ BYTECODE_OP_CAST_NOP
] = &&LABEL_BYTECODE_OP_CAST_NOP
,
885 /* get context ref */
886 [ BYTECODE_OP_GET_CONTEXT_REF
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF
,
887 [ BYTECODE_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING
,
888 [ BYTECODE_OP_GET_CONTEXT_REF_S64
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64
,
889 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
,
891 /* Instructions for recursive traversal through composed types. */
892 [ BYTECODE_OP_GET_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT
,
893 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT
,
894 [ BYTECODE_OP_GET_PAYLOAD_ROOT
] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT
,
896 [ BYTECODE_OP_GET_SYMBOL
] = &&LABEL_BYTECODE_OP_GET_SYMBOL
,
897 [ BYTECODE_OP_GET_SYMBOL_FIELD
] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD
,
898 [ BYTECODE_OP_GET_INDEX_U16
] = &&LABEL_BYTECODE_OP_GET_INDEX_U16
,
899 [ BYTECODE_OP_GET_INDEX_U64
] = &&LABEL_BYTECODE_OP_GET_INDEX_U64
,
901 [ BYTECODE_OP_LOAD_FIELD
] = &&LABEL_BYTECODE_OP_LOAD_FIELD
,
902 [ BYTECODE_OP_LOAD_FIELD_S8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8
,
903 [ BYTECODE_OP_LOAD_FIELD_S16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16
,
904 [ BYTECODE_OP_LOAD_FIELD_S32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32
,
905 [ BYTECODE_OP_LOAD_FIELD_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64
,
906 [ BYTECODE_OP_LOAD_FIELD_U8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8
,
907 [ BYTECODE_OP_LOAD_FIELD_U16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16
,
908 [ BYTECODE_OP_LOAD_FIELD_U32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32
,
909 [ BYTECODE_OP_LOAD_FIELD_U64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64
,
910 [ BYTECODE_OP_LOAD_FIELD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING
,
911 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE
,
912 [ BYTECODE_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE
,
914 [ BYTECODE_OP_UNARY_BIT_NOT
] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT
,
916 [ BYTECODE_OP_RETURN_S64
] = &&LABEL_BYTECODE_OP_RETURN_S64
,
918 #endif /* #ifndef INTERPRETER_USE_SWITCH */
922 OP(BYTECODE_OP_UNKNOWN
):
923 OP(BYTECODE_OP_LOAD_FIELD_REF
):
924 #ifdef INTERPRETER_USE_SWITCH
926 #endif /* INTERPRETER_USE_SWITCH */
927 ERR("unknown bytecode op %u",
928 (unsigned int) *(bytecode_opcode_t
*) pc
);
932 OP(BYTECODE_OP_RETURN
):
933 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
934 /* Handle dynamic typing. */
935 switch (estack_ax_t
) {
938 retval
= !!estack_ax_v
;
949 case REG_STAR_GLOB_STRING
:
958 OP(BYTECODE_OP_RETURN_S64
):
959 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
960 retval
= !!estack_ax_v
;
968 OP(BYTECODE_OP_PLUS
):
969 OP(BYTECODE_OP_MINUS
):
970 ERR("unsupported bytecode op %u",
971 (unsigned int) *(bytecode_opcode_t
*) pc
);
977 /* Dynamic typing. */
978 switch (estack_ax_t
) {
979 case REG_S64
: /* Fall-through */
981 switch (estack_bx_t
) {
982 case REG_S64
: /* Fall-through */
984 JUMP_TO(BYTECODE_OP_EQ_S64
);
986 JUMP_TO(BYTECODE_OP_EQ_DOUBLE_S64
);
987 case REG_STRING
: /* Fall-through */
988 case REG_STAR_GLOB_STRING
:
992 ERR("Unknown interpreter register type (%d)",
999 switch (estack_bx_t
) {
1000 case REG_S64
: /* Fall-through */
1002 JUMP_TO(BYTECODE_OP_EQ_S64_DOUBLE
);
1004 JUMP_TO(BYTECODE_OP_EQ_DOUBLE
);
1005 case REG_STRING
: /* Fall-through */
1006 case REG_STAR_GLOB_STRING
:
1010 ERR("Unknown interpreter register type (%d)",
1017 switch (estack_bx_t
) {
1018 case REG_S64
: /* Fall-through */
1019 case REG_U64
: /* Fall-through */
1024 JUMP_TO(BYTECODE_OP_EQ_STRING
);
1025 case REG_STAR_GLOB_STRING
:
1026 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
1028 ERR("Unknown interpreter register type (%d)",
1034 case REG_STAR_GLOB_STRING
:
1035 switch (estack_bx_t
) {
1036 case REG_S64
: /* Fall-through */
1037 case REG_U64
: /* Fall-through */
1042 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
1043 case REG_STAR_GLOB_STRING
:
1047 ERR("Unknown interpreter register type (%d)",
1054 ERR("Unknown interpreter register type (%d)",
1062 /* Dynamic typing. */
1063 switch (estack_ax_t
) {
1064 case REG_S64
: /* Fall-through */
1066 switch (estack_bx_t
) {
1067 case REG_S64
: /* Fall-through */
1069 JUMP_TO(BYTECODE_OP_NE_S64
);
1071 JUMP_TO(BYTECODE_OP_NE_DOUBLE_S64
);
1072 case REG_STRING
: /* Fall-through */
1073 case REG_STAR_GLOB_STRING
:
1077 ERR("Unknown interpreter register type (%d)",
1084 switch (estack_bx_t
) {
1085 case REG_S64
: /* Fall-through */
1087 JUMP_TO(BYTECODE_OP_NE_S64_DOUBLE
);
1089 JUMP_TO(BYTECODE_OP_NE_DOUBLE
);
1090 case REG_STRING
: /* Fall-through */
1091 case REG_STAR_GLOB_STRING
:
1095 ERR("Unknown interpreter register type (%d)",
1102 switch (estack_bx_t
) {
1103 case REG_S64
: /* Fall-through */
1109 JUMP_TO(BYTECODE_OP_NE_STRING
);
1110 case REG_STAR_GLOB_STRING
:
1111 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1113 ERR("Unknown interpreter register type (%d)",
1119 case REG_STAR_GLOB_STRING
:
1120 switch (estack_bx_t
) {
1121 case REG_S64
: /* Fall-through */
1127 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1128 case REG_STAR_GLOB_STRING
:
1132 ERR("Unknown interpreter register type (%d)",
1139 ERR("Unknown interpreter register type (%d)",
1147 /* Dynamic typing. */
1148 switch (estack_ax_t
) {
1149 case REG_S64
: /* Fall-through */
1151 switch (estack_bx_t
) {
1152 case REG_S64
: /* Fall-through */
1154 JUMP_TO(BYTECODE_OP_GT_S64
);
1156 JUMP_TO(BYTECODE_OP_GT_DOUBLE_S64
);
1157 case REG_STRING
: /* Fall-through */
1158 case REG_STAR_GLOB_STRING
:
1162 ERR("Unknown interpreter register type (%d)",
1169 switch (estack_bx_t
) {
1170 case REG_S64
: /* Fall-through */
1172 JUMP_TO(BYTECODE_OP_GT_S64_DOUBLE
);
1174 JUMP_TO(BYTECODE_OP_GT_DOUBLE
);
1175 case REG_STRING
: /* Fall-through */
1176 case REG_STAR_GLOB_STRING
:
1180 ERR("Unknown interpreter register type (%d)",
1187 switch (estack_bx_t
) {
1188 case REG_S64
: /* Fall-through */
1189 case REG_U64
: /* Fall-through */
1190 case REG_DOUBLE
: /* Fall-through */
1191 case REG_STAR_GLOB_STRING
:
1195 JUMP_TO(BYTECODE_OP_GT_STRING
);
1197 ERR("Unknown interpreter register type (%d)",
1204 ERR("Unknown interpreter register type (%d)",
1212 /* Dynamic typing. */
1213 switch (estack_ax_t
) {
1214 case REG_S64
: /* Fall-through */
1216 switch (estack_bx_t
) {
1217 case REG_S64
: /* Fall-through */
1219 JUMP_TO(BYTECODE_OP_LT_S64
);
1221 JUMP_TO(BYTECODE_OP_LT_DOUBLE_S64
);
1222 case REG_STRING
: /* Fall-through */
1223 case REG_STAR_GLOB_STRING
:
1227 ERR("Unknown interpreter register type (%d)",
1234 switch (estack_bx_t
) {
1235 case REG_S64
: /* Fall-through */
1237 JUMP_TO(BYTECODE_OP_LT_S64_DOUBLE
);
1239 JUMP_TO(BYTECODE_OP_LT_DOUBLE
);
1240 case REG_STRING
: /* Fall-through */
1241 case REG_STAR_GLOB_STRING
:
1245 ERR("Unknown interpreter register type (%d)",
1252 switch (estack_bx_t
) {
1253 case REG_S64
: /* Fall-through */
1254 case REG_U64
: /* Fall-through */
1255 case REG_DOUBLE
: /* Fall-through */
1256 case REG_STAR_GLOB_STRING
:
1260 JUMP_TO(BYTECODE_OP_LT_STRING
);
1262 ERR("Unknown interpreter register type (%d)",
1269 ERR("Unknown interpreter register type (%d)",
1277 /* Dynamic typing. */
1278 switch (estack_ax_t
) {
1279 case REG_S64
: /* Fall-through */
1281 switch (estack_bx_t
) {
1282 case REG_S64
: /* Fall-through */
1284 JUMP_TO(BYTECODE_OP_GE_S64
);
1286 JUMP_TO(BYTECODE_OP_GE_DOUBLE_S64
);
1287 case REG_STRING
: /* Fall-through */
1288 case REG_STAR_GLOB_STRING
:
1292 ERR("Unknown interpreter register type (%d)",
1299 switch (estack_bx_t
) {
1300 case REG_S64
: /* Fall-through */
1302 JUMP_TO(BYTECODE_OP_GE_S64_DOUBLE
);
1304 JUMP_TO(BYTECODE_OP_GE_DOUBLE
);
1305 case REG_STRING
: /* Fall-through */
1306 case REG_STAR_GLOB_STRING
:
1310 ERR("Unknown interpreter register type (%d)",
1317 switch (estack_bx_t
) {
1318 case REG_S64
: /* Fall-through */
1319 case REG_U64
: /* Fall-through */
1320 case REG_DOUBLE
: /* Fall-through */
1321 case REG_STAR_GLOB_STRING
:
1325 JUMP_TO(BYTECODE_OP_GE_STRING
);
1327 ERR("Unknown interpreter register type (%d)",
1334 ERR("Unknown interpreter register type (%d)",
1342 /* Dynamic typing. */
1343 switch (estack_ax_t
) {
1344 case REG_S64
: /* Fall-through */
1346 switch (estack_bx_t
) {
1347 case REG_S64
: /* Fall-through */
1349 JUMP_TO(BYTECODE_OP_LE_S64
);
1351 JUMP_TO(BYTECODE_OP_LE_DOUBLE_S64
);
1352 case REG_STRING
: /* Fall-through */
1353 case REG_STAR_GLOB_STRING
:
1357 ERR("Unknown interpreter register type (%d)",
1364 switch (estack_bx_t
) {
1365 case REG_S64
: /* Fall-through */
1367 JUMP_TO(BYTECODE_OP_LE_S64_DOUBLE
);
1369 JUMP_TO(BYTECODE_OP_LE_DOUBLE
);
1370 case REG_STRING
: /* Fall-through */
1371 case REG_STAR_GLOB_STRING
:
1375 ERR("Unknown interpreter register type (%d)",
1382 switch (estack_bx_t
) {
1383 case REG_S64
: /* Fall-through */
1384 case REG_U64
: /* Fall-through */
1385 case REG_DOUBLE
: /* Fall-through */
1386 case REG_STAR_GLOB_STRING
:
1390 JUMP_TO(BYTECODE_OP_LE_STRING
);
1392 ERR("Unknown interpreter register type (%d)",
1399 ERR("Unknown interpreter register type (%d)",
1406 OP(BYTECODE_OP_EQ_STRING
):
1410 res
= (stack_strcmp(stack
, top
, "==") == 0);
1411 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1413 estack_ax_t
= REG_S64
;
1414 next_pc
+= sizeof(struct binary_op
);
1417 OP(BYTECODE_OP_NE_STRING
):
1421 res
= (stack_strcmp(stack
, top
, "!=") != 0);
1422 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1424 estack_ax_t
= REG_S64
;
1425 next_pc
+= sizeof(struct binary_op
);
1428 OP(BYTECODE_OP_GT_STRING
):
1432 res
= (stack_strcmp(stack
, top
, ">") > 0);
1433 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1435 estack_ax_t
= REG_S64
;
1436 next_pc
+= sizeof(struct binary_op
);
1439 OP(BYTECODE_OP_LT_STRING
):
1443 res
= (stack_strcmp(stack
, top
, "<") < 0);
1444 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1446 estack_ax_t
= REG_S64
;
1447 next_pc
+= sizeof(struct binary_op
);
1450 OP(BYTECODE_OP_GE_STRING
):
1454 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
1455 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1457 estack_ax_t
= REG_S64
;
1458 next_pc
+= sizeof(struct binary_op
);
1461 OP(BYTECODE_OP_LE_STRING
):
1465 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
1466 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1468 estack_ax_t
= REG_S64
;
1469 next_pc
+= sizeof(struct binary_op
);
1473 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING
):
1477 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
1478 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1480 estack_ax_t
= REG_S64
;
1481 next_pc
+= sizeof(struct binary_op
);
1484 OP(BYTECODE_OP_NE_STAR_GLOB_STRING
):
1488 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
1489 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1491 estack_ax_t
= REG_S64
;
1492 next_pc
+= sizeof(struct binary_op
);
1496 OP(BYTECODE_OP_EQ_S64
):
1500 res
= (estack_bx_v
== estack_ax_v
);
1501 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1503 estack_ax_t
= REG_S64
;
1504 next_pc
+= sizeof(struct binary_op
);
1507 OP(BYTECODE_OP_NE_S64
):
1511 res
= (estack_bx_v
!= estack_ax_v
);
1512 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1514 estack_ax_t
= REG_S64
;
1515 next_pc
+= sizeof(struct binary_op
);
1518 OP(BYTECODE_OP_GT_S64
):
1522 res
= (estack_bx_v
> estack_ax_v
);
1523 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1525 estack_ax_t
= REG_S64
;
1526 next_pc
+= sizeof(struct binary_op
);
1529 OP(BYTECODE_OP_LT_S64
):
1533 res
= (estack_bx_v
< estack_ax_v
);
1534 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1536 estack_ax_t
= REG_S64
;
1537 next_pc
+= sizeof(struct binary_op
);
1540 OP(BYTECODE_OP_GE_S64
):
1544 res
= (estack_bx_v
>= estack_ax_v
);
1545 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1547 estack_ax_t
= REG_S64
;
1548 next_pc
+= sizeof(struct binary_op
);
1551 OP(BYTECODE_OP_LE_S64
):
1555 res
= (estack_bx_v
<= estack_ax_v
);
1556 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1558 estack_ax_t
= REG_S64
;
1559 next_pc
+= sizeof(struct binary_op
);
1563 OP(BYTECODE_OP_EQ_DOUBLE
):
1567 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
1568 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1570 estack_ax_t
= REG_S64
;
1571 next_pc
+= sizeof(struct binary_op
);
1574 OP(BYTECODE_OP_NE_DOUBLE
):
1578 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
1579 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1581 estack_ax_t
= REG_S64
;
1582 next_pc
+= sizeof(struct binary_op
);
1585 OP(BYTECODE_OP_GT_DOUBLE
):
1589 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
1590 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1592 estack_ax_t
= REG_S64
;
1593 next_pc
+= sizeof(struct binary_op
);
1596 OP(BYTECODE_OP_LT_DOUBLE
):
1600 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
1601 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1603 estack_ax_t
= REG_S64
;
1604 next_pc
+= sizeof(struct binary_op
);
1607 OP(BYTECODE_OP_GE_DOUBLE
):
1611 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
1612 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1614 estack_ax_t
= REG_S64
;
1615 next_pc
+= sizeof(struct binary_op
);
1618 OP(BYTECODE_OP_LE_DOUBLE
):
1622 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
1623 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1625 estack_ax_t
= REG_S64
;
1626 next_pc
+= sizeof(struct binary_op
);
1630 /* Mixed S64-double binary comparators */
1631 OP(BYTECODE_OP_EQ_DOUBLE_S64
):
1635 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
1636 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1638 estack_ax_t
= REG_S64
;
1639 next_pc
+= sizeof(struct binary_op
);
1642 OP(BYTECODE_OP_NE_DOUBLE_S64
):
1646 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
1647 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1649 estack_ax_t
= REG_S64
;
1650 next_pc
+= sizeof(struct binary_op
);
1653 OP(BYTECODE_OP_GT_DOUBLE_S64
):
1657 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
1658 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1660 estack_ax_t
= REG_S64
;
1661 next_pc
+= sizeof(struct binary_op
);
1664 OP(BYTECODE_OP_LT_DOUBLE_S64
):
1668 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
1669 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1671 estack_ax_t
= REG_S64
;
1672 next_pc
+= sizeof(struct binary_op
);
1675 OP(BYTECODE_OP_GE_DOUBLE_S64
):
1679 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
1680 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1682 estack_ax_t
= REG_S64
;
1683 next_pc
+= sizeof(struct binary_op
);
1686 OP(BYTECODE_OP_LE_DOUBLE_S64
):
1690 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
1691 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1693 estack_ax_t
= REG_S64
;
1694 next_pc
+= sizeof(struct binary_op
);
1698 OP(BYTECODE_OP_EQ_S64_DOUBLE
):
1702 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
1703 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1705 estack_ax_t
= REG_S64
;
1706 next_pc
+= sizeof(struct binary_op
);
1709 OP(BYTECODE_OP_NE_S64_DOUBLE
):
1713 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
1714 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1716 estack_ax_t
= REG_S64
;
1717 next_pc
+= sizeof(struct binary_op
);
1720 OP(BYTECODE_OP_GT_S64_DOUBLE
):
1724 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
1725 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1727 estack_ax_t
= REG_S64
;
1728 next_pc
+= sizeof(struct binary_op
);
1731 OP(BYTECODE_OP_LT_S64_DOUBLE
):
1735 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
1736 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1738 estack_ax_t
= REG_S64
;
1739 next_pc
+= sizeof(struct binary_op
);
1742 OP(BYTECODE_OP_GE_S64_DOUBLE
):
1746 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
1747 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1749 estack_ax_t
= REG_S64
;
1750 next_pc
+= sizeof(struct binary_op
);
1753 OP(BYTECODE_OP_LE_S64_DOUBLE
):
1757 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
1758 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1760 estack_ax_t
= REG_S64
;
1761 next_pc
+= sizeof(struct binary_op
);
1764 OP(BYTECODE_OP_BIT_RSHIFT
):
1768 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1773 /* Catch undefined behavior. */
1774 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1778 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1779 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1781 estack_ax_t
= REG_U64
;
1782 next_pc
+= sizeof(struct binary_op
);
1785 OP(BYTECODE_OP_BIT_LSHIFT
):
1789 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1794 /* Catch undefined behavior. */
1795 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1799 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1800 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1802 estack_ax_t
= REG_U64
;
1803 next_pc
+= sizeof(struct binary_op
);
1806 OP(BYTECODE_OP_BIT_AND
):
1810 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1815 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1816 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1818 estack_ax_t
= REG_U64
;
1819 next_pc
+= sizeof(struct binary_op
);
1822 OP(BYTECODE_OP_BIT_OR
):
1826 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1831 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1832 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1834 estack_ax_t
= REG_U64
;
1835 next_pc
+= sizeof(struct binary_op
);
1838 OP(BYTECODE_OP_BIT_XOR
):
1842 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1847 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1848 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1850 estack_ax_t
= REG_U64
;
1851 next_pc
+= sizeof(struct binary_op
);
1856 OP(BYTECODE_OP_UNARY_PLUS
):
1858 /* Dynamic typing. */
1859 switch (estack_ax_t
) {
1860 case REG_S64
: /* Fall-through. */
1862 JUMP_TO(BYTECODE_OP_UNARY_PLUS_S64
);
1864 JUMP_TO(BYTECODE_OP_UNARY_PLUS_DOUBLE
);
1865 case REG_STRING
: /* Fall-through */
1866 case REG_STAR_GLOB_STRING
:
1870 ERR("Unknown interpreter register type (%d)",
1876 OP(BYTECODE_OP_UNARY_MINUS
):
1878 /* Dynamic typing. */
1879 switch (estack_ax_t
) {
1880 case REG_S64
: /* Fall-through. */
1882 JUMP_TO(BYTECODE_OP_UNARY_MINUS_S64
);
1884 JUMP_TO(BYTECODE_OP_UNARY_MINUS_DOUBLE
);
1885 case REG_STRING
: /* Fall-through */
1886 case REG_STAR_GLOB_STRING
:
1890 ERR("Unknown interpreter register type (%d)",
1896 OP(BYTECODE_OP_UNARY_NOT
):
1898 /* Dynamic typing. */
1899 switch (estack_ax_t
) {
1900 case REG_S64
: /* Fall-through. */
1902 JUMP_TO(BYTECODE_OP_UNARY_NOT_S64
);
1904 JUMP_TO(BYTECODE_OP_UNARY_NOT_DOUBLE
);
1905 case REG_STRING
: /* Fall-through */
1906 case REG_STAR_GLOB_STRING
:
1910 ERR("Unknown interpreter register type (%d)",
1915 next_pc
+= sizeof(struct unary_op
);
1919 OP(BYTECODE_OP_UNARY_BIT_NOT
):
1921 /* Dynamic typing. */
1922 if (!IS_INTEGER_REGISTER(estack_ax_t
)) {
1927 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1928 estack_ax_t
= REG_U64
;
1929 next_pc
+= sizeof(struct unary_op
);
1933 OP(BYTECODE_OP_UNARY_PLUS_S64
):
1934 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE
):
1936 next_pc
+= sizeof(struct unary_op
);
1939 OP(BYTECODE_OP_UNARY_MINUS_S64
):
1941 estack_ax_v
= -estack_ax_v
;
1942 next_pc
+= sizeof(struct unary_op
);
1945 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE
):
1947 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
1948 next_pc
+= sizeof(struct unary_op
);
1951 OP(BYTECODE_OP_UNARY_NOT_S64
):
1953 estack_ax_v
= !estack_ax_v
;
1954 estack_ax_t
= REG_S64
;
1955 next_pc
+= sizeof(struct unary_op
);
1958 OP(BYTECODE_OP_UNARY_NOT_DOUBLE
):
1960 estack_ax_v
= !estack_ax(stack
, top
)->u
.d
;
1961 estack_ax_t
= REG_S64
;
1962 next_pc
+= sizeof(struct unary_op
);
1967 OP(BYTECODE_OP_AND
):
1969 struct logical_op
*insn
= (struct logical_op
*) pc
;
1971 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1975 /* If AX is 0, skip and evaluate to 0 */
1976 if (unlikely(estack_ax_v
== 0)) {
1977 dbg_printf("Jumping to bytecode offset %u\n",
1978 (unsigned int) insn
->skip_offset
);
1979 next_pc
= start_pc
+ insn
->skip_offset
;
1981 /* Pop 1 when jump not taken */
1982 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1983 next_pc
+= sizeof(struct logical_op
);
1989 struct logical_op
*insn
= (struct logical_op
*) pc
;
1991 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1995 /* If AX is nonzero, skip and evaluate to 1 */
1996 if (unlikely(estack_ax_v
!= 0)) {
1998 dbg_printf("Jumping to bytecode offset %u\n",
1999 (unsigned int) insn
->skip_offset
);
2000 next_pc
= start_pc
+ insn
->skip_offset
;
2002 /* Pop 1 when jump not taken */
2003 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2004 next_pc
+= sizeof(struct logical_op
);
2010 /* load field ref */
2011 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING
):
2013 struct load_op
*insn
= (struct load_op
*) pc
;
2014 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2016 dbg_printf("load field ref offset %u type string\n",
2018 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2019 estack_ax(stack
, top
)->u
.s
.str
=
2020 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
2021 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2022 dbg_printf("Interpreter warning: loading a NULL string.\n");
2026 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2027 estack_ax(stack
, top
)->u
.s
.literal_type
=
2028 ESTACK_STRING_LITERAL_TYPE_NONE
;
2029 estack_ax_t
= REG_STRING
;
2030 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2031 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2035 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
):
2037 struct load_op
*insn
= (struct load_op
*) pc
;
2038 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2040 dbg_printf("load field ref offset %u type sequence\n",
2042 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2043 estack_ax(stack
, top
)->u
.s
.seq_len
=
2044 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
2045 estack_ax(stack
, top
)->u
.s
.str
=
2046 *(const char **) (&interpreter_stack_data
[ref
->offset
2047 + sizeof(unsigned long)]);
2048 estack_ax_t
= REG_STRING
;
2049 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2050 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2054 estack_ax(stack
, top
)->u
.s
.literal_type
=
2055 ESTACK_STRING_LITERAL_TYPE_NONE
;
2056 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2060 OP(BYTECODE_OP_LOAD_FIELD_REF_S64
):
2062 struct load_op
*insn
= (struct load_op
*) pc
;
2063 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2065 dbg_printf("load field ref offset %u type s64\n",
2067 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2069 ((struct literal_numeric
*) &interpreter_stack_data
[ref
->offset
])->v
;
2070 estack_ax_t
= REG_S64
;
2071 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
2072 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2076 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
):
2078 struct load_op
*insn
= (struct load_op
*) pc
;
2079 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2081 dbg_printf("load field ref offset %u type double\n",
2083 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2084 memcpy(&estack_ax(stack
, top
)->u
.d
, &interpreter_stack_data
[ref
->offset
],
2085 sizeof(struct literal_double
));
2086 estack_ax_t
= REG_DOUBLE
;
2087 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
2088 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2092 /* load from immediate operand */
2093 OP(BYTECODE_OP_LOAD_STRING
):
2095 struct load_op
*insn
= (struct load_op
*) pc
;
2097 dbg_printf("load string %s\n", insn
->data
);
2098 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2099 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2100 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2101 estack_ax(stack
, top
)->u
.s
.literal_type
=
2102 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
2103 estack_ax_t
= REG_STRING
;
2104 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2108 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING
):
2110 struct load_op
*insn
= (struct load_op
*) pc
;
2112 dbg_printf("load globbing pattern %s\n", insn
->data
);
2113 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2114 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2115 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2116 estack_ax(stack
, top
)->u
.s
.literal_type
=
2117 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
2118 estack_ax_t
= REG_STAR_GLOB_STRING
;
2119 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2123 OP(BYTECODE_OP_LOAD_S64
):
2125 struct load_op
*insn
= (struct load_op
*) pc
;
2127 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2128 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
2129 estack_ax_t
= REG_S64
;
2130 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
2131 next_pc
+= sizeof(struct load_op
)
2132 + sizeof(struct literal_numeric
);
2136 OP(BYTECODE_OP_LOAD_DOUBLE
):
2138 struct load_op
*insn
= (struct load_op
*) pc
;
2140 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2141 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
2142 sizeof(struct literal_double
));
2143 estack_ax_t
= REG_DOUBLE
;
2144 dbg_printf("load double %g\n", estack_ax(stack
, top
)->u
.d
);
2145 next_pc
+= sizeof(struct load_op
)
2146 + sizeof(struct literal_double
);
2151 OP(BYTECODE_OP_CAST_TO_S64
):
2153 /* Dynamic typing. */
2154 switch (estack_ax_t
) {
2156 JUMP_TO(BYTECODE_OP_CAST_NOP
);
2158 JUMP_TO(BYTECODE_OP_CAST_DOUBLE_TO_S64
);
2160 estack_ax_t
= REG_S64
;
2161 next_pc
+= sizeof(struct cast_op
);
2162 case REG_STRING
: /* Fall-through */
2163 case REG_STAR_GLOB_STRING
:
2167 ERR("Unknown interpreter register type (%d)",
2174 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64
):
2176 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
2177 estack_ax_t
= REG_S64
;
2178 next_pc
+= sizeof(struct cast_op
);
2182 OP(BYTECODE_OP_CAST_NOP
):
2184 next_pc
+= sizeof(struct cast_op
);
2188 /* get context ref */
2189 OP(BYTECODE_OP_GET_CONTEXT_REF
):
2191 struct load_op
*insn
= (struct load_op
*) pc
;
2192 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2193 struct lttng_ctx_field
*ctx_field
;
2194 struct lttng_ctx_value v
;
2196 dbg_printf("get context ref offset %u type dynamic\n",
2198 ctx_field
= &ctx
->fields
[ref
->offset
];
2199 ctx_field
->get_value(ctx_field
, &v
);
2200 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2202 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
2205 case LTTNG_UST_DYNAMIC_TYPE_S64
:
2206 estack_ax_v
= v
.u
.s64
;
2207 estack_ax_t
= REG_S64
;
2208 dbg_printf("ref get context dynamic s64 %" PRIi64
"\n", estack_ax_v
);
2210 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
2211 estack_ax(stack
, top
)->u
.d
= v
.u
.d
;
2212 estack_ax_t
= REG_DOUBLE
;
2213 dbg_printf("ref get context dynamic double %g\n", estack_ax(stack
, top
)->u
.d
);
2215 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
2216 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2217 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2218 dbg_printf("Interpreter warning: loading a NULL string.\n");
2222 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2223 estack_ax(stack
, top
)->u
.s
.literal_type
=
2224 ESTACK_STRING_LITERAL_TYPE_NONE
;
2225 dbg_printf("ref get context dynamic string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2226 estack_ax_t
= REG_STRING
;
2229 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
2233 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2237 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING
):
2239 struct load_op
*insn
= (struct load_op
*) pc
;
2240 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2241 struct lttng_ctx_field
*ctx_field
;
2242 struct lttng_ctx_value v
;
2244 dbg_printf("get context ref offset %u type string\n",
2246 ctx_field
= &ctx
->fields
[ref
->offset
];
2247 ctx_field
->get_value(ctx_field
, &v
);
2248 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2249 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2250 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2251 dbg_printf("Interpreter warning: loading a NULL string.\n");
2255 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2256 estack_ax(stack
, top
)->u
.s
.literal_type
=
2257 ESTACK_STRING_LITERAL_TYPE_NONE
;
2258 estack_ax_t
= REG_STRING
;
2259 dbg_printf("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2260 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2264 OP(BYTECODE_OP_GET_CONTEXT_REF_S64
):
2266 struct load_op
*insn
= (struct load_op
*) pc
;
2267 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2268 struct lttng_ctx_field
*ctx_field
;
2269 struct lttng_ctx_value v
;
2271 dbg_printf("get context ref offset %u type s64\n",
2273 ctx_field
= &ctx
->fields
[ref
->offset
];
2274 ctx_field
->get_value(ctx_field
, &v
);
2275 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2276 estack_ax_v
= v
.u
.s64
;
2277 estack_ax_t
= REG_S64
;
2278 dbg_printf("ref get context s64 %" PRIi64
"\n", estack_ax_v
);
2279 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2283 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
):
2285 struct load_op
*insn
= (struct load_op
*) pc
;
2286 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2287 struct lttng_ctx_field
*ctx_field
;
2288 struct lttng_ctx_value v
;
2290 dbg_printf("get context ref offset %u type double\n",
2292 ctx_field
= &ctx
->fields
[ref
->offset
];
2293 ctx_field
->get_value(ctx_field
, &v
);
2294 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2295 memcpy(&estack_ax(stack
, top
)->u
.d
, &v
.u
.d
, sizeof(struct literal_double
));
2296 estack_ax_t
= REG_DOUBLE
;
2297 dbg_printf("ref get context double %g\n", estack_ax(stack
, top
)->u
.d
);
2298 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2302 OP(BYTECODE_OP_GET_CONTEXT_ROOT
):
2304 dbg_printf("op get context root\n");
2305 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2306 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
2307 /* "field" only needed for variants. */
2308 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2309 estack_ax_t
= REG_PTR
;
2310 next_pc
+= sizeof(struct load_op
);
2314 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT
):
2316 dbg_printf("op get app context root\n");
2317 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2318 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_APP_CONTEXT
;
2319 /* "field" only needed for variants. */
2320 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2321 estack_ax_t
= REG_PTR
;
2322 next_pc
+= sizeof(struct load_op
);
2326 OP(BYTECODE_OP_GET_PAYLOAD_ROOT
):
2328 dbg_printf("op get app payload root\n");
2329 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2330 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
2331 estack_ax(stack
, top
)->u
.ptr
.ptr
= interpreter_stack_data
;
2332 /* "field" only needed for variants. */
2333 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2334 estack_ax_t
= REG_PTR
;
2335 next_pc
+= sizeof(struct load_op
);
2339 OP(BYTECODE_OP_GET_SYMBOL
):
2341 dbg_printf("op get symbol\n");
2342 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
2344 ERR("Nested fields not implemented yet.");
2347 case LOAD_ROOT_CONTEXT
:
2348 case LOAD_ROOT_APP_CONTEXT
:
2349 case LOAD_ROOT_PAYLOAD
:
2351 * symbol lookup is performed by
2357 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
2361 OP(BYTECODE_OP_GET_SYMBOL_FIELD
):
2364 * Used for first variant encountered in a
2365 * traversal. Variants are not implemented yet.
2371 OP(BYTECODE_OP_GET_INDEX_U16
):
2373 struct load_op
*insn
= (struct load_op
*) pc
;
2374 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
2376 dbg_printf("op get index u16\n");
2377 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2380 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2381 estack_ax_t
= estack_ax(stack
, top
)->type
;
2382 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
2386 OP(BYTECODE_OP_GET_INDEX_U64
):
2388 struct load_op
*insn
= (struct load_op
*) pc
;
2389 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
2391 dbg_printf("op get index u64\n");
2392 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2395 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2396 estack_ax_t
= estack_ax(stack
, top
)->type
;
2397 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
2401 OP(BYTECODE_OP_LOAD_FIELD
):
2403 dbg_printf("op load field\n");
2404 ret
= dynamic_load_field(estack_ax(stack
, top
));
2407 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2408 estack_ax_t
= estack_ax(stack
, top
)->type
;
2409 next_pc
+= sizeof(struct load_op
);
2413 OP(BYTECODE_OP_LOAD_FIELD_S8
):
2415 dbg_printf("op load field s8\n");
2417 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2418 estack_ax_t
= REG_S64
;
2419 next_pc
+= sizeof(struct load_op
);
2422 OP(BYTECODE_OP_LOAD_FIELD_S16
):
2424 dbg_printf("op load field s16\n");
2426 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2427 estack_ax_t
= REG_S64
;
2428 next_pc
+= sizeof(struct load_op
);
2431 OP(BYTECODE_OP_LOAD_FIELD_S32
):
2433 dbg_printf("op load field s32\n");
2435 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2436 estack_ax_t
= REG_S64
;
2437 next_pc
+= sizeof(struct load_op
);
2440 OP(BYTECODE_OP_LOAD_FIELD_S64
):
2442 dbg_printf("op load field s64\n");
2444 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2445 estack_ax_t
= REG_S64
;
2446 next_pc
+= sizeof(struct load_op
);
2449 OP(BYTECODE_OP_LOAD_FIELD_U8
):
2451 dbg_printf("op load field u8\n");
2453 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2454 estack_ax_t
= REG_U64
;
2455 next_pc
+= sizeof(struct load_op
);
2458 OP(BYTECODE_OP_LOAD_FIELD_U16
):
2460 dbg_printf("op load field u16\n");
2462 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2463 estack_ax_t
= REG_U64
;
2464 next_pc
+= sizeof(struct load_op
);
2467 OP(BYTECODE_OP_LOAD_FIELD_U32
):
2469 dbg_printf("op load field u32\n");
2471 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2472 estack_ax_t
= REG_U64
;
2473 next_pc
+= sizeof(struct load_op
);
2476 OP(BYTECODE_OP_LOAD_FIELD_U64
):
2478 dbg_printf("op load field u64\n");
2480 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2481 estack_ax_t
= REG_U64
;
2482 next_pc
+= sizeof(struct load_op
);
2485 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE
):
2487 dbg_printf("op load field double\n");
2489 memcpy(&estack_ax(stack
, top
)->u
.d
,
2490 estack_ax(stack
, top
)->u
.ptr
.ptr
,
2491 sizeof(struct literal_double
));
2492 estack_ax(stack
, top
)->type
= REG_DOUBLE
;
2493 next_pc
+= sizeof(struct load_op
);
2497 OP(BYTECODE_OP_LOAD_FIELD_STRING
):
2501 dbg_printf("op load field string\n");
2502 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2503 estack_ax(stack
, top
)->u
.s
.str
= str
;
2504 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2505 dbg_printf("Interpreter warning: loading a NULL string.\n");
2509 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2510 estack_ax(stack
, top
)->u
.s
.literal_type
=
2511 ESTACK_STRING_LITERAL_TYPE_NONE
;
2512 estack_ax(stack
, top
)->type
= REG_STRING
;
2513 next_pc
+= sizeof(struct load_op
);
2517 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE
):
2521 dbg_printf("op load field string sequence\n");
2522 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
2523 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
2524 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
2525 estack_ax(stack
, top
)->type
= REG_STRING
;
2526 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2527 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2531 estack_ax(stack
, top
)->u
.s
.literal_type
=
2532 ESTACK_STRING_LITERAL_TYPE_NONE
;
2533 next_pc
+= sizeof(struct load_op
);
2539 /* Return _DISCARD on error. */
2541 return LTTNG_INTERPRETER_DISCARD
;
2544 return lttng_bytecode_interpret_format_output(estack_ax(stack
, top
),
/*
 * Filter-bytecode entry point.
 *
 * Thin wrapper: forwards to bytecode_interpret() with a NULL
 * interpreter-output pointer, since filtering only needs the verdict
 * value, not a captured output structure.
 *
 * @filter_data: opaque interpreter data passed through unchanged.
 * @filter_stack_data: event payload the bytecode reads fields from.
 *
 * Returns the value produced by bytecode_interpret().
 */
uint64_t lttng_bytecode_filter_interpret(void *filter_data,
		const char *filter_stack_data)
{
	return bytecode_interpret(filter_data, filter_stack_data, NULL);
}
/*
 * Capture-bytecode entry point.
 *
 * Thin wrapper: forwards to bytecode_interpret(), passing @output so the
 * interpreter can fill in the captured result.
 *
 * @capture_data: opaque interpreter data passed through unchanged.
 * @capture_stack_data: event payload the bytecode reads fields from.
 * @output: interpreter output structure filled by the interpreter.
 *
 * Returns the value produced by bytecode_interpret().
 *
 * Note: the previous explicit cast of @output was removed — it cast the
 * pointer to its own declared type and had no effect.
 */
uint64_t lttng_bytecode_capture_interpret(void *capture_data,
		const char *capture_stack_data,
		struct lttng_interpreter_output *output)
{
	return bytecode_interpret(capture_data, capture_stack_data, output);
}