2 * SPDX-License-Identifier: MIT
4 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
6 * LTTng UST bytecode interpreter.
13 #include <lttng/urcu/pointer.h>
14 #include <lttng/ust-endian.h>
15 #include <lttng/ust-events.h>
17 #include "lttng-bytecode.h"
18 #include "string-utils.h"
23 * -2: unknown escape char.
28 int parse_char(const char **p
)
 * Returns SIZE_MAX if the string is null-terminated, or the number of
 * characters in the sequence otherwise.
52 size_t get_str_or_seq_len(const struct estack_entry
*entry
)
54 return entry
->u
.s
.seq_len
;
58 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
61 const char *candidate
;
65 /* Find out which side is the pattern vs. the candidate. */
66 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
67 pattern
= estack_ax(stack
, top
)->u
.s
.str
;
68 pattern_len
= get_str_or_seq_len(estack_ax(stack
, top
));
69 candidate
= estack_bx(stack
, top
)->u
.s
.str
;
70 candidate_len
= get_str_or_seq_len(estack_bx(stack
, top
));
72 pattern
= estack_bx(stack
, top
)->u
.s
.str
;
73 pattern_len
= get_str_or_seq_len(estack_bx(stack
, top
));
74 candidate
= estack_ax(stack
, top
)->u
.s
.str
;
75 candidate_len
= get_str_or_seq_len(estack_ax(stack
, top
));
78 /* Perform the match. Returns 0 when the result is true. */
79 return !strutils_star_glob_match(pattern
, pattern_len
, candidate
,
84 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
86 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
93 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
>= estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
94 if (q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
97 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
98 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
106 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
107 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
108 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
109 ret
= parse_char(&p
);
115 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
116 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
117 ret
= parse_char(&p
);
120 } else if (ret
== -2) {
123 /* else compare both char */
125 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
126 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
127 ret
= parse_char(&q
);
130 } else if (ret
== -2) {
150 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data
,
151 const char *filter_stack_data
)
153 return LTTNG_INTERPRETER_DISCARD
;
156 uint64_t lttng_bytecode_capture_interpret_false(void *capture_data
,
157 const char *capture_stack_data
,
158 struct lttng_interpreter_output
*output
)
160 return LTTNG_INTERPRETER_DISCARD
;
163 #ifdef INTERPRETER_USE_SWITCH
166 * Fallback for compilers that do not support taking address of labels.
170 start_pc = &bytecode->data[0]; \
171 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
173 dbg_printf("Executing op %s (%u)\n", \
174 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
175 (unsigned int) *(bytecode_opcode_t *) pc); \
176 switch (*(bytecode_opcode_t *) pc) {
178 #define OP(name) jump_target_##name: __attribute__((unused)); \
186 #define JUMP_TO(name) \
187 goto jump_target_##name
192 * Dispatch-table based interpreter.
196 start_pc = &bytecode->code[0]; \
197 pc = next_pc = start_pc; \
198 if (unlikely(pc - start_pc >= bytecode->len)) \
200 goto *dispatch[*(bytecode_opcode_t *) pc];
207 goto *dispatch[*(bytecode_opcode_t *) pc];
211 #define JUMP_TO(name) \
216 #define IS_INTEGER_REGISTER(reg_type) \
217 (reg_type == REG_U64 || reg_type == REG_S64)
219 static int context_get_index(struct lttng_ctx
*ctx
,
220 struct load_ptr
*ptr
,
224 struct lttng_ctx_field
*ctx_field
;
225 struct lttng_event_field
*field
;
226 struct lttng_ctx_value v
;
228 ctx_field
= &ctx
->fields
[idx
];
229 field
= &ctx_field
->event_field
;
230 ptr
->type
= LOAD_OBJECT
;
233 switch (field
->type
.atype
) {
235 ctx_field
->get_value(ctx_field
, &v
);
236 if (field
->type
.u
.integer
.signedness
) {
237 ptr
->object_type
= OBJECT_TYPE_S64
;
238 ptr
->u
.s64
= v
.u
.s64
;
239 ptr
->ptr
= &ptr
->u
.s64
;
241 ptr
->object_type
= OBJECT_TYPE_U64
;
242 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
243 ptr
->ptr
= &ptr
->u
.u64
;
246 case atype_enum
: /* Fall-through */
247 case atype_enum_nestable
:
249 const struct lttng_integer_type
*itype
;
251 if (field
->type
.atype
== atype_enum
) {
252 itype
= &field
->type
.u
.legacy
.basic
.enumeration
.container_type
;
254 itype
= &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
256 ctx_field
->get_value(ctx_field
, &v
);
257 if (itype
->signedness
) {
258 ptr
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
259 ptr
->u
.s64
= v
.u
.s64
;
260 ptr
->ptr
= &ptr
->u
.s64
;
262 ptr
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
263 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
264 ptr
->ptr
= &ptr
->u
.u64
;
269 if (field
->type
.u
.legacy
.array
.elem_type
.atype
!= atype_integer
) {
270 ERR("Array nesting only supports integer types.");
273 if (field
->type
.u
.legacy
.array
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
274 ERR("Only string arrays are supported for contexts.");
277 ptr
->object_type
= OBJECT_TYPE_STRING
;
278 ctx_field
->get_value(ctx_field
, &v
);
281 case atype_array_nestable
:
282 if (field
->type
.u
.array_nestable
.elem_type
->atype
!= atype_integer
) {
283 ERR("Array nesting only supports integer types.");
286 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
287 ERR("Only string arrays are supported for contexts.");
290 ptr
->object_type
= OBJECT_TYPE_STRING
;
291 ctx_field
->get_value(ctx_field
, &v
);
295 if (field
->type
.u
.legacy
.sequence
.elem_type
.atype
!= atype_integer
) {
296 ERR("Sequence nesting only supports integer types.");
299 if (field
->type
.u
.legacy
.sequence
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
300 ERR("Only string sequences are supported for contexts.");
303 ptr
->object_type
= OBJECT_TYPE_STRING
;
304 ctx_field
->get_value(ctx_field
, &v
);
307 case atype_sequence_nestable
:
308 if (field
->type
.u
.sequence_nestable
.elem_type
->atype
!= atype_integer
) {
309 ERR("Sequence nesting only supports integer types.");
312 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
313 ERR("Only string sequences are supported for contexts.");
316 ptr
->object_type
= OBJECT_TYPE_STRING
;
317 ctx_field
->get_value(ctx_field
, &v
);
321 ptr
->object_type
= OBJECT_TYPE_STRING
;
322 ctx_field
->get_value(ctx_field
, &v
);
326 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
327 ctx_field
->get_value(ctx_field
, &v
);
329 ptr
->ptr
= &ptr
->u
.d
;
332 ctx_field
->get_value(ctx_field
, &v
);
334 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
336 case LTTNG_UST_DYNAMIC_TYPE_U8
:
337 case LTTNG_UST_DYNAMIC_TYPE_U16
:
338 case LTTNG_UST_DYNAMIC_TYPE_U32
:
339 case LTTNG_UST_DYNAMIC_TYPE_U64
:
340 ptr
->object_type
= OBJECT_TYPE_U64
;
341 ptr
->u
.u64
= v
.u
.u64
;
342 ptr
->ptr
= &ptr
->u
.u64
;
343 dbg_printf("context get index dynamic u64 %" PRIi64
"\n", ptr
->u
.u64
);
345 case LTTNG_UST_DYNAMIC_TYPE_S8
:
346 case LTTNG_UST_DYNAMIC_TYPE_S16
:
347 case LTTNG_UST_DYNAMIC_TYPE_S32
:
348 case LTTNG_UST_DYNAMIC_TYPE_S64
:
349 ptr
->object_type
= OBJECT_TYPE_S64
;
350 ptr
->u
.s64
= v
.u
.s64
;
351 ptr
->ptr
= &ptr
->u
.s64
;
352 dbg_printf("context get index dynamic s64 %" PRIi64
"\n", ptr
->u
.s64
);
354 case LTTNG_UST_DYNAMIC_TYPE_FLOAT
:
355 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
356 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
358 ptr
->ptr
= &ptr
->u
.d
;
359 dbg_printf("context get index dynamic double %g\n", ptr
->u
.d
);
361 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
362 ptr
->object_type
= OBJECT_TYPE_STRING
;
364 dbg_printf("context get index dynamic string %s\n", (const char *) ptr
->ptr
);
367 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
372 ERR("Structure type cannot be loaded.");
375 ERR("Unknown type: %d", (int) field
->type
.atype
);
381 static int dynamic_get_index(struct lttng_ctx
*ctx
,
382 struct bytecode_runtime
*runtime
,
383 uint64_t index
, struct estack_entry
*stack_top
)
386 const struct bytecode_get_index_data
*gid
;
388 gid
= (const struct bytecode_get_index_data
*) &runtime
->data
[index
];
389 switch (stack_top
->u
.ptr
.type
) {
391 switch (stack_top
->u
.ptr
.object_type
) {
392 case OBJECT_TYPE_ARRAY
:
396 assert(gid
->offset
< gid
->array_len
);
397 /* Skip count (unsigned long) */
398 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
399 ptr
= ptr
+ gid
->offset
;
400 stack_top
->u
.ptr
.ptr
= ptr
;
401 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
402 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
403 assert(stack_top
->u
.ptr
.field
->type
.atype
== atype_array
||
404 stack_top
->u
.ptr
.field
->type
.atype
== atype_array_nestable
);
405 stack_top
->u
.ptr
.field
= NULL
;
408 case OBJECT_TYPE_SEQUENCE
:
413 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
414 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
415 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
419 ptr
= ptr
+ gid
->offset
;
420 stack_top
->u
.ptr
.ptr
= ptr
;
421 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
422 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
423 assert(stack_top
->u
.ptr
.field
->type
.atype
== atype_sequence
||
424 stack_top
->u
.ptr
.field
->type
.atype
== atype_sequence_nestable
);
425 stack_top
->u
.ptr
.field
= NULL
;
428 case OBJECT_TYPE_STRUCT
:
429 ERR("Nested structures are not supported yet.");
432 case OBJECT_TYPE_VARIANT
:
434 ERR("Unexpected get index type %d",
435 (int) stack_top
->u
.ptr
.object_type
);
440 case LOAD_ROOT_CONTEXT
:
441 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
443 ret
= context_get_index(ctx
,
451 case LOAD_ROOT_PAYLOAD
:
452 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
453 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
454 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
455 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
456 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
457 stack_top
->u
.ptr
.field
= gid
->field
;
458 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
462 stack_top
->type
= REG_PTR
;
470 static int dynamic_load_field(struct estack_entry
*stack_top
)
474 switch (stack_top
->u
.ptr
.type
) {
477 case LOAD_ROOT_CONTEXT
:
478 case LOAD_ROOT_APP_CONTEXT
:
479 case LOAD_ROOT_PAYLOAD
:
481 dbg_printf("Interpreter warning: cannot load root, missing field name.\n");
485 switch (stack_top
->u
.ptr
.object_type
) {
487 dbg_printf("op load field s8\n");
488 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
489 stack_top
->type
= REG_S64
;
491 case OBJECT_TYPE_S16
:
495 dbg_printf("op load field s16\n");
496 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
497 if (stack_top
->u
.ptr
.rev_bo
)
499 stack_top
->u
.v
= tmp
;
500 stack_top
->type
= REG_S64
;
503 case OBJECT_TYPE_S32
:
507 dbg_printf("op load field s32\n");
508 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
509 if (stack_top
->u
.ptr
.rev_bo
)
511 stack_top
->u
.v
= tmp
;
512 stack_top
->type
= REG_S64
;
515 case OBJECT_TYPE_S64
:
519 dbg_printf("op load field s64\n");
520 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
521 if (stack_top
->u
.ptr
.rev_bo
)
523 stack_top
->u
.v
= tmp
;
524 stack_top
->type
= REG_S64
;
527 case OBJECT_TYPE_SIGNED_ENUM
:
531 dbg_printf("op load field signed enumeration\n");
532 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
533 if (stack_top
->u
.ptr
.rev_bo
)
535 stack_top
->u
.v
= tmp
;
536 stack_top
->type
= REG_S64
;
540 dbg_printf("op load field u8\n");
541 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
542 stack_top
->type
= REG_U64
;
544 case OBJECT_TYPE_U16
:
548 dbg_printf("op load field u16\n");
549 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
550 if (stack_top
->u
.ptr
.rev_bo
)
552 stack_top
->u
.v
= tmp
;
553 stack_top
->type
= REG_U64
;
556 case OBJECT_TYPE_U32
:
560 dbg_printf("op load field u32\n");
561 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
562 if (stack_top
->u
.ptr
.rev_bo
)
564 stack_top
->u
.v
= tmp
;
565 stack_top
->type
= REG_U64
;
568 case OBJECT_TYPE_U64
:
572 dbg_printf("op load field u64\n");
573 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
574 if (stack_top
->u
.ptr
.rev_bo
)
576 stack_top
->u
.v
= tmp
;
577 stack_top
->type
= REG_U64
;
580 case OBJECT_TYPE_UNSIGNED_ENUM
:
584 dbg_printf("op load field unsigned enumeration\n");
585 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
586 if (stack_top
->u
.ptr
.rev_bo
)
588 stack_top
->u
.v
= tmp
;
589 stack_top
->type
= REG_U64
;
592 case OBJECT_TYPE_DOUBLE
:
593 memcpy(&stack_top
->u
.d
,
594 stack_top
->u
.ptr
.ptr
,
595 sizeof(struct literal_double
));
596 stack_top
->type
= REG_DOUBLE
;
598 case OBJECT_TYPE_STRING
:
602 dbg_printf("op load field string\n");
603 str
= (const char *) stack_top
->u
.ptr
.ptr
;
604 stack_top
->u
.s
.str
= str
;
605 if (unlikely(!stack_top
->u
.s
.str
)) {
606 dbg_printf("Interpreter warning: loading a NULL string.\n");
610 stack_top
->u
.s
.seq_len
= SIZE_MAX
;
611 stack_top
->u
.s
.literal_type
=
612 ESTACK_STRING_LITERAL_TYPE_NONE
;
613 stack_top
->type
= REG_STRING
;
616 case OBJECT_TYPE_STRING_SEQUENCE
:
620 dbg_printf("op load field string sequence\n");
621 ptr
= stack_top
->u
.ptr
.ptr
;
622 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
623 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
624 stack_top
->type
= REG_STRING
;
625 if (unlikely(!stack_top
->u
.s
.str
)) {
626 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
630 stack_top
->u
.s
.literal_type
=
631 ESTACK_STRING_LITERAL_TYPE_NONE
;
634 case OBJECT_TYPE_DYNAMIC
:
636 * Dynamic types in context are looked up
637 * by context get index.
641 case OBJECT_TYPE_SEQUENCE
:
642 case OBJECT_TYPE_ARRAY
:
643 case OBJECT_TYPE_STRUCT
:
644 case OBJECT_TYPE_VARIANT
:
645 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
656 int lttng_bytecode_interpret_format_output(struct estack_entry
*ax
,
657 struct lttng_interpreter_output
*output
)
664 output
->type
= LTTNG_INTERPRETER_TYPE_S64
;
665 output
->u
.s
= ax
->u
.v
;
668 output
->type
= LTTNG_INTERPRETER_TYPE_U64
;
669 output
->u
.u
= (uint64_t) ax
->u
.v
;
672 output
->type
= LTTNG_INTERPRETER_TYPE_DOUBLE
;
673 output
->u
.d
= ax
->u
.d
;
676 output
->type
= LTTNG_INTERPRETER_TYPE_STRING
;
677 output
->u
.str
.str
= ax
->u
.s
.str
;
678 output
->u
.str
.len
= ax
->u
.s
.seq_len
;
681 switch (ax
->u
.ptr
.object_type
) {
683 case OBJECT_TYPE_S16
:
684 case OBJECT_TYPE_S32
:
685 case OBJECT_TYPE_S64
:
687 case OBJECT_TYPE_U16
:
688 case OBJECT_TYPE_U32
:
689 case OBJECT_TYPE_U64
:
690 case OBJECT_TYPE_DOUBLE
:
691 case OBJECT_TYPE_STRING
:
692 case OBJECT_TYPE_STRING_SEQUENCE
:
693 ret
= dynamic_load_field(ax
);
696 /* Retry after loading ptr into stack top. */
698 case OBJECT_TYPE_SEQUENCE
:
699 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
700 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
701 output
->u
.sequence
.nr_elem
= *(unsigned long *) ax
->u
.ptr
.ptr
;
702 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.sequence_nestable
.elem_type
;
704 case OBJECT_TYPE_ARRAY
:
705 /* Skip count (unsigned long) */
706 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
707 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
708 output
->u
.sequence
.nr_elem
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.length
;
709 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.elem_type
;
711 case OBJECT_TYPE_SIGNED_ENUM
:
712 ret
= dynamic_load_field(ax
);
715 output
->type
= LTTNG_INTERPRETER_TYPE_SIGNED_ENUM
;
716 output
->u
.s
= ax
->u
.v
;
718 case OBJECT_TYPE_UNSIGNED_ENUM
:
719 ret
= dynamic_load_field(ax
);
722 output
->type
= LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM
;
723 output
->u
.u
= ax
->u
.v
;
725 case OBJECT_TYPE_STRUCT
:
726 case OBJECT_TYPE_VARIANT
:
732 case REG_STAR_GLOB_STRING
:
738 return LTTNG_INTERPRETER_RECORD_FLAG
;
742 * For `output` equal to NULL:
743 * Return 0 (discard), or raise the 0x1 flag (log event).
744 * Currently, other flags are kept for future extensions and have no
746 * For `output` not equal to NULL:
747 * Return 0 on success, negative error value on error.
750 uint64_t bytecode_interpret(void *interpreter_data
,
751 const char *interpreter_stack_data
,
752 struct lttng_interpreter_output
*output
)
754 struct bytecode_runtime
*bytecode
= interpreter_data
;
755 struct lttng_ctx
*ctx
= lttng_ust_rcu_dereference(*bytecode
->p
.pctx
);
756 void *pc
, *next_pc
, *start_pc
;
759 struct estack _stack
;
760 struct estack
*stack
= &_stack
;
761 register int64_t ax
= 0, bx
= 0;
762 register enum entry_type ax_t
= REG_UNKNOWN
, bx_t
= REG_UNKNOWN
;
763 register int top
= INTERPRETER_STACK_EMPTY
;
764 #ifndef INTERPRETER_USE_SWITCH
765 static void *dispatch
[NR_BYTECODE_OPS
] = {
766 [ BYTECODE_OP_UNKNOWN
] = &&LABEL_BYTECODE_OP_UNKNOWN
,
768 [ BYTECODE_OP_RETURN
] = &&LABEL_BYTECODE_OP_RETURN
,
771 [ BYTECODE_OP_MUL
] = &&LABEL_BYTECODE_OP_MUL
,
772 [ BYTECODE_OP_DIV
] = &&LABEL_BYTECODE_OP_DIV
,
773 [ BYTECODE_OP_MOD
] = &&LABEL_BYTECODE_OP_MOD
,
774 [ BYTECODE_OP_PLUS
] = &&LABEL_BYTECODE_OP_PLUS
,
775 [ BYTECODE_OP_MINUS
] = &&LABEL_BYTECODE_OP_MINUS
,
776 [ BYTECODE_OP_BIT_RSHIFT
] = &&LABEL_BYTECODE_OP_BIT_RSHIFT
,
777 [ BYTECODE_OP_BIT_LSHIFT
] = &&LABEL_BYTECODE_OP_BIT_LSHIFT
,
778 [ BYTECODE_OP_BIT_AND
] = &&LABEL_BYTECODE_OP_BIT_AND
,
779 [ BYTECODE_OP_BIT_OR
] = &&LABEL_BYTECODE_OP_BIT_OR
,
780 [ BYTECODE_OP_BIT_XOR
] = &&LABEL_BYTECODE_OP_BIT_XOR
,
782 /* binary comparators */
783 [ BYTECODE_OP_EQ
] = &&LABEL_BYTECODE_OP_EQ
,
784 [ BYTECODE_OP_NE
] = &&LABEL_BYTECODE_OP_NE
,
785 [ BYTECODE_OP_GT
] = &&LABEL_BYTECODE_OP_GT
,
786 [ BYTECODE_OP_LT
] = &&LABEL_BYTECODE_OP_LT
,
787 [ BYTECODE_OP_GE
] = &&LABEL_BYTECODE_OP_GE
,
788 [ BYTECODE_OP_LE
] = &&LABEL_BYTECODE_OP_LE
,
790 /* string binary comparator */
791 [ BYTECODE_OP_EQ_STRING
] = &&LABEL_BYTECODE_OP_EQ_STRING
,
792 [ BYTECODE_OP_NE_STRING
] = &&LABEL_BYTECODE_OP_NE_STRING
,
793 [ BYTECODE_OP_GT_STRING
] = &&LABEL_BYTECODE_OP_GT_STRING
,
794 [ BYTECODE_OP_LT_STRING
] = &&LABEL_BYTECODE_OP_LT_STRING
,
795 [ BYTECODE_OP_GE_STRING
] = &&LABEL_BYTECODE_OP_GE_STRING
,
796 [ BYTECODE_OP_LE_STRING
] = &&LABEL_BYTECODE_OP_LE_STRING
,
798 /* globbing pattern binary comparator */
799 [ BYTECODE_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING
,
800 [ BYTECODE_OP_NE_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING
,
802 /* s64 binary comparator */
803 [ BYTECODE_OP_EQ_S64
] = &&LABEL_BYTECODE_OP_EQ_S64
,
804 [ BYTECODE_OP_NE_S64
] = &&LABEL_BYTECODE_OP_NE_S64
,
805 [ BYTECODE_OP_GT_S64
] = &&LABEL_BYTECODE_OP_GT_S64
,
806 [ BYTECODE_OP_LT_S64
] = &&LABEL_BYTECODE_OP_LT_S64
,
807 [ BYTECODE_OP_GE_S64
] = &&LABEL_BYTECODE_OP_GE_S64
,
808 [ BYTECODE_OP_LE_S64
] = &&LABEL_BYTECODE_OP_LE_S64
,
810 /* double binary comparator */
811 [ BYTECODE_OP_EQ_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE
,
812 [ BYTECODE_OP_NE_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_DOUBLE
,
813 [ BYTECODE_OP_GT_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_DOUBLE
,
814 [ BYTECODE_OP_LT_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_DOUBLE
,
815 [ BYTECODE_OP_GE_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_DOUBLE
,
816 [ BYTECODE_OP_LE_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_DOUBLE
,
818 /* Mixed S64-double binary comparators */
819 [ BYTECODE_OP_EQ_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64
,
820 [ BYTECODE_OP_NE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64
,
821 [ BYTECODE_OP_GT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64
,
822 [ BYTECODE_OP_LT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64
,
823 [ BYTECODE_OP_GE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64
,
824 [ BYTECODE_OP_LE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64
,
826 [ BYTECODE_OP_EQ_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE
,
827 [ BYTECODE_OP_NE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE
,
828 [ BYTECODE_OP_GT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE
,
829 [ BYTECODE_OP_LT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE
,
830 [ BYTECODE_OP_GE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE
,
831 [ BYTECODE_OP_LE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE
,
834 [ BYTECODE_OP_UNARY_PLUS
] = &&LABEL_BYTECODE_OP_UNARY_PLUS
,
835 [ BYTECODE_OP_UNARY_MINUS
] = &&LABEL_BYTECODE_OP_UNARY_MINUS
,
836 [ BYTECODE_OP_UNARY_NOT
] = &&LABEL_BYTECODE_OP_UNARY_NOT
,
837 [ BYTECODE_OP_UNARY_PLUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64
,
838 [ BYTECODE_OP_UNARY_MINUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64
,
839 [ BYTECODE_OP_UNARY_NOT_S64
] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64
,
840 [ BYTECODE_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE
,
841 [ BYTECODE_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE
,
842 [ BYTECODE_OP_UNARY_NOT_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE
,
845 [ BYTECODE_OP_AND
] = &&LABEL_BYTECODE_OP_AND
,
846 [ BYTECODE_OP_OR
] = &&LABEL_BYTECODE_OP_OR
,
849 [ BYTECODE_OP_LOAD_FIELD_REF
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF
,
850 [ BYTECODE_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING
,
851 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
,
852 [ BYTECODE_OP_LOAD_FIELD_REF_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64
,
853 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
,
855 /* load from immediate operand */
856 [ BYTECODE_OP_LOAD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STRING
,
857 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING
,
858 [ BYTECODE_OP_LOAD_S64
] = &&LABEL_BYTECODE_OP_LOAD_S64
,
859 [ BYTECODE_OP_LOAD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE
,
862 [ BYTECODE_OP_CAST_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_TO_S64
,
863 [ BYTECODE_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64
,
864 [ BYTECODE_OP_CAST_NOP
] = &&LABEL_BYTECODE_OP_CAST_NOP
,
866 /* get context ref */
867 [ BYTECODE_OP_GET_CONTEXT_REF
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF
,
868 [ BYTECODE_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING
,
869 [ BYTECODE_OP_GET_CONTEXT_REF_S64
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64
,
870 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
,
872 /* Instructions for recursive traversal through composed types. */
873 [ BYTECODE_OP_GET_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT
,
874 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT
,
875 [ BYTECODE_OP_GET_PAYLOAD_ROOT
] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT
,
877 [ BYTECODE_OP_GET_SYMBOL
] = &&LABEL_BYTECODE_OP_GET_SYMBOL
,
878 [ BYTECODE_OP_GET_SYMBOL_FIELD
] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD
,
879 [ BYTECODE_OP_GET_INDEX_U16
] = &&LABEL_BYTECODE_OP_GET_INDEX_U16
,
880 [ BYTECODE_OP_GET_INDEX_U64
] = &&LABEL_BYTECODE_OP_GET_INDEX_U64
,
882 [ BYTECODE_OP_LOAD_FIELD
] = &&LABEL_BYTECODE_OP_LOAD_FIELD
,
883 [ BYTECODE_OP_LOAD_FIELD_S8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8
,
884 [ BYTECODE_OP_LOAD_FIELD_S16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16
,
885 [ BYTECODE_OP_LOAD_FIELD_S32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32
,
886 [ BYTECODE_OP_LOAD_FIELD_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64
,
887 [ BYTECODE_OP_LOAD_FIELD_U8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8
,
888 [ BYTECODE_OP_LOAD_FIELD_U16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16
,
889 [ BYTECODE_OP_LOAD_FIELD_U32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32
,
890 [ BYTECODE_OP_LOAD_FIELD_U64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64
,
891 [ BYTECODE_OP_LOAD_FIELD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING
,
892 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE
,
893 [ BYTECODE_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE
,
895 [ BYTECODE_OP_UNARY_BIT_NOT
] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT
,
897 [ BYTECODE_OP_RETURN_S64
] = &&LABEL_BYTECODE_OP_RETURN_S64
,
899 #endif /* #ifndef INTERPRETER_USE_SWITCH */
903 OP(BYTECODE_OP_UNKNOWN
):
904 OP(BYTECODE_OP_LOAD_FIELD_REF
):
905 #ifdef INTERPRETER_USE_SWITCH
907 #endif /* INTERPRETER_USE_SWITCH */
908 ERR("unknown bytecode op %u",
909 (unsigned int) *(bytecode_opcode_t
*) pc
);
913 OP(BYTECODE_OP_RETURN
):
914 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
915 /* Handle dynamic typing. */
916 switch (estack_ax_t
) {
919 retval
= !!estack_ax_v
;
930 case REG_STAR_GLOB_STRING
:
939 OP(BYTECODE_OP_RETURN_S64
):
940 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
941 retval
= !!estack_ax_v
;
949 OP(BYTECODE_OP_PLUS
):
950 OP(BYTECODE_OP_MINUS
):
951 ERR("unsupported bytecode op %u",
952 (unsigned int) *(bytecode_opcode_t
*) pc
);
958 /* Dynamic typing. */
959 switch (estack_ax_t
) {
960 case REG_S64
: /* Fall-through */
962 switch (estack_bx_t
) {
963 case REG_S64
: /* Fall-through */
965 JUMP_TO(BYTECODE_OP_EQ_S64
);
967 JUMP_TO(BYTECODE_OP_EQ_DOUBLE_S64
);
968 case REG_STRING
: /* Fall-through */
969 case REG_STAR_GLOB_STRING
:
973 ERR("Unknown interpreter register type (%d)",
980 switch (estack_bx_t
) {
981 case REG_S64
: /* Fall-through */
983 JUMP_TO(BYTECODE_OP_EQ_S64_DOUBLE
);
985 JUMP_TO(BYTECODE_OP_EQ_DOUBLE
);
986 case REG_STRING
: /* Fall-through */
987 case REG_STAR_GLOB_STRING
:
991 ERR("Unknown interpreter register type (%d)",
998 switch (estack_bx_t
) {
999 case REG_S64
: /* Fall-through */
1000 case REG_U64
: /* Fall-through */
1005 JUMP_TO(BYTECODE_OP_EQ_STRING
);
1006 case REG_STAR_GLOB_STRING
:
1007 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
1009 ERR("Unknown interpreter register type (%d)",
1015 case REG_STAR_GLOB_STRING
:
1016 switch (estack_bx_t
) {
1017 case REG_S64
: /* Fall-through */
1018 case REG_U64
: /* Fall-through */
1023 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
1024 case REG_STAR_GLOB_STRING
:
1028 ERR("Unknown interpreter register type (%d)",
1035 ERR("Unknown interpreter register type (%d)",
1043 /* Dynamic typing. */
1044 switch (estack_ax_t
) {
1045 case REG_S64
: /* Fall-through */
1047 switch (estack_bx_t
) {
1048 case REG_S64
: /* Fall-through */
1050 JUMP_TO(BYTECODE_OP_NE_S64
);
1052 JUMP_TO(BYTECODE_OP_NE_DOUBLE_S64
);
1053 case REG_STRING
: /* Fall-through */
1054 case REG_STAR_GLOB_STRING
:
1058 ERR("Unknown interpreter register type (%d)",
1065 switch (estack_bx_t
) {
1066 case REG_S64
: /* Fall-through */
1068 JUMP_TO(BYTECODE_OP_NE_S64_DOUBLE
);
1070 JUMP_TO(BYTECODE_OP_NE_DOUBLE
);
1071 case REG_STRING
: /* Fall-through */
1072 case REG_STAR_GLOB_STRING
:
1076 ERR("Unknown interpreter register type (%d)",
1083 switch (estack_bx_t
) {
1084 case REG_S64
: /* Fall-through */
1090 JUMP_TO(BYTECODE_OP_NE_STRING
);
1091 case REG_STAR_GLOB_STRING
:
1092 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1094 ERR("Unknown interpreter register type (%d)",
1100 case REG_STAR_GLOB_STRING
:
1101 switch (estack_bx_t
) {
1102 case REG_S64
: /* Fall-through */
1108 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1109 case REG_STAR_GLOB_STRING
:
1113 ERR("Unknown interpreter register type (%d)",
1120 ERR("Unknown interpreter register type (%d)",
1128 /* Dynamic typing. */
1129 switch (estack_ax_t
) {
1130 case REG_S64
: /* Fall-through */
1132 switch (estack_bx_t
) {
1133 case REG_S64
: /* Fall-through */
1135 JUMP_TO(BYTECODE_OP_GT_S64
);
1137 JUMP_TO(BYTECODE_OP_GT_DOUBLE_S64
);
1138 case REG_STRING
: /* Fall-through */
1139 case REG_STAR_GLOB_STRING
:
1143 ERR("Unknown interpreter register type (%d)",
1150 switch (estack_bx_t
) {
1151 case REG_S64
: /* Fall-through */
1153 JUMP_TO(BYTECODE_OP_GT_S64_DOUBLE
);
1155 JUMP_TO(BYTECODE_OP_GT_DOUBLE
);
1156 case REG_STRING
: /* Fall-through */
1157 case REG_STAR_GLOB_STRING
:
1161 ERR("Unknown interpreter register type (%d)",
1168 switch (estack_bx_t
) {
1169 case REG_S64
: /* Fall-through */
1170 case REG_U64
: /* Fall-through */
1171 case REG_DOUBLE
: /* Fall-through */
1172 case REG_STAR_GLOB_STRING
:
1176 JUMP_TO(BYTECODE_OP_GT_STRING
);
1178 ERR("Unknown interpreter register type (%d)",
1185 ERR("Unknown interpreter register type (%d)",
1193 /* Dynamic typing. */
1194 switch (estack_ax_t
) {
1195 case REG_S64
: /* Fall-through */
1197 switch (estack_bx_t
) {
1198 case REG_S64
: /* Fall-through */
1200 JUMP_TO(BYTECODE_OP_LT_S64
);
1202 JUMP_TO(BYTECODE_OP_LT_DOUBLE_S64
);
1203 case REG_STRING
: /* Fall-through */
1204 case REG_STAR_GLOB_STRING
:
1208 ERR("Unknown interpreter register type (%d)",
1215 switch (estack_bx_t
) {
1216 case REG_S64
: /* Fall-through */
1218 JUMP_TO(BYTECODE_OP_LT_S64_DOUBLE
);
1220 JUMP_TO(BYTECODE_OP_LT_DOUBLE
);
1221 case REG_STRING
: /* Fall-through */
1222 case REG_STAR_GLOB_STRING
:
1226 ERR("Unknown interpreter register type (%d)",
1233 switch (estack_bx_t
) {
1234 case REG_S64
: /* Fall-through */
1235 case REG_U64
: /* Fall-through */
1236 case REG_DOUBLE
: /* Fall-through */
1237 case REG_STAR_GLOB_STRING
:
1241 JUMP_TO(BYTECODE_OP_LT_STRING
);
1243 ERR("Unknown interpreter register type (%d)",
1250 ERR("Unknown interpreter register type (%d)",
1258 /* Dynamic typing. */
1259 switch (estack_ax_t
) {
1260 case REG_S64
: /* Fall-through */
1262 switch (estack_bx_t
) {
1263 case REG_S64
: /* Fall-through */
1265 JUMP_TO(BYTECODE_OP_GE_S64
);
1267 JUMP_TO(BYTECODE_OP_GE_DOUBLE_S64
);
1268 case REG_STRING
: /* Fall-through */
1269 case REG_STAR_GLOB_STRING
:
1273 ERR("Unknown interpreter register type (%d)",
1280 switch (estack_bx_t
) {
1281 case REG_S64
: /* Fall-through */
1283 JUMP_TO(BYTECODE_OP_GE_S64_DOUBLE
);
1285 JUMP_TO(BYTECODE_OP_GE_DOUBLE
);
1286 case REG_STRING
: /* Fall-through */
1287 case REG_STAR_GLOB_STRING
:
1291 ERR("Unknown interpreter register type (%d)",
1298 switch (estack_bx_t
) {
1299 case REG_S64
: /* Fall-through */
1300 case REG_U64
: /* Fall-through */
1301 case REG_DOUBLE
: /* Fall-through */
1302 case REG_STAR_GLOB_STRING
:
1306 JUMP_TO(BYTECODE_OP_GE_STRING
);
1308 ERR("Unknown interpreter register type (%d)",
1315 ERR("Unknown interpreter register type (%d)",
1323 /* Dynamic typing. */
1324 switch (estack_ax_t
) {
1325 case REG_S64
: /* Fall-through */
1327 switch (estack_bx_t
) {
1328 case REG_S64
: /* Fall-through */
1330 JUMP_TO(BYTECODE_OP_LE_S64
);
1332 JUMP_TO(BYTECODE_OP_LE_DOUBLE_S64
);
1333 case REG_STRING
: /* Fall-through */
1334 case REG_STAR_GLOB_STRING
:
1338 ERR("Unknown interpreter register type (%d)",
1345 switch (estack_bx_t
) {
1346 case REG_S64
: /* Fall-through */
1348 JUMP_TO(BYTECODE_OP_LE_S64_DOUBLE
);
1350 JUMP_TO(BYTECODE_OP_LE_DOUBLE
);
1351 case REG_STRING
: /* Fall-through */
1352 case REG_STAR_GLOB_STRING
:
1356 ERR("Unknown interpreter register type (%d)",
1363 switch (estack_bx_t
) {
1364 case REG_S64
: /* Fall-through */
1365 case REG_U64
: /* Fall-through */
1366 case REG_DOUBLE
: /* Fall-through */
1367 case REG_STAR_GLOB_STRING
:
1371 JUMP_TO(BYTECODE_OP_LE_STRING
);
1373 ERR("Unknown interpreter register type (%d)",
1380 ERR("Unknown interpreter register type (%d)",
1387 OP(BYTECODE_OP_EQ_STRING
):
1391 res
= (stack_strcmp(stack
, top
, "==") == 0);
1392 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1394 estack_ax_t
= REG_S64
;
1395 next_pc
+= sizeof(struct binary_op
);
1398 OP(BYTECODE_OP_NE_STRING
):
1402 res
= (stack_strcmp(stack
, top
, "!=") != 0);
1403 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1405 estack_ax_t
= REG_S64
;
1406 next_pc
+= sizeof(struct binary_op
);
1409 OP(BYTECODE_OP_GT_STRING
):
1413 res
= (stack_strcmp(stack
, top
, ">") > 0);
1414 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1416 estack_ax_t
= REG_S64
;
1417 next_pc
+= sizeof(struct binary_op
);
1420 OP(BYTECODE_OP_LT_STRING
):
1424 res
= (stack_strcmp(stack
, top
, "<") < 0);
1425 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1427 estack_ax_t
= REG_S64
;
1428 next_pc
+= sizeof(struct binary_op
);
1431 OP(BYTECODE_OP_GE_STRING
):
1435 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
1436 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1438 estack_ax_t
= REG_S64
;
1439 next_pc
+= sizeof(struct binary_op
);
1442 OP(BYTECODE_OP_LE_STRING
):
1446 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
1447 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1449 estack_ax_t
= REG_S64
;
1450 next_pc
+= sizeof(struct binary_op
);
1454 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING
):
1458 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
1459 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1461 estack_ax_t
= REG_S64
;
1462 next_pc
+= sizeof(struct binary_op
);
1465 OP(BYTECODE_OP_NE_STAR_GLOB_STRING
):
1469 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
1470 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1472 estack_ax_t
= REG_S64
;
1473 next_pc
+= sizeof(struct binary_op
);
1477 OP(BYTECODE_OP_EQ_S64
):
1481 res
= (estack_bx_v
== estack_ax_v
);
1482 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1484 estack_ax_t
= REG_S64
;
1485 next_pc
+= sizeof(struct binary_op
);
1488 OP(BYTECODE_OP_NE_S64
):
1492 res
= (estack_bx_v
!= estack_ax_v
);
1493 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1495 estack_ax_t
= REG_S64
;
1496 next_pc
+= sizeof(struct binary_op
);
1499 OP(BYTECODE_OP_GT_S64
):
1503 res
= (estack_bx_v
> estack_ax_v
);
1504 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1506 estack_ax_t
= REG_S64
;
1507 next_pc
+= sizeof(struct binary_op
);
1510 OP(BYTECODE_OP_LT_S64
):
1514 res
= (estack_bx_v
< estack_ax_v
);
1515 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1517 estack_ax_t
= REG_S64
;
1518 next_pc
+= sizeof(struct binary_op
);
1521 OP(BYTECODE_OP_GE_S64
):
1525 res
= (estack_bx_v
>= estack_ax_v
);
1526 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1528 estack_ax_t
= REG_S64
;
1529 next_pc
+= sizeof(struct binary_op
);
1532 OP(BYTECODE_OP_LE_S64
):
1536 res
= (estack_bx_v
<= estack_ax_v
);
1537 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1539 estack_ax_t
= REG_S64
;
1540 next_pc
+= sizeof(struct binary_op
);
1544 OP(BYTECODE_OP_EQ_DOUBLE
):
1548 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
1549 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1551 estack_ax_t
= REG_S64
;
1552 next_pc
+= sizeof(struct binary_op
);
1555 OP(BYTECODE_OP_NE_DOUBLE
):
1559 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
1560 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1562 estack_ax_t
= REG_S64
;
1563 next_pc
+= sizeof(struct binary_op
);
1566 OP(BYTECODE_OP_GT_DOUBLE
):
1570 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
1571 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1573 estack_ax_t
= REG_S64
;
1574 next_pc
+= sizeof(struct binary_op
);
1577 OP(BYTECODE_OP_LT_DOUBLE
):
1581 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
1582 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1584 estack_ax_t
= REG_S64
;
1585 next_pc
+= sizeof(struct binary_op
);
1588 OP(BYTECODE_OP_GE_DOUBLE
):
1592 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
1593 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1595 estack_ax_t
= REG_S64
;
1596 next_pc
+= sizeof(struct binary_op
);
1599 OP(BYTECODE_OP_LE_DOUBLE
):
1603 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
1604 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1606 estack_ax_t
= REG_S64
;
1607 next_pc
+= sizeof(struct binary_op
);
1611 /* Mixed S64-double binary comparators */
1612 OP(BYTECODE_OP_EQ_DOUBLE_S64
):
1616 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
1617 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1619 estack_ax_t
= REG_S64
;
1620 next_pc
+= sizeof(struct binary_op
);
1623 OP(BYTECODE_OP_NE_DOUBLE_S64
):
1627 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
1628 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1630 estack_ax_t
= REG_S64
;
1631 next_pc
+= sizeof(struct binary_op
);
1634 OP(BYTECODE_OP_GT_DOUBLE_S64
):
1638 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
1639 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1641 estack_ax_t
= REG_S64
;
1642 next_pc
+= sizeof(struct binary_op
);
1645 OP(BYTECODE_OP_LT_DOUBLE_S64
):
1649 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
1650 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1652 estack_ax_t
= REG_S64
;
1653 next_pc
+= sizeof(struct binary_op
);
1656 OP(BYTECODE_OP_GE_DOUBLE_S64
):
1660 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
1661 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1663 estack_ax_t
= REG_S64
;
1664 next_pc
+= sizeof(struct binary_op
);
1667 OP(BYTECODE_OP_LE_DOUBLE_S64
):
1671 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
1672 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1674 estack_ax_t
= REG_S64
;
1675 next_pc
+= sizeof(struct binary_op
);
1679 OP(BYTECODE_OP_EQ_S64_DOUBLE
):
1683 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
1684 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1686 estack_ax_t
= REG_S64
;
1687 next_pc
+= sizeof(struct binary_op
);
1690 OP(BYTECODE_OP_NE_S64_DOUBLE
):
1694 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
1695 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1697 estack_ax_t
= REG_S64
;
1698 next_pc
+= sizeof(struct binary_op
);
1701 OP(BYTECODE_OP_GT_S64_DOUBLE
):
1705 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
1706 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1708 estack_ax_t
= REG_S64
;
1709 next_pc
+= sizeof(struct binary_op
);
1712 OP(BYTECODE_OP_LT_S64_DOUBLE
):
1716 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
1717 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1719 estack_ax_t
= REG_S64
;
1720 next_pc
+= sizeof(struct binary_op
);
1723 OP(BYTECODE_OP_GE_S64_DOUBLE
):
1727 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
1728 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1730 estack_ax_t
= REG_S64
;
1731 next_pc
+= sizeof(struct binary_op
);
1734 OP(BYTECODE_OP_LE_S64_DOUBLE
):
1738 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
1739 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1741 estack_ax_t
= REG_S64
;
1742 next_pc
+= sizeof(struct binary_op
);
1745 OP(BYTECODE_OP_BIT_RSHIFT
):
1749 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1754 /* Catch undefined behavior. */
1755 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1759 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1760 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1762 estack_ax_t
= REG_U64
;
1763 next_pc
+= sizeof(struct binary_op
);
1766 OP(BYTECODE_OP_BIT_LSHIFT
):
1770 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1775 /* Catch undefined behavior. */
1776 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1780 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1781 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1783 estack_ax_t
= REG_U64
;
1784 next_pc
+= sizeof(struct binary_op
);
1787 OP(BYTECODE_OP_BIT_AND
):
1791 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1796 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1797 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1799 estack_ax_t
= REG_U64
;
1800 next_pc
+= sizeof(struct binary_op
);
1803 OP(BYTECODE_OP_BIT_OR
):
1807 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1812 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1813 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1815 estack_ax_t
= REG_U64
;
1816 next_pc
+= sizeof(struct binary_op
);
1819 OP(BYTECODE_OP_BIT_XOR
):
1823 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1828 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1829 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1831 estack_ax_t
= REG_U64
;
1832 next_pc
+= sizeof(struct binary_op
);
1837 OP(BYTECODE_OP_UNARY_PLUS
):
1839 /* Dynamic typing. */
1840 switch (estack_ax_t
) {
1841 case REG_S64
: /* Fall-through. */
1843 JUMP_TO(BYTECODE_OP_UNARY_PLUS_S64
);
1845 JUMP_TO(BYTECODE_OP_UNARY_PLUS_DOUBLE
);
1846 case REG_STRING
: /* Fall-through */
1847 case REG_STAR_GLOB_STRING
:
1851 ERR("Unknown interpreter register type (%d)",
1857 OP(BYTECODE_OP_UNARY_MINUS
):
1859 /* Dynamic typing. */
1860 switch (estack_ax_t
) {
1861 case REG_S64
: /* Fall-through. */
1863 JUMP_TO(BYTECODE_OP_UNARY_MINUS_S64
);
1865 JUMP_TO(BYTECODE_OP_UNARY_MINUS_DOUBLE
);
1866 case REG_STRING
: /* Fall-through */
1867 case REG_STAR_GLOB_STRING
:
1871 ERR("Unknown interpreter register type (%d)",
1877 OP(BYTECODE_OP_UNARY_NOT
):
1879 /* Dynamic typing. */
1880 switch (estack_ax_t
) {
1881 case REG_S64
: /* Fall-through. */
1883 JUMP_TO(BYTECODE_OP_UNARY_NOT_S64
);
1885 JUMP_TO(BYTECODE_OP_UNARY_NOT_DOUBLE
);
1886 case REG_STRING
: /* Fall-through */
1887 case REG_STAR_GLOB_STRING
:
1891 ERR("Unknown interpreter register type (%d)",
1896 next_pc
+= sizeof(struct unary_op
);
1900 OP(BYTECODE_OP_UNARY_BIT_NOT
):
1902 /* Dynamic typing. */
1903 if (!IS_INTEGER_REGISTER(estack_ax_t
)) {
1908 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1909 estack_ax_t
= REG_U64
;
1910 next_pc
+= sizeof(struct unary_op
);
1914 OP(BYTECODE_OP_UNARY_PLUS_S64
):
1915 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE
):
1917 next_pc
+= sizeof(struct unary_op
);
1920 OP(BYTECODE_OP_UNARY_MINUS_S64
):
1922 estack_ax_v
= -estack_ax_v
;
1923 next_pc
+= sizeof(struct unary_op
);
1926 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE
):
1928 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
1929 next_pc
+= sizeof(struct unary_op
);
1932 OP(BYTECODE_OP_UNARY_NOT_S64
):
1934 estack_ax_v
= !estack_ax_v
;
1935 estack_ax_t
= REG_S64
;
1936 next_pc
+= sizeof(struct unary_op
);
1939 OP(BYTECODE_OP_UNARY_NOT_DOUBLE
):
1941 estack_ax_v
= !estack_ax(stack
, top
)->u
.d
;
1942 estack_ax_t
= REG_S64
;
1943 next_pc
+= sizeof(struct unary_op
);
1948 OP(BYTECODE_OP_AND
):
1950 struct logical_op
*insn
= (struct logical_op
*) pc
;
1952 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1956 /* If AX is 0, skip and evaluate to 0 */
1957 if (unlikely(estack_ax_v
== 0)) {
1958 dbg_printf("Jumping to bytecode offset %u\n",
1959 (unsigned int) insn
->skip_offset
);
1960 next_pc
= start_pc
+ insn
->skip_offset
;
1962 /* Pop 1 when jump not taken */
1963 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1964 next_pc
+= sizeof(struct logical_op
);
1970 struct logical_op
*insn
= (struct logical_op
*) pc
;
1972 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1976 /* If AX is nonzero, skip and evaluate to 1 */
1977 if (unlikely(estack_ax_v
!= 0)) {
1979 dbg_printf("Jumping to bytecode offset %u\n",
1980 (unsigned int) insn
->skip_offset
);
1981 next_pc
= start_pc
+ insn
->skip_offset
;
1983 /* Pop 1 when jump not taken */
1984 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1985 next_pc
+= sizeof(struct logical_op
);
1991 /* load field ref */
1992 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING
):
1994 struct load_op
*insn
= (struct load_op
*) pc
;
1995 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1997 dbg_printf("load field ref offset %u type string\n",
1999 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2000 estack_ax(stack
, top
)->u
.s
.str
=
2001 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
2002 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2003 dbg_printf("Interpreter warning: loading a NULL string.\n");
2007 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2008 estack_ax(stack
, top
)->u
.s
.literal_type
=
2009 ESTACK_STRING_LITERAL_TYPE_NONE
;
2010 estack_ax_t
= REG_STRING
;
2011 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2012 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2016 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
):
2018 struct load_op
*insn
= (struct load_op
*) pc
;
2019 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2021 dbg_printf("load field ref offset %u type sequence\n",
2023 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2024 estack_ax(stack
, top
)->u
.s
.seq_len
=
2025 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
2026 estack_ax(stack
, top
)->u
.s
.str
=
2027 *(const char **) (&interpreter_stack_data
[ref
->offset
2028 + sizeof(unsigned long)]);
2029 estack_ax_t
= REG_STRING
;
2030 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2031 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2035 estack_ax(stack
, top
)->u
.s
.literal_type
=
2036 ESTACK_STRING_LITERAL_TYPE_NONE
;
2037 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2041 OP(BYTECODE_OP_LOAD_FIELD_REF_S64
):
2043 struct load_op
*insn
= (struct load_op
*) pc
;
2044 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2046 dbg_printf("load field ref offset %u type s64\n",
2048 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2050 ((struct literal_numeric
*) &interpreter_stack_data
[ref
->offset
])->v
;
2051 estack_ax_t
= REG_S64
;
2052 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
2053 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2057 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
):
2059 struct load_op
*insn
= (struct load_op
*) pc
;
2060 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2062 dbg_printf("load field ref offset %u type double\n",
2064 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2065 memcpy(&estack_ax(stack
, top
)->u
.d
, &interpreter_stack_data
[ref
->offset
],
2066 sizeof(struct literal_double
));
2067 estack_ax_t
= REG_DOUBLE
;
2068 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
2069 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2073 /* load from immediate operand */
2074 OP(BYTECODE_OP_LOAD_STRING
):
2076 struct load_op
*insn
= (struct load_op
*) pc
;
2078 dbg_printf("load string %s\n", insn
->data
);
2079 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2080 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2081 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2082 estack_ax(stack
, top
)->u
.s
.literal_type
=
2083 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
2084 estack_ax_t
= REG_STRING
;
2085 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2089 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING
):
2091 struct load_op
*insn
= (struct load_op
*) pc
;
2093 dbg_printf("load globbing pattern %s\n", insn
->data
);
2094 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2095 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2096 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2097 estack_ax(stack
, top
)->u
.s
.literal_type
=
2098 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
2099 estack_ax_t
= REG_STAR_GLOB_STRING
;
2100 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2104 OP(BYTECODE_OP_LOAD_S64
):
2106 struct load_op
*insn
= (struct load_op
*) pc
;
2108 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2109 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
2110 estack_ax_t
= REG_S64
;
2111 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
2112 next_pc
+= sizeof(struct load_op
)
2113 + sizeof(struct literal_numeric
);
2117 OP(BYTECODE_OP_LOAD_DOUBLE
):
2119 struct load_op
*insn
= (struct load_op
*) pc
;
2121 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2122 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
2123 sizeof(struct literal_double
));
2124 estack_ax_t
= REG_DOUBLE
;
2125 dbg_printf("load double %g\n", estack_ax(stack
, top
)->u
.d
);
2126 next_pc
+= sizeof(struct load_op
)
2127 + sizeof(struct literal_double
);
2132 OP(BYTECODE_OP_CAST_TO_S64
):
2134 /* Dynamic typing. */
2135 switch (estack_ax_t
) {
2137 JUMP_TO(BYTECODE_OP_CAST_NOP
);
2139 JUMP_TO(BYTECODE_OP_CAST_DOUBLE_TO_S64
);
2141 estack_ax_t
= REG_S64
;
2142 next_pc
+= sizeof(struct cast_op
);
2143 case REG_STRING
: /* Fall-through */
2144 case REG_STAR_GLOB_STRING
:
2148 ERR("Unknown interpreter register type (%d)",
2155 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64
):
2157 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
2158 estack_ax_t
= REG_S64
;
2159 next_pc
+= sizeof(struct cast_op
);
2163 OP(BYTECODE_OP_CAST_NOP
):
2165 next_pc
+= sizeof(struct cast_op
);
2169 /* get context ref */
2170 OP(BYTECODE_OP_GET_CONTEXT_REF
):
2172 struct load_op
*insn
= (struct load_op
*) pc
;
2173 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2174 struct lttng_ctx_field
*ctx_field
;
2175 struct lttng_ctx_value v
;
2177 dbg_printf("get context ref offset %u type dynamic\n",
2179 ctx_field
= &ctx
->fields
[ref
->offset
];
2180 ctx_field
->get_value(ctx_field
, &v
);
2181 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2183 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
2186 case LTTNG_UST_DYNAMIC_TYPE_S64
:
2187 estack_ax_v
= v
.u
.s64
;
2188 estack_ax_t
= REG_S64
;
2189 dbg_printf("ref get context dynamic s64 %" PRIi64
"\n", estack_ax_v
);
2191 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
2192 estack_ax(stack
, top
)->u
.d
= v
.u
.d
;
2193 estack_ax_t
= REG_DOUBLE
;
2194 dbg_printf("ref get context dynamic double %g\n", estack_ax(stack
, top
)->u
.d
);
2196 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
2197 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2198 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2199 dbg_printf("Interpreter warning: loading a NULL string.\n");
2203 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2204 estack_ax(stack
, top
)->u
.s
.literal_type
=
2205 ESTACK_STRING_LITERAL_TYPE_NONE
;
2206 dbg_printf("ref get context dynamic string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2207 estack_ax_t
= REG_STRING
;
2210 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
2214 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2218 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING
):
2220 struct load_op
*insn
= (struct load_op
*) pc
;
2221 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2222 struct lttng_ctx_field
*ctx_field
;
2223 struct lttng_ctx_value v
;
2225 dbg_printf("get context ref offset %u type string\n",
2227 ctx_field
= &ctx
->fields
[ref
->offset
];
2228 ctx_field
->get_value(ctx_field
, &v
);
2229 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2230 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2231 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2232 dbg_printf("Interpreter warning: loading a NULL string.\n");
2236 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2237 estack_ax(stack
, top
)->u
.s
.literal_type
=
2238 ESTACK_STRING_LITERAL_TYPE_NONE
;
2239 estack_ax_t
= REG_STRING
;
2240 dbg_printf("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2241 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2245 OP(BYTECODE_OP_GET_CONTEXT_REF_S64
):
2247 struct load_op
*insn
= (struct load_op
*) pc
;
2248 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2249 struct lttng_ctx_field
*ctx_field
;
2250 struct lttng_ctx_value v
;
2252 dbg_printf("get context ref offset %u type s64\n",
2254 ctx_field
= &ctx
->fields
[ref
->offset
];
2255 ctx_field
->get_value(ctx_field
, &v
);
2256 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2257 estack_ax_v
= v
.u
.s64
;
2258 estack_ax_t
= REG_S64
;
2259 dbg_printf("ref get context s64 %" PRIi64
"\n", estack_ax_v
);
2260 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2264 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
):
2266 struct load_op
*insn
= (struct load_op
*) pc
;
2267 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2268 struct lttng_ctx_field
*ctx_field
;
2269 struct lttng_ctx_value v
;
2271 dbg_printf("get context ref offset %u type double\n",
2273 ctx_field
= &ctx
->fields
[ref
->offset
];
2274 ctx_field
->get_value(ctx_field
, &v
);
2275 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2276 memcpy(&estack_ax(stack
, top
)->u
.d
, &v
.u
.d
, sizeof(struct literal_double
));
2277 estack_ax_t
= REG_DOUBLE
;
2278 dbg_printf("ref get context double %g\n", estack_ax(stack
, top
)->u
.d
);
2279 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2283 OP(BYTECODE_OP_GET_CONTEXT_ROOT
):
2285 dbg_printf("op get context root\n");
2286 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2287 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
2288 /* "field" only needed for variants. */
2289 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2290 estack_ax_t
= REG_PTR
;
2291 next_pc
+= sizeof(struct load_op
);
2295 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT
):
2297 dbg_printf("op get app context root\n");
2298 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2299 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_APP_CONTEXT
;
2300 /* "field" only needed for variants. */
2301 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2302 estack_ax_t
= REG_PTR
;
2303 next_pc
+= sizeof(struct load_op
);
2307 OP(BYTECODE_OP_GET_PAYLOAD_ROOT
):
2309 dbg_printf("op get app payload root\n");
2310 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2311 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
2312 estack_ax(stack
, top
)->u
.ptr
.ptr
= interpreter_stack_data
;
2313 /* "field" only needed for variants. */
2314 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2315 estack_ax_t
= REG_PTR
;
2316 next_pc
+= sizeof(struct load_op
);
2320 OP(BYTECODE_OP_GET_SYMBOL
):
2322 dbg_printf("op get symbol\n");
2323 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
2325 ERR("Nested fields not implemented yet.");
2328 case LOAD_ROOT_CONTEXT
:
2329 case LOAD_ROOT_APP_CONTEXT
:
2330 case LOAD_ROOT_PAYLOAD
:
2332 * symbol lookup is performed by
2338 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
2342 OP(BYTECODE_OP_GET_SYMBOL_FIELD
):
2345 * Used for first variant encountered in a
2346 * traversal. Variants are not implemented yet.
2352 OP(BYTECODE_OP_GET_INDEX_U16
):
2354 struct load_op
*insn
= (struct load_op
*) pc
;
2355 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
2357 dbg_printf("op get index u16\n");
2358 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2361 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2362 estack_ax_t
= estack_ax(stack
, top
)->type
;
2363 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
2367 OP(BYTECODE_OP_GET_INDEX_U64
):
2369 struct load_op
*insn
= (struct load_op
*) pc
;
2370 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
2372 dbg_printf("op get index u64\n");
2373 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2376 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2377 estack_ax_t
= estack_ax(stack
, top
)->type
;
2378 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
2382 OP(BYTECODE_OP_LOAD_FIELD
):
2384 dbg_printf("op load field\n");
2385 ret
= dynamic_load_field(estack_ax(stack
, top
));
2388 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2389 estack_ax_t
= estack_ax(stack
, top
)->type
;
2390 next_pc
+= sizeof(struct load_op
);
2394 OP(BYTECODE_OP_LOAD_FIELD_S8
):
2396 dbg_printf("op load field s8\n");
2398 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2399 estack_ax_t
= REG_S64
;
2400 next_pc
+= sizeof(struct load_op
);
2403 OP(BYTECODE_OP_LOAD_FIELD_S16
):
2405 dbg_printf("op load field s16\n");
2407 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2408 estack_ax_t
= REG_S64
;
2409 next_pc
+= sizeof(struct load_op
);
2412 OP(BYTECODE_OP_LOAD_FIELD_S32
):
2414 dbg_printf("op load field s32\n");
2416 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2417 estack_ax_t
= REG_S64
;
2418 next_pc
+= sizeof(struct load_op
);
2421 OP(BYTECODE_OP_LOAD_FIELD_S64
):
2423 dbg_printf("op load field s64\n");
2425 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2426 estack_ax_t
= REG_S64
;
2427 next_pc
+= sizeof(struct load_op
);
2430 OP(BYTECODE_OP_LOAD_FIELD_U8
):
2432 dbg_printf("op load field u8\n");
2434 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2435 estack_ax_t
= REG_U64
;
2436 next_pc
+= sizeof(struct load_op
);
2439 OP(BYTECODE_OP_LOAD_FIELD_U16
):
2441 dbg_printf("op load field u16\n");
2443 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2444 estack_ax_t
= REG_U64
;
2445 next_pc
+= sizeof(struct load_op
);
2448 OP(BYTECODE_OP_LOAD_FIELD_U32
):
2450 dbg_printf("op load field u32\n");
2452 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2453 estack_ax_t
= REG_U64
;
2454 next_pc
+= sizeof(struct load_op
);
2457 OP(BYTECODE_OP_LOAD_FIELD_U64
):
2459 dbg_printf("op load field u64\n");
2461 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2462 estack_ax_t
= REG_U64
;
2463 next_pc
+= sizeof(struct load_op
);
2466 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE
):
2468 dbg_printf("op load field double\n");
2470 memcpy(&estack_ax(stack
, top
)->u
.d
,
2471 estack_ax(stack
, top
)->u
.ptr
.ptr
,
2472 sizeof(struct literal_double
));
2473 estack_ax(stack
, top
)->type
= REG_DOUBLE
;
2474 next_pc
+= sizeof(struct load_op
);
2478 OP(BYTECODE_OP_LOAD_FIELD_STRING
):
2482 dbg_printf("op load field string\n");
2483 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2484 estack_ax(stack
, top
)->u
.s
.str
= str
;
2485 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2486 dbg_printf("Interpreter warning: loading a NULL string.\n");
2490 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2491 estack_ax(stack
, top
)->u
.s
.literal_type
=
2492 ESTACK_STRING_LITERAL_TYPE_NONE
;
2493 estack_ax(stack
, top
)->type
= REG_STRING
;
2494 next_pc
+= sizeof(struct load_op
);
2498 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE
):
2502 dbg_printf("op load field string sequence\n");
2503 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
2504 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
2505 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
2506 estack_ax(stack
, top
)->type
= REG_STRING
;
2507 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2508 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2512 estack_ax(stack
, top
)->u
.s
.literal_type
=
2513 ESTACK_STRING_LITERAL_TYPE_NONE
;
2514 next_pc
+= sizeof(struct load_op
);
2520 /* Return _DISCARD on error. */
2522 return LTTNG_INTERPRETER_DISCARD
;
2525 return lttng_bytecode_interpret_format_output(estack_ax(stack
, top
),
2532 uint64_t lttng_bytecode_filter_interpret(void *filter_data
,
2533 const char *filter_stack_data
)
2535 return bytecode_interpret(filter_data
, filter_stack_data
, NULL
);
2538 uint64_t lttng_bytecode_capture_interpret(void *capture_data
,
2539 const char *capture_stack_data
,
2540 struct lttng_interpreter_output
*output
)
2542 return bytecode_interpret(capture_data
, capture_stack_data
,
2543 (struct lttng_interpreter_output
*) output
);