/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng UST bytecode interpreter.
 */
13 #include <lttng/urcu/pointer.h>
14 #include <urcu/rculist.h>
15 #include <lttng/ust-endian.h>
16 #include <lttng/ust-events.h>
17 #include "lib/lttng-ust/events.h"
19 #include "lttng-bytecode.h"
20 #include "common/strutils.h"
/*
 * Parse one character of a PLAIN string literal, resolving backslash
 * escapes in place.
 *
 * Return values:
 *  0: normal char (or a known escape: *p is advanced onto the escaped
 *     character, which should then be compared literally).
 * -2: unknown escape char.
 */
static
int parse_char(const char **p)
{
	switch (**p) {
	case '\\':
		/* Skip the backslash; decide based on the escaped character. */
		(*p)++;
		switch (**p) {
		case '\\':
		case '*':
			/* Known escapes: compare the escaped char literally. */
			return 0;
		default:
			return -2;
		}
	default:
		return 0;
	}
}
50 * Returns SIZE_MAX if the string is null-terminated, or the number of
54 size_t get_str_or_seq_len(const struct estack_entry
*entry
)
56 return entry
->u
.s
.seq_len
;
60 int stack_star_glob_match(struct estack
*stack
, int top
,
61 const char *cmp_type
__attribute__((unused
)))
64 const char *candidate
;
68 /* Find out which side is the pattern vs. the candidate. */
69 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
70 pattern
= estack_ax(stack
, top
)->u
.s
.str
;
71 pattern_len
= get_str_or_seq_len(estack_ax(stack
, top
));
72 candidate
= estack_bx(stack
, top
)->u
.s
.str
;
73 candidate_len
= get_str_or_seq_len(estack_bx(stack
, top
));
75 pattern
= estack_bx(stack
, top
)->u
.s
.str
;
76 pattern_len
= get_str_or_seq_len(estack_bx(stack
, top
));
77 candidate
= estack_ax(stack
, top
)->u
.s
.str
;
78 candidate_len
= get_str_or_seq_len(estack_ax(stack
, top
));
81 /* Perform the match. Returns 0 when the result is true. */
82 return !strutils_star_glob_match(pattern
, pattern_len
, candidate
,
87 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
__attribute__((unused
)))
89 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
96 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
>= estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
97 if (q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
100 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
101 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
102 ret
= parse_char(&q
);
109 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
110 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
111 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
112 ret
= parse_char(&p
);
118 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
119 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
120 ret
= parse_char(&p
);
123 } else if (ret
== -2) {
126 /* else compare both char */
128 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
129 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
130 ret
= parse_char(&q
);
133 } else if (ret
== -2) {
153 int lttng_bytecode_interpret_error(
154 struct lttng_ust_bytecode_runtime
*bytecode_runtime
__attribute__((unused
)),
155 const char *stack_data
__attribute__((unused
)),
156 struct lttng_ust_probe_ctx
*probe_ctx
__attribute__((unused
)),
157 void *ctx
__attribute__((unused
)))
159 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR
;
162 #ifdef INTERPRETER_USE_SWITCH
165 * Fallback for compilers that do not support taking address of labels.
169 start_pc = &bytecode->code[0]; \
170 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
172 dbg_printf("Executing op %s (%u)\n", \
173 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
174 (unsigned int) *(bytecode_opcode_t *) pc); \
175 switch (*(bytecode_opcode_t *) pc) {
177 #define OP(name) jump_target_##name: __attribute__((unused)); \
185 #define JUMP_TO(name) \
186 goto jump_target_##name
191 * Dispatch-table based interpreter.
195 start_pc = &bytecode->code[0]; \
196 pc = next_pc = start_pc; \
197 if (unlikely(pc - start_pc >= bytecode->len)) \
199 goto *dispatch[*(bytecode_opcode_t *) pc];
206 goto *dispatch[*(bytecode_opcode_t *) pc];
210 #define JUMP_TO(name) \
215 #define IS_INTEGER_REGISTER(reg_type) \
216 (reg_type == REG_U64 || reg_type == REG_S64)
218 static int context_get_index(struct lttng_ust_ctx
*ctx
,
219 struct lttng_ust_probe_ctx
*probe_ctx
,
220 struct load_ptr
*ptr
,
224 const struct lttng_ust_ctx_field
*ctx_field
;
225 const struct lttng_ust_event_field
*field
;
226 struct lttng_ust_ctx_value v
;
228 ctx_field
= &ctx
->fields
[idx
];
229 field
= ctx_field
->event_field
;
230 ptr
->type
= LOAD_OBJECT
;
233 switch (field
->type
->type
) {
234 case lttng_ust_type_integer
:
235 ctx_field
->get_value(ctx_field
->priv
, probe_ctx
, &v
);
236 if (lttng_ust_get_type_integer(field
->type
)->signedness
) {
237 ptr
->object_type
= OBJECT_TYPE_S64
;
238 ptr
->u
.s64
= v
.u
.s64
;
239 ptr
->ptr
= &ptr
->u
.s64
;
241 ptr
->object_type
= OBJECT_TYPE_U64
;
242 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
243 ptr
->ptr
= &ptr
->u
.u64
;
245 ptr
->rev_bo
= lttng_ust_get_type_integer(field
->type
)->reverse_byte_order
;
247 case lttng_ust_type_enum
:
249 const struct lttng_ust_type_integer
*itype
;
251 itype
= lttng_ust_get_type_integer(lttng_ust_get_type_enum(field
->type
)->container_type
);
252 ctx_field
->get_value(ctx_field
->priv
, probe_ctx
, &v
);
253 if (itype
->signedness
) {
254 ptr
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
255 ptr
->u
.s64
= v
.u
.s64
;
256 ptr
->ptr
= &ptr
->u
.s64
;
258 ptr
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
259 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
260 ptr
->ptr
= &ptr
->u
.u64
;
262 ptr
->rev_bo
= itype
->reverse_byte_order
;
265 case lttng_ust_type_array
:
266 if (lttng_ust_get_type_array(field
->type
)->elem_type
->type
!= lttng_ust_type_integer
) {
267 ERR("Array nesting only supports integer types.");
270 if (lttng_ust_get_type_array(field
->type
)->encoding
== lttng_ust_string_encoding_none
) {
271 ERR("Only string arrays are supported for contexts.");
274 ptr
->object_type
= OBJECT_TYPE_STRING
;
275 ctx_field
->get_value(ctx_field
->priv
, probe_ctx
, &v
);
278 case lttng_ust_type_sequence
:
279 if (lttng_ust_get_type_sequence(field
->type
)->elem_type
->type
!= lttng_ust_type_integer
) {
280 ERR("Sequence nesting only supports integer types.");
283 if (lttng_ust_get_type_sequence(field
->type
)->encoding
== lttng_ust_string_encoding_none
) {
284 ERR("Only string sequences are supported for contexts.");
287 ptr
->object_type
= OBJECT_TYPE_STRING
;
288 ctx_field
->get_value(ctx_field
->priv
, probe_ctx
, &v
);
291 case lttng_ust_type_string
:
292 ptr
->object_type
= OBJECT_TYPE_STRING
;
293 ctx_field
->get_value(ctx_field
->priv
, probe_ctx
, &v
);
296 case lttng_ust_type_float
:
297 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
298 ctx_field
->get_value(ctx_field
->priv
, probe_ctx
, &v
);
300 ptr
->ptr
= &ptr
->u
.d
;
301 ptr
->rev_bo
= lttng_ust_get_type_float(field
->type
)->reverse_byte_order
;
303 case lttng_ust_type_dynamic
:
304 ctx_field
->get_value(ctx_field
->priv
, probe_ctx
, &v
);
306 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
308 case LTTNG_UST_DYNAMIC_TYPE_U8
:
309 case LTTNG_UST_DYNAMIC_TYPE_U16
:
310 case LTTNG_UST_DYNAMIC_TYPE_U32
:
311 case LTTNG_UST_DYNAMIC_TYPE_U64
:
312 ptr
->object_type
= OBJECT_TYPE_U64
;
313 ptr
->u
.u64
= v
.u
.u64
;
314 ptr
->ptr
= &ptr
->u
.u64
;
316 * struct lttng_ust_ctx_value does not currently
317 * feature a byte order field.
320 dbg_printf("context get index dynamic u64 %" PRIi64
"\n", ptr
->u
.u64
);
322 case LTTNG_UST_DYNAMIC_TYPE_S8
:
323 case LTTNG_UST_DYNAMIC_TYPE_S16
:
324 case LTTNG_UST_DYNAMIC_TYPE_S32
:
325 case LTTNG_UST_DYNAMIC_TYPE_S64
:
326 ptr
->object_type
= OBJECT_TYPE_S64
;
327 ptr
->u
.s64
= v
.u
.s64
;
328 ptr
->ptr
= &ptr
->u
.s64
;
330 * struct lttng_ust_ctx_value does not currently
331 * feature a byte order field.
334 dbg_printf("context get index dynamic s64 %" PRIi64
"\n", ptr
->u
.s64
);
336 case LTTNG_UST_DYNAMIC_TYPE_FLOAT
:
337 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
338 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
340 ptr
->ptr
= &ptr
->u
.d
;
342 * struct lttng_ust_ctx_value does not currently
343 * feature a byte order field.
346 dbg_printf("context get index dynamic double %g\n", ptr
->u
.d
);
348 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
349 ptr
->object_type
= OBJECT_TYPE_STRING
;
351 dbg_printf("context get index dynamic string %s\n", (const char *) ptr
->ptr
);
354 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
359 ERR("Unknown type: %d", (int) field
->type
->type
);
365 static int dynamic_get_index(struct lttng_ust_ctx
*ctx
,
366 struct lttng_ust_probe_ctx
*probe_ctx
,
367 struct bytecode_runtime
*runtime
,
368 uint64_t index
, struct estack_entry
*stack_top
)
371 const struct bytecode_get_index_data
*gid
;
373 gid
= (const struct bytecode_get_index_data
*) &runtime
->data
[index
];
374 switch (stack_top
->u
.ptr
.type
) {
376 switch (stack_top
->u
.ptr
.object_type
) {
377 case OBJECT_TYPE_ARRAY
:
381 assert(gid
->offset
< gid
->array_len
);
382 /* Skip count (unsigned long) */
383 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
384 ptr
= ptr
+ gid
->offset
;
385 stack_top
->u
.ptr
.ptr
= ptr
;
386 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
387 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
388 assert(stack_top
->u
.ptr
.field
->type
->type
== lttng_ust_type_array
);
389 stack_top
->u
.ptr
.field
= NULL
;
392 case OBJECT_TYPE_SEQUENCE
:
397 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
398 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
399 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
403 ptr
= ptr
+ gid
->offset
;
404 stack_top
->u
.ptr
.ptr
= ptr
;
405 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
406 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
407 assert(stack_top
->u
.ptr
.field
->type
->type
== lttng_ust_type_sequence
);
408 stack_top
->u
.ptr
.field
= NULL
;
411 case OBJECT_TYPE_STRUCT
:
412 ERR("Nested structures are not supported yet.");
415 case OBJECT_TYPE_VARIANT
:
417 ERR("Unexpected get index type %d",
418 (int) stack_top
->u
.ptr
.object_type
);
423 case LOAD_ROOT_CONTEXT
:
424 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
426 ret
= context_get_index(ctx
,
435 case LOAD_ROOT_PAYLOAD
:
436 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
437 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
438 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
439 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
440 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
441 stack_top
->u
.ptr
.field
= gid
->field
;
442 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
446 stack_top
->type
= REG_PTR
;
454 static int dynamic_load_field(struct estack_entry
*stack_top
)
458 switch (stack_top
->u
.ptr
.type
) {
461 case LOAD_ROOT_CONTEXT
:
462 case LOAD_ROOT_APP_CONTEXT
:
463 case LOAD_ROOT_PAYLOAD
:
465 dbg_printf("Interpreter warning: cannot load root, missing field name.\n");
469 switch (stack_top
->u
.ptr
.object_type
) {
471 dbg_printf("op load field s8\n");
472 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
473 stack_top
->type
= REG_S64
;
475 case OBJECT_TYPE_S16
:
479 dbg_printf("op load field s16\n");
480 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
481 if (stack_top
->u
.ptr
.rev_bo
)
482 tmp
= lttng_ust_bswap_16(tmp
);
483 stack_top
->u
.v
= tmp
;
484 stack_top
->type
= REG_S64
;
487 case OBJECT_TYPE_S32
:
491 dbg_printf("op load field s32\n");
492 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
493 if (stack_top
->u
.ptr
.rev_bo
)
494 tmp
= lttng_ust_bswap_32(tmp
);
495 stack_top
->u
.v
= tmp
;
496 stack_top
->type
= REG_S64
;
499 case OBJECT_TYPE_S64
:
503 dbg_printf("op load field s64\n");
504 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
505 if (stack_top
->u
.ptr
.rev_bo
)
506 tmp
= lttng_ust_bswap_64(tmp
);
507 stack_top
->u
.v
= tmp
;
508 stack_top
->type
= REG_S64
;
511 case OBJECT_TYPE_SIGNED_ENUM
:
515 dbg_printf("op load field signed enumeration\n");
516 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
517 if (stack_top
->u
.ptr
.rev_bo
)
518 tmp
= lttng_ust_bswap_64(tmp
);
519 stack_top
->u
.v
= tmp
;
520 stack_top
->type
= REG_S64
;
524 dbg_printf("op load field u8\n");
525 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
526 stack_top
->type
= REG_U64
;
528 case OBJECT_TYPE_U16
:
532 dbg_printf("op load field u16\n");
533 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
534 if (stack_top
->u
.ptr
.rev_bo
)
535 tmp
= lttng_ust_bswap_16(tmp
);
536 stack_top
->u
.v
= tmp
;
537 stack_top
->type
= REG_U64
;
540 case OBJECT_TYPE_U32
:
544 dbg_printf("op load field u32\n");
545 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
546 if (stack_top
->u
.ptr
.rev_bo
)
547 tmp
= lttng_ust_bswap_32(tmp
);
548 stack_top
->u
.v
= tmp
;
549 stack_top
->type
= REG_U64
;
552 case OBJECT_TYPE_U64
:
556 dbg_printf("op load field u64\n");
557 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
558 if (stack_top
->u
.ptr
.rev_bo
)
559 tmp
= lttng_ust_bswap_64(tmp
);
560 stack_top
->u
.v
= tmp
;
561 stack_top
->type
= REG_U64
;
564 case OBJECT_TYPE_UNSIGNED_ENUM
:
568 dbg_printf("op load field unsigned enumeration\n");
569 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
570 if (stack_top
->u
.ptr
.rev_bo
)
571 tmp
= lttng_ust_bswap_64(tmp
);
572 stack_top
->u
.v
= tmp
;
573 stack_top
->type
= REG_U64
;
576 case OBJECT_TYPE_DOUBLE
:
577 memcpy(&stack_top
->u
.d
,
578 stack_top
->u
.ptr
.ptr
,
579 sizeof(struct literal_double
));
580 stack_top
->type
= REG_DOUBLE
;
582 case OBJECT_TYPE_STRING
:
586 dbg_printf("op load field string\n");
587 str
= (const char *) stack_top
->u
.ptr
.ptr
;
588 stack_top
->u
.s
.str
= str
;
589 if (unlikely(!stack_top
->u
.s
.str
)) {
590 dbg_printf("Interpreter warning: loading a NULL string.\n");
594 stack_top
->u
.s
.seq_len
= SIZE_MAX
;
595 stack_top
->u
.s
.literal_type
=
596 ESTACK_STRING_LITERAL_TYPE_NONE
;
597 stack_top
->type
= REG_STRING
;
600 case OBJECT_TYPE_STRING_SEQUENCE
:
604 dbg_printf("op load field string sequence\n");
605 ptr
= stack_top
->u
.ptr
.ptr
;
606 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
607 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
608 stack_top
->type
= REG_STRING
;
609 if (unlikely(!stack_top
->u
.s
.str
)) {
610 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
614 stack_top
->u
.s
.literal_type
=
615 ESTACK_STRING_LITERAL_TYPE_NONE
;
618 case OBJECT_TYPE_DYNAMIC
:
620 * Dynamic types in context are looked up
621 * by context get index.
625 case OBJECT_TYPE_SEQUENCE
:
626 case OBJECT_TYPE_ARRAY
:
627 case OBJECT_TYPE_STRUCT
:
628 case OBJECT_TYPE_VARIANT
:
629 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
640 int lttng_bytecode_interpret_format_output(struct estack_entry
*ax
,
641 struct lttng_interpreter_output
*output
)
648 output
->type
= LTTNG_INTERPRETER_TYPE_S64
;
649 output
->u
.s
= ax
->u
.v
;
652 output
->type
= LTTNG_INTERPRETER_TYPE_U64
;
653 output
->u
.u
= (uint64_t) ax
->u
.v
;
656 output
->type
= LTTNG_INTERPRETER_TYPE_DOUBLE
;
657 output
->u
.d
= ax
->u
.d
;
660 output
->type
= LTTNG_INTERPRETER_TYPE_STRING
;
661 output
->u
.str
.str
= ax
->u
.s
.str
;
662 output
->u
.str
.len
= ax
->u
.s
.seq_len
;
665 switch (ax
->u
.ptr
.object_type
) {
667 case OBJECT_TYPE_S16
:
668 case OBJECT_TYPE_S32
:
669 case OBJECT_TYPE_S64
:
671 case OBJECT_TYPE_U16
:
672 case OBJECT_TYPE_U32
:
673 case OBJECT_TYPE_U64
:
674 case OBJECT_TYPE_DOUBLE
:
675 case OBJECT_TYPE_STRING
:
676 case OBJECT_TYPE_STRING_SEQUENCE
:
677 ret
= dynamic_load_field(ax
);
680 /* Retry after loading ptr into stack top. */
682 case OBJECT_TYPE_SEQUENCE
:
683 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
684 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
685 output
->u
.sequence
.nr_elem
= *(unsigned long *) ax
->u
.ptr
.ptr
;
686 output
->u
.sequence
.nested_type
= lttng_ust_get_type_sequence(ax
->u
.ptr
.field
->type
)->elem_type
;
688 case OBJECT_TYPE_ARRAY
:
689 /* Skip count (unsigned long) */
690 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
691 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
692 output
->u
.sequence
.nr_elem
= lttng_ust_get_type_array(ax
->u
.ptr
.field
->type
)->length
;
693 output
->u
.sequence
.nested_type
= lttng_ust_get_type_array(ax
->u
.ptr
.field
->type
)->elem_type
;
695 case OBJECT_TYPE_SIGNED_ENUM
:
696 ret
= dynamic_load_field(ax
);
699 output
->type
= LTTNG_INTERPRETER_TYPE_SIGNED_ENUM
;
700 output
->u
.s
= ax
->u
.v
;
702 case OBJECT_TYPE_UNSIGNED_ENUM
:
703 ret
= dynamic_load_field(ax
);
706 output
->type
= LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM
;
707 output
->u
.u
= ax
->u
.v
;
709 case OBJECT_TYPE_STRUCT
:
710 case OBJECT_TYPE_VARIANT
:
716 case REG_STAR_GLOB_STRING
:
726 * Return LTTNG_UST_BYTECODE_INTERPRETER_OK on success.
727 * Return LTTNG_UST_BYTECODE_INTERPRETER_ERROR on error.
729 * For FILTER bytecode: expect a struct lttng_ust_bytecode_filter_ctx *
731 * For CAPTURE bytecode: expect a struct lttng_interpreter_output *
734 int lttng_bytecode_interpret(struct lttng_ust_bytecode_runtime
*ust_bytecode
,
735 const char *interpreter_stack_data
,
736 struct lttng_ust_probe_ctx
*probe_ctx
,
739 struct bytecode_runtime
*bytecode
= caa_container_of(ust_bytecode
, struct bytecode_runtime
, p
);
740 struct lttng_ust_ctx
*ctx
= lttng_ust_rcu_dereference(*ust_bytecode
->pctx
);
741 void *pc
, *next_pc
, *start_pc
;
742 int ret
= -EINVAL
, retval
= 0;
743 struct estack _stack
;
744 struct estack
*stack
= &_stack
;
745 register int64_t ax
= 0, bx
= 0;
746 register enum entry_type ax_t
= REG_UNKNOWN
, bx_t
= REG_UNKNOWN
;
747 register int top
= INTERPRETER_STACK_EMPTY
;
748 #ifndef INTERPRETER_USE_SWITCH
749 static void *dispatch
[NR_BYTECODE_OPS
] = {
750 [ BYTECODE_OP_UNKNOWN
] = &&LABEL_BYTECODE_OP_UNKNOWN
,
752 [ BYTECODE_OP_RETURN
] = &&LABEL_BYTECODE_OP_RETURN
,
755 [ BYTECODE_OP_MUL
] = &&LABEL_BYTECODE_OP_MUL
,
756 [ BYTECODE_OP_DIV
] = &&LABEL_BYTECODE_OP_DIV
,
757 [ BYTECODE_OP_MOD
] = &&LABEL_BYTECODE_OP_MOD
,
758 [ BYTECODE_OP_PLUS
] = &&LABEL_BYTECODE_OP_PLUS
,
759 [ BYTECODE_OP_MINUS
] = &&LABEL_BYTECODE_OP_MINUS
,
760 [ BYTECODE_OP_BIT_RSHIFT
] = &&LABEL_BYTECODE_OP_BIT_RSHIFT
,
761 [ BYTECODE_OP_BIT_LSHIFT
] = &&LABEL_BYTECODE_OP_BIT_LSHIFT
,
762 [ BYTECODE_OP_BIT_AND
] = &&LABEL_BYTECODE_OP_BIT_AND
,
763 [ BYTECODE_OP_BIT_OR
] = &&LABEL_BYTECODE_OP_BIT_OR
,
764 [ BYTECODE_OP_BIT_XOR
] = &&LABEL_BYTECODE_OP_BIT_XOR
,
766 /* binary comparators */
767 [ BYTECODE_OP_EQ
] = &&LABEL_BYTECODE_OP_EQ
,
768 [ BYTECODE_OP_NE
] = &&LABEL_BYTECODE_OP_NE
,
769 [ BYTECODE_OP_GT
] = &&LABEL_BYTECODE_OP_GT
,
770 [ BYTECODE_OP_LT
] = &&LABEL_BYTECODE_OP_LT
,
771 [ BYTECODE_OP_GE
] = &&LABEL_BYTECODE_OP_GE
,
772 [ BYTECODE_OP_LE
] = &&LABEL_BYTECODE_OP_LE
,
774 /* string binary comparator */
775 [ BYTECODE_OP_EQ_STRING
] = &&LABEL_BYTECODE_OP_EQ_STRING
,
776 [ BYTECODE_OP_NE_STRING
] = &&LABEL_BYTECODE_OP_NE_STRING
,
777 [ BYTECODE_OP_GT_STRING
] = &&LABEL_BYTECODE_OP_GT_STRING
,
778 [ BYTECODE_OP_LT_STRING
] = &&LABEL_BYTECODE_OP_LT_STRING
,
779 [ BYTECODE_OP_GE_STRING
] = &&LABEL_BYTECODE_OP_GE_STRING
,
780 [ BYTECODE_OP_LE_STRING
] = &&LABEL_BYTECODE_OP_LE_STRING
,
782 /* globbing pattern binary comparator */
783 [ BYTECODE_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING
,
784 [ BYTECODE_OP_NE_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING
,
786 /* s64 binary comparator */
787 [ BYTECODE_OP_EQ_S64
] = &&LABEL_BYTECODE_OP_EQ_S64
,
788 [ BYTECODE_OP_NE_S64
] = &&LABEL_BYTECODE_OP_NE_S64
,
789 [ BYTECODE_OP_GT_S64
] = &&LABEL_BYTECODE_OP_GT_S64
,
790 [ BYTECODE_OP_LT_S64
] = &&LABEL_BYTECODE_OP_LT_S64
,
791 [ BYTECODE_OP_GE_S64
] = &&LABEL_BYTECODE_OP_GE_S64
,
792 [ BYTECODE_OP_LE_S64
] = &&LABEL_BYTECODE_OP_LE_S64
,
794 /* double binary comparator */
795 [ BYTECODE_OP_EQ_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE
,
796 [ BYTECODE_OP_NE_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_DOUBLE
,
797 [ BYTECODE_OP_GT_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_DOUBLE
,
798 [ BYTECODE_OP_LT_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_DOUBLE
,
799 [ BYTECODE_OP_GE_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_DOUBLE
,
800 [ BYTECODE_OP_LE_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_DOUBLE
,
802 /* Mixed S64-double binary comparators */
803 [ BYTECODE_OP_EQ_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64
,
804 [ BYTECODE_OP_NE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64
,
805 [ BYTECODE_OP_GT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64
,
806 [ BYTECODE_OP_LT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64
,
807 [ BYTECODE_OP_GE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64
,
808 [ BYTECODE_OP_LE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64
,
810 [ BYTECODE_OP_EQ_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE
,
811 [ BYTECODE_OP_NE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE
,
812 [ BYTECODE_OP_GT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE
,
813 [ BYTECODE_OP_LT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE
,
814 [ BYTECODE_OP_GE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE
,
815 [ BYTECODE_OP_LE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE
,
818 [ BYTECODE_OP_UNARY_PLUS
] = &&LABEL_BYTECODE_OP_UNARY_PLUS
,
819 [ BYTECODE_OP_UNARY_MINUS
] = &&LABEL_BYTECODE_OP_UNARY_MINUS
,
820 [ BYTECODE_OP_UNARY_NOT
] = &&LABEL_BYTECODE_OP_UNARY_NOT
,
821 [ BYTECODE_OP_UNARY_PLUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64
,
822 [ BYTECODE_OP_UNARY_MINUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64
,
823 [ BYTECODE_OP_UNARY_NOT_S64
] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64
,
824 [ BYTECODE_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE
,
825 [ BYTECODE_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE
,
826 [ BYTECODE_OP_UNARY_NOT_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE
,
829 [ BYTECODE_OP_AND
] = &&LABEL_BYTECODE_OP_AND
,
830 [ BYTECODE_OP_OR
] = &&LABEL_BYTECODE_OP_OR
,
833 [ BYTECODE_OP_LOAD_FIELD_REF
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF
,
834 [ BYTECODE_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING
,
835 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
,
836 [ BYTECODE_OP_LOAD_FIELD_REF_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64
,
837 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
,
839 /* load from immediate operand */
840 [ BYTECODE_OP_LOAD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STRING
,
841 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING
,
842 [ BYTECODE_OP_LOAD_S64
] = &&LABEL_BYTECODE_OP_LOAD_S64
,
843 [ BYTECODE_OP_LOAD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE
,
846 [ BYTECODE_OP_CAST_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_TO_S64
,
847 [ BYTECODE_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64
,
848 [ BYTECODE_OP_CAST_NOP
] = &&LABEL_BYTECODE_OP_CAST_NOP
,
850 /* get context ref */
851 [ BYTECODE_OP_GET_CONTEXT_REF
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF
,
852 [ BYTECODE_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING
,
853 [ BYTECODE_OP_GET_CONTEXT_REF_S64
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64
,
854 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
,
856 /* Instructions for recursive traversal through composed types. */
857 [ BYTECODE_OP_GET_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT
,
858 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT
,
859 [ BYTECODE_OP_GET_PAYLOAD_ROOT
] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT
,
861 [ BYTECODE_OP_GET_SYMBOL
] = &&LABEL_BYTECODE_OP_GET_SYMBOL
,
862 [ BYTECODE_OP_GET_SYMBOL_FIELD
] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD
,
863 [ BYTECODE_OP_GET_INDEX_U16
] = &&LABEL_BYTECODE_OP_GET_INDEX_U16
,
864 [ BYTECODE_OP_GET_INDEX_U64
] = &&LABEL_BYTECODE_OP_GET_INDEX_U64
,
866 [ BYTECODE_OP_LOAD_FIELD
] = &&LABEL_BYTECODE_OP_LOAD_FIELD
,
867 [ BYTECODE_OP_LOAD_FIELD_S8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8
,
868 [ BYTECODE_OP_LOAD_FIELD_S16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16
,
869 [ BYTECODE_OP_LOAD_FIELD_S32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32
,
870 [ BYTECODE_OP_LOAD_FIELD_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64
,
871 [ BYTECODE_OP_LOAD_FIELD_U8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8
,
872 [ BYTECODE_OP_LOAD_FIELD_U16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16
,
873 [ BYTECODE_OP_LOAD_FIELD_U32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32
,
874 [ BYTECODE_OP_LOAD_FIELD_U64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64
,
875 [ BYTECODE_OP_LOAD_FIELD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING
,
876 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE
,
877 [ BYTECODE_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE
,
879 [ BYTECODE_OP_UNARY_BIT_NOT
] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT
,
881 [ BYTECODE_OP_RETURN_S64
] = &&LABEL_BYTECODE_OP_RETURN_S64
,
883 #endif /* #ifndef INTERPRETER_USE_SWITCH */
887 OP(BYTECODE_OP_UNKNOWN
):
888 OP(BYTECODE_OP_LOAD_FIELD_REF
):
889 #ifdef INTERPRETER_USE_SWITCH
891 #endif /* INTERPRETER_USE_SWITCH */
892 ERR("unknown bytecode op %u",
893 (unsigned int) *(bytecode_opcode_t
*) pc
);
897 OP(BYTECODE_OP_RETURN
):
898 /* LTTNG_UST_BYTECODE_INTERPRETER_ERROR or LTTNG_UST_BYTECODE_INTERPRETER_OK */
899 /* Handle dynamic typing. */
900 switch (estack_ax_t
) {
903 retval
= !!estack_ax_v
;
908 if (ust_bytecode
->type
!= LTTNG_UST_BYTECODE_TYPE_CAPTURE
) {
914 case REG_STAR_GLOB_STRING
:
923 OP(BYTECODE_OP_RETURN_S64
):
924 /* LTTNG_UST_BYTECODE_INTERPRETER_ERROR or LTTNG_UST_BYTECODE_INTERPRETER_OK */
925 retval
= !!estack_ax_v
;
933 OP(BYTECODE_OP_PLUS
):
934 OP(BYTECODE_OP_MINUS
):
935 ERR("unsupported bytecode op %u",
936 (unsigned int) *(bytecode_opcode_t
*) pc
);
942 /* Dynamic typing. */
943 switch (estack_ax_t
) {
944 case REG_S64
: /* Fall-through */
946 switch (estack_bx_t
) {
947 case REG_S64
: /* Fall-through */
949 JUMP_TO(BYTECODE_OP_EQ_S64
);
951 JUMP_TO(BYTECODE_OP_EQ_DOUBLE_S64
);
952 case REG_STRING
: /* Fall-through */
953 case REG_STAR_GLOB_STRING
:
957 ERR("Unknown interpreter register type (%d)",
964 switch (estack_bx_t
) {
965 case REG_S64
: /* Fall-through */
967 JUMP_TO(BYTECODE_OP_EQ_S64_DOUBLE
);
969 JUMP_TO(BYTECODE_OP_EQ_DOUBLE
);
970 case REG_STRING
: /* Fall-through */
971 case REG_STAR_GLOB_STRING
:
975 ERR("Unknown interpreter register type (%d)",
982 switch (estack_bx_t
) {
983 case REG_S64
: /* Fall-through */
984 case REG_U64
: /* Fall-through */
989 JUMP_TO(BYTECODE_OP_EQ_STRING
);
990 case REG_STAR_GLOB_STRING
:
991 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
993 ERR("Unknown interpreter register type (%d)",
999 case REG_STAR_GLOB_STRING
:
1000 switch (estack_bx_t
) {
1001 case REG_S64
: /* Fall-through */
1002 case REG_U64
: /* Fall-through */
1007 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
1008 case REG_STAR_GLOB_STRING
:
1012 ERR("Unknown interpreter register type (%d)",
1019 ERR("Unknown interpreter register type (%d)",
1027 /* Dynamic typing. */
1028 switch (estack_ax_t
) {
1029 case REG_S64
: /* Fall-through */
1031 switch (estack_bx_t
) {
1032 case REG_S64
: /* Fall-through */
1034 JUMP_TO(BYTECODE_OP_NE_S64
);
1036 JUMP_TO(BYTECODE_OP_NE_DOUBLE_S64
);
1037 case REG_STRING
: /* Fall-through */
1038 case REG_STAR_GLOB_STRING
:
1042 ERR("Unknown interpreter register type (%d)",
1049 switch (estack_bx_t
) {
1050 case REG_S64
: /* Fall-through */
1052 JUMP_TO(BYTECODE_OP_NE_S64_DOUBLE
);
1054 JUMP_TO(BYTECODE_OP_NE_DOUBLE
);
1055 case REG_STRING
: /* Fall-through */
1056 case REG_STAR_GLOB_STRING
:
1060 ERR("Unknown interpreter register type (%d)",
1067 switch (estack_bx_t
) {
1068 case REG_S64
: /* Fall-through */
1074 JUMP_TO(BYTECODE_OP_NE_STRING
);
1075 case REG_STAR_GLOB_STRING
:
1076 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1078 ERR("Unknown interpreter register type (%d)",
1084 case REG_STAR_GLOB_STRING
:
1085 switch (estack_bx_t
) {
1086 case REG_S64
: /* Fall-through */
1092 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1093 case REG_STAR_GLOB_STRING
:
1097 ERR("Unknown interpreter register type (%d)",
1104 ERR("Unknown interpreter register type (%d)",
1112 /* Dynamic typing. */
1113 switch (estack_ax_t
) {
1114 case REG_S64
: /* Fall-through */
1116 switch (estack_bx_t
) {
1117 case REG_S64
: /* Fall-through */
1119 JUMP_TO(BYTECODE_OP_GT_S64
);
1121 JUMP_TO(BYTECODE_OP_GT_DOUBLE_S64
);
1122 case REG_STRING
: /* Fall-through */
1123 case REG_STAR_GLOB_STRING
:
1127 ERR("Unknown interpreter register type (%d)",
1134 switch (estack_bx_t
) {
1135 case REG_S64
: /* Fall-through */
1137 JUMP_TO(BYTECODE_OP_GT_S64_DOUBLE
);
1139 JUMP_TO(BYTECODE_OP_GT_DOUBLE
);
1140 case REG_STRING
: /* Fall-through */
1141 case REG_STAR_GLOB_STRING
:
1145 ERR("Unknown interpreter register type (%d)",
1152 switch (estack_bx_t
) {
1153 case REG_S64
: /* Fall-through */
1154 case REG_U64
: /* Fall-through */
1155 case REG_DOUBLE
: /* Fall-through */
1156 case REG_STAR_GLOB_STRING
:
1160 JUMP_TO(BYTECODE_OP_GT_STRING
);
1162 ERR("Unknown interpreter register type (%d)",
1169 ERR("Unknown interpreter register type (%d)",
1177 /* Dynamic typing. */
1178 switch (estack_ax_t
) {
1179 case REG_S64
: /* Fall-through */
1181 switch (estack_bx_t
) {
1182 case REG_S64
: /* Fall-through */
1184 JUMP_TO(BYTECODE_OP_LT_S64
);
1186 JUMP_TO(BYTECODE_OP_LT_DOUBLE_S64
);
1187 case REG_STRING
: /* Fall-through */
1188 case REG_STAR_GLOB_STRING
:
1192 ERR("Unknown interpreter register type (%d)",
1199 switch (estack_bx_t
) {
1200 case REG_S64
: /* Fall-through */
1202 JUMP_TO(BYTECODE_OP_LT_S64_DOUBLE
);
1204 JUMP_TO(BYTECODE_OP_LT_DOUBLE
);
1205 case REG_STRING
: /* Fall-through */
1206 case REG_STAR_GLOB_STRING
:
1210 ERR("Unknown interpreter register type (%d)",
1217 switch (estack_bx_t
) {
1218 case REG_S64
: /* Fall-through */
1219 case REG_U64
: /* Fall-through */
1220 case REG_DOUBLE
: /* Fall-through */
1221 case REG_STAR_GLOB_STRING
:
1225 JUMP_TO(BYTECODE_OP_LT_STRING
);
1227 ERR("Unknown interpreter register type (%d)",
1234 ERR("Unknown interpreter register type (%d)",
1242 /* Dynamic typing. */
1243 switch (estack_ax_t
) {
1244 case REG_S64
: /* Fall-through */
1246 switch (estack_bx_t
) {
1247 case REG_S64
: /* Fall-through */
1249 JUMP_TO(BYTECODE_OP_GE_S64
);
1251 JUMP_TO(BYTECODE_OP_GE_DOUBLE_S64
);
1252 case REG_STRING
: /* Fall-through */
1253 case REG_STAR_GLOB_STRING
:
1257 ERR("Unknown interpreter register type (%d)",
1264 switch (estack_bx_t
) {
1265 case REG_S64
: /* Fall-through */
1267 JUMP_TO(BYTECODE_OP_GE_S64_DOUBLE
);
1269 JUMP_TO(BYTECODE_OP_GE_DOUBLE
);
1270 case REG_STRING
: /* Fall-through */
1271 case REG_STAR_GLOB_STRING
:
1275 ERR("Unknown interpreter register type (%d)",
1282 switch (estack_bx_t
) {
1283 case REG_S64
: /* Fall-through */
1284 case REG_U64
: /* Fall-through */
1285 case REG_DOUBLE
: /* Fall-through */
1286 case REG_STAR_GLOB_STRING
:
1290 JUMP_TO(BYTECODE_OP_GE_STRING
);
1292 ERR("Unknown interpreter register type (%d)",
1299 ERR("Unknown interpreter register type (%d)",
1307 /* Dynamic typing. */
1308 switch (estack_ax_t
) {
1309 case REG_S64
: /* Fall-through */
1311 switch (estack_bx_t
) {
1312 case REG_S64
: /* Fall-through */
1314 JUMP_TO(BYTECODE_OP_LE_S64
);
1316 JUMP_TO(BYTECODE_OP_LE_DOUBLE_S64
);
1317 case REG_STRING
: /* Fall-through */
1318 case REG_STAR_GLOB_STRING
:
1322 ERR("Unknown interpreter register type (%d)",
1329 switch (estack_bx_t
) {
1330 case REG_S64
: /* Fall-through */
1332 JUMP_TO(BYTECODE_OP_LE_S64_DOUBLE
);
1334 JUMP_TO(BYTECODE_OP_LE_DOUBLE
);
1335 case REG_STRING
: /* Fall-through */
1336 case REG_STAR_GLOB_STRING
:
1340 ERR("Unknown interpreter register type (%d)",
1347 switch (estack_bx_t
) {
1348 case REG_S64
: /* Fall-through */
1349 case REG_U64
: /* Fall-through */
1350 case REG_DOUBLE
: /* Fall-through */
1351 case REG_STAR_GLOB_STRING
:
1355 JUMP_TO(BYTECODE_OP_LE_STRING
);
1357 ERR("Unknown interpreter register type (%d)",
1364 ERR("Unknown interpreter register type (%d)",
1371 OP(BYTECODE_OP_EQ_STRING
):
1375 res
= (stack_strcmp(stack
, top
, "==") == 0);
1376 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1378 estack_ax_t
= REG_S64
;
1379 next_pc
+= sizeof(struct binary_op
);
1382 OP(BYTECODE_OP_NE_STRING
):
1386 res
= (stack_strcmp(stack
, top
, "!=") != 0);
1387 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1389 estack_ax_t
= REG_S64
;
1390 next_pc
+= sizeof(struct binary_op
);
1393 OP(BYTECODE_OP_GT_STRING
):
1397 res
= (stack_strcmp(stack
, top
, ">") > 0);
1398 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1400 estack_ax_t
= REG_S64
;
1401 next_pc
+= sizeof(struct binary_op
);
1404 OP(BYTECODE_OP_LT_STRING
):
1408 res
= (stack_strcmp(stack
, top
, "<") < 0);
1409 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1411 estack_ax_t
= REG_S64
;
1412 next_pc
+= sizeof(struct binary_op
);
1415 OP(BYTECODE_OP_GE_STRING
):
1419 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
1420 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1422 estack_ax_t
= REG_S64
;
1423 next_pc
+= sizeof(struct binary_op
);
1426 OP(BYTECODE_OP_LE_STRING
):
1430 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
1431 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1433 estack_ax_t
= REG_S64
;
1434 next_pc
+= sizeof(struct binary_op
);
1438 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING
):
1442 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
1443 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1445 estack_ax_t
= REG_S64
;
1446 next_pc
+= sizeof(struct binary_op
);
1449 OP(BYTECODE_OP_NE_STAR_GLOB_STRING
):
1453 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
1454 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1456 estack_ax_t
= REG_S64
;
1457 next_pc
+= sizeof(struct binary_op
);
1461 OP(BYTECODE_OP_EQ_S64
):
1465 res
= (estack_bx_v
== estack_ax_v
);
1466 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1468 estack_ax_t
= REG_S64
;
1469 next_pc
+= sizeof(struct binary_op
);
1472 OP(BYTECODE_OP_NE_S64
):
1476 res
= (estack_bx_v
!= estack_ax_v
);
1477 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1479 estack_ax_t
= REG_S64
;
1480 next_pc
+= sizeof(struct binary_op
);
1483 OP(BYTECODE_OP_GT_S64
):
1487 res
= (estack_bx_v
> estack_ax_v
);
1488 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1490 estack_ax_t
= REG_S64
;
1491 next_pc
+= sizeof(struct binary_op
);
1494 OP(BYTECODE_OP_LT_S64
):
1498 res
= (estack_bx_v
< estack_ax_v
);
1499 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1501 estack_ax_t
= REG_S64
;
1502 next_pc
+= sizeof(struct binary_op
);
1505 OP(BYTECODE_OP_GE_S64
):
1509 res
= (estack_bx_v
>= estack_ax_v
);
1510 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1512 estack_ax_t
= REG_S64
;
1513 next_pc
+= sizeof(struct binary_op
);
1516 OP(BYTECODE_OP_LE_S64
):
1520 res
= (estack_bx_v
<= estack_ax_v
);
1521 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1523 estack_ax_t
= REG_S64
;
1524 next_pc
+= sizeof(struct binary_op
);
1528 OP(BYTECODE_OP_EQ_DOUBLE
):
1532 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
1533 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1535 estack_ax_t
= REG_S64
;
1536 next_pc
+= sizeof(struct binary_op
);
1539 OP(BYTECODE_OP_NE_DOUBLE
):
1543 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
1544 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1546 estack_ax_t
= REG_S64
;
1547 next_pc
+= sizeof(struct binary_op
);
1550 OP(BYTECODE_OP_GT_DOUBLE
):
1554 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
1555 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1557 estack_ax_t
= REG_S64
;
1558 next_pc
+= sizeof(struct binary_op
);
1561 OP(BYTECODE_OP_LT_DOUBLE
):
1565 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
1566 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1568 estack_ax_t
= REG_S64
;
1569 next_pc
+= sizeof(struct binary_op
);
1572 OP(BYTECODE_OP_GE_DOUBLE
):
1576 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
1577 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1579 estack_ax_t
= REG_S64
;
1580 next_pc
+= sizeof(struct binary_op
);
1583 OP(BYTECODE_OP_LE_DOUBLE
):
1587 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
1588 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1590 estack_ax_t
= REG_S64
;
1591 next_pc
+= sizeof(struct binary_op
);
1595 /* Mixed S64-double binary comparators */
1596 OP(BYTECODE_OP_EQ_DOUBLE_S64
):
1600 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
1601 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1603 estack_ax_t
= REG_S64
;
1604 next_pc
+= sizeof(struct binary_op
);
1607 OP(BYTECODE_OP_NE_DOUBLE_S64
):
1611 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
1612 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1614 estack_ax_t
= REG_S64
;
1615 next_pc
+= sizeof(struct binary_op
);
1618 OP(BYTECODE_OP_GT_DOUBLE_S64
):
1622 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
1623 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1625 estack_ax_t
= REG_S64
;
1626 next_pc
+= sizeof(struct binary_op
);
1629 OP(BYTECODE_OP_LT_DOUBLE_S64
):
1633 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
1634 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1636 estack_ax_t
= REG_S64
;
1637 next_pc
+= sizeof(struct binary_op
);
1640 OP(BYTECODE_OP_GE_DOUBLE_S64
):
1644 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
1645 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1647 estack_ax_t
= REG_S64
;
1648 next_pc
+= sizeof(struct binary_op
);
1651 OP(BYTECODE_OP_LE_DOUBLE_S64
):
1655 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
1656 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1658 estack_ax_t
= REG_S64
;
1659 next_pc
+= sizeof(struct binary_op
);
1663 OP(BYTECODE_OP_EQ_S64_DOUBLE
):
1667 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
1668 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1670 estack_ax_t
= REG_S64
;
1671 next_pc
+= sizeof(struct binary_op
);
1674 OP(BYTECODE_OP_NE_S64_DOUBLE
):
1678 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
1679 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1681 estack_ax_t
= REG_S64
;
1682 next_pc
+= sizeof(struct binary_op
);
1685 OP(BYTECODE_OP_GT_S64_DOUBLE
):
1689 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
1690 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1692 estack_ax_t
= REG_S64
;
1693 next_pc
+= sizeof(struct binary_op
);
1696 OP(BYTECODE_OP_LT_S64_DOUBLE
):
1700 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
1701 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1703 estack_ax_t
= REG_S64
;
1704 next_pc
+= sizeof(struct binary_op
);
1707 OP(BYTECODE_OP_GE_S64_DOUBLE
):
1711 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
1712 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1714 estack_ax_t
= REG_S64
;
1715 next_pc
+= sizeof(struct binary_op
);
1718 OP(BYTECODE_OP_LE_S64_DOUBLE
):
1722 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
1723 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1725 estack_ax_t
= REG_S64
;
1726 next_pc
+= sizeof(struct binary_op
);
1729 OP(BYTECODE_OP_BIT_RSHIFT
):
1733 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1738 /* Catch undefined behavior. */
1739 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1743 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1744 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1746 estack_ax_t
= REG_U64
;
1747 next_pc
+= sizeof(struct binary_op
);
1750 OP(BYTECODE_OP_BIT_LSHIFT
):
1754 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1759 /* Catch undefined behavior. */
1760 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1764 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1765 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1767 estack_ax_t
= REG_U64
;
1768 next_pc
+= sizeof(struct binary_op
);
1771 OP(BYTECODE_OP_BIT_AND
):
1775 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1780 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1781 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1783 estack_ax_t
= REG_U64
;
1784 next_pc
+= sizeof(struct binary_op
);
1787 OP(BYTECODE_OP_BIT_OR
):
1791 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1796 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1797 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1799 estack_ax_t
= REG_U64
;
1800 next_pc
+= sizeof(struct binary_op
);
1803 OP(BYTECODE_OP_BIT_XOR
):
1807 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1812 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1813 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1815 estack_ax_t
= REG_U64
;
1816 next_pc
+= sizeof(struct binary_op
);
1821 OP(BYTECODE_OP_UNARY_PLUS
):
1823 /* Dynamic typing. */
1824 switch (estack_ax_t
) {
1825 case REG_S64
: /* Fall-through. */
1827 JUMP_TO(BYTECODE_OP_UNARY_PLUS_S64
);
1829 JUMP_TO(BYTECODE_OP_UNARY_PLUS_DOUBLE
);
1830 case REG_STRING
: /* Fall-through */
1831 case REG_STAR_GLOB_STRING
:
1835 ERR("Unknown interpreter register type (%d)",
1841 OP(BYTECODE_OP_UNARY_MINUS
):
1843 /* Dynamic typing. */
1844 switch (estack_ax_t
) {
1845 case REG_S64
: /* Fall-through. */
1847 JUMP_TO(BYTECODE_OP_UNARY_MINUS_S64
);
1849 JUMP_TO(BYTECODE_OP_UNARY_MINUS_DOUBLE
);
1850 case REG_STRING
: /* Fall-through */
1851 case REG_STAR_GLOB_STRING
:
1855 ERR("Unknown interpreter register type (%d)",
1861 OP(BYTECODE_OP_UNARY_NOT
):
1863 /* Dynamic typing. */
1864 switch (estack_ax_t
) {
1865 case REG_S64
: /* Fall-through. */
1867 JUMP_TO(BYTECODE_OP_UNARY_NOT_S64
);
1869 JUMP_TO(BYTECODE_OP_UNARY_NOT_DOUBLE
);
1870 case REG_STRING
: /* Fall-through */
1871 case REG_STAR_GLOB_STRING
:
1875 ERR("Unknown interpreter register type (%d)",
1880 next_pc
+= sizeof(struct unary_op
);
1884 OP(BYTECODE_OP_UNARY_BIT_NOT
):
1886 /* Dynamic typing. */
1887 if (!IS_INTEGER_REGISTER(estack_ax_t
)) {
1892 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1893 estack_ax_t
= REG_U64
;
1894 next_pc
+= sizeof(struct unary_op
);
1898 OP(BYTECODE_OP_UNARY_PLUS_S64
):
1899 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE
):
1901 next_pc
+= sizeof(struct unary_op
);
1904 OP(BYTECODE_OP_UNARY_MINUS_S64
):
1906 estack_ax_v
= -estack_ax_v
;
1907 next_pc
+= sizeof(struct unary_op
);
1910 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE
):
1912 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
1913 next_pc
+= sizeof(struct unary_op
);
1916 OP(BYTECODE_OP_UNARY_NOT_S64
):
1918 estack_ax_v
= !estack_ax_v
;
1919 estack_ax_t
= REG_S64
;
1920 next_pc
+= sizeof(struct unary_op
);
1923 OP(BYTECODE_OP_UNARY_NOT_DOUBLE
):
1925 estack_ax_v
= !estack_ax(stack
, top
)->u
.d
;
1926 estack_ax_t
= REG_S64
;
1927 next_pc
+= sizeof(struct unary_op
);
1932 OP(BYTECODE_OP_AND
):
1934 struct logical_op
*insn
= (struct logical_op
*) pc
;
1936 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1940 /* If AX is 0, skip and evaluate to 0 */
1941 if (unlikely(estack_ax_v
== 0)) {
1942 dbg_printf("Jumping to bytecode offset %u\n",
1943 (unsigned int) insn
->skip_offset
);
1944 next_pc
= start_pc
+ insn
->skip_offset
;
1946 /* Pop 1 when jump not taken */
1947 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1948 next_pc
+= sizeof(struct logical_op
);
1954 struct logical_op
*insn
= (struct logical_op
*) pc
;
1956 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1960 /* If AX is nonzero, skip and evaluate to 1 */
1961 if (unlikely(estack_ax_v
!= 0)) {
1963 dbg_printf("Jumping to bytecode offset %u\n",
1964 (unsigned int) insn
->skip_offset
);
1965 next_pc
= start_pc
+ insn
->skip_offset
;
1967 /* Pop 1 when jump not taken */
1968 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1969 next_pc
+= sizeof(struct logical_op
);
1975 /* load field ref */
1976 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING
):
1978 struct load_op
*insn
= (struct load_op
*) pc
;
1979 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1981 dbg_printf("load field ref offset %u type string\n",
1983 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1984 estack_ax(stack
, top
)->u
.s
.str
=
1985 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1986 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1987 dbg_printf("Interpreter warning: loading a NULL string.\n");
1991 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
1992 estack_ax(stack
, top
)->u
.s
.literal_type
=
1993 ESTACK_STRING_LITERAL_TYPE_NONE
;
1994 estack_ax_t
= REG_STRING
;
1995 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1996 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2000 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
):
2002 struct load_op
*insn
= (struct load_op
*) pc
;
2003 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2005 dbg_printf("load field ref offset %u type sequence\n",
2007 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2008 estack_ax(stack
, top
)->u
.s
.seq_len
=
2009 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
2010 estack_ax(stack
, top
)->u
.s
.str
=
2011 *(const char **) (&interpreter_stack_data
[ref
->offset
2012 + sizeof(unsigned long)]);
2013 estack_ax_t
= REG_STRING
;
2014 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2015 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2019 estack_ax(stack
, top
)->u
.s
.literal_type
=
2020 ESTACK_STRING_LITERAL_TYPE_NONE
;
2021 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2025 OP(BYTECODE_OP_LOAD_FIELD_REF_S64
):
2027 struct load_op
*insn
= (struct load_op
*) pc
;
2028 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2030 dbg_printf("load field ref offset %u type s64\n",
2032 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2034 ((struct literal_numeric
*) &interpreter_stack_data
[ref
->offset
])->v
;
2035 estack_ax_t
= REG_S64
;
2036 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
2037 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2041 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
):
2043 struct load_op
*insn
= (struct load_op
*) pc
;
2044 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2046 dbg_printf("load field ref offset %u type double\n",
2048 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2049 memcpy(&estack_ax(stack
, top
)->u
.d
, &interpreter_stack_data
[ref
->offset
],
2050 sizeof(struct literal_double
));
2051 estack_ax_t
= REG_DOUBLE
;
2052 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
2053 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2057 /* load from immediate operand */
2058 OP(BYTECODE_OP_LOAD_STRING
):
2060 struct load_op
*insn
= (struct load_op
*) pc
;
2062 dbg_printf("load string %s\n", insn
->data
);
2063 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2064 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2065 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2066 estack_ax(stack
, top
)->u
.s
.literal_type
=
2067 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
2068 estack_ax_t
= REG_STRING
;
2069 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2073 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING
):
2075 struct load_op
*insn
= (struct load_op
*) pc
;
2077 dbg_printf("load globbing pattern %s\n", insn
->data
);
2078 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2079 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2080 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2081 estack_ax(stack
, top
)->u
.s
.literal_type
=
2082 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
2083 estack_ax_t
= REG_STAR_GLOB_STRING
;
2084 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2088 OP(BYTECODE_OP_LOAD_S64
):
2090 struct load_op
*insn
= (struct load_op
*) pc
;
2092 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2093 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
2094 estack_ax_t
= REG_S64
;
2095 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
2096 next_pc
+= sizeof(struct load_op
)
2097 + sizeof(struct literal_numeric
);
2101 OP(BYTECODE_OP_LOAD_DOUBLE
):
2103 struct load_op
*insn
= (struct load_op
*) pc
;
2105 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2106 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
2107 sizeof(struct literal_double
));
2108 estack_ax_t
= REG_DOUBLE
;
2109 dbg_printf("load double %g\n", estack_ax(stack
, top
)->u
.d
);
2110 next_pc
+= sizeof(struct load_op
)
2111 + sizeof(struct literal_double
);
2116 OP(BYTECODE_OP_CAST_TO_S64
):
2118 /* Dynamic typing. */
2119 switch (estack_ax_t
) {
2121 JUMP_TO(BYTECODE_OP_CAST_NOP
);
2123 JUMP_TO(BYTECODE_OP_CAST_DOUBLE_TO_S64
);
2125 estack_ax_t
= REG_S64
;
2126 next_pc
+= sizeof(struct cast_op
); /* Fall-through */
2127 case REG_STRING
: /* Fall-through */
2128 case REG_STAR_GLOB_STRING
:
2132 ERR("Unknown interpreter register type (%d)",
2139 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64
):
2141 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
2142 estack_ax_t
= REG_S64
;
2143 next_pc
+= sizeof(struct cast_op
);
2147 OP(BYTECODE_OP_CAST_NOP
):
2149 next_pc
+= sizeof(struct cast_op
);
2153 /* get context ref */
2154 OP(BYTECODE_OP_GET_CONTEXT_REF
):
2156 struct load_op
*insn
= (struct load_op
*) pc
;
2157 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2158 const struct lttng_ust_ctx_field
*ctx_field
;
2159 struct lttng_ust_ctx_value v
;
2161 dbg_printf("get context ref offset %u type dynamic\n",
2163 ctx_field
= &ctx
->fields
[ref
->offset
];
2164 ctx_field
->get_value(ctx_field
->priv
, probe_ctx
, &v
);
2165 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2167 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
2170 case LTTNG_UST_DYNAMIC_TYPE_S64
:
2171 estack_ax_v
= v
.u
.s64
;
2172 estack_ax_t
= REG_S64
;
2173 dbg_printf("ref get context dynamic s64 %" PRIi64
"\n", estack_ax_v
);
2175 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
2176 estack_ax(stack
, top
)->u
.d
= v
.u
.d
;
2177 estack_ax_t
= REG_DOUBLE
;
2178 dbg_printf("ref get context dynamic double %g\n", estack_ax(stack
, top
)->u
.d
);
2180 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
2181 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2182 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2183 dbg_printf("Interpreter warning: loading a NULL string.\n");
2187 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2188 estack_ax(stack
, top
)->u
.s
.literal_type
=
2189 ESTACK_STRING_LITERAL_TYPE_NONE
;
2190 dbg_printf("ref get context dynamic string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2191 estack_ax_t
= REG_STRING
;
2194 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
2198 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2202 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING
):
2204 struct load_op
*insn
= (struct load_op
*) pc
;
2205 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2206 const struct lttng_ust_ctx_field
*ctx_field
;
2207 struct lttng_ust_ctx_value v
;
2209 dbg_printf("get context ref offset %u type string\n",
2211 ctx_field
= &ctx
->fields
[ref
->offset
];
2212 ctx_field
->get_value(ctx_field
->priv
, probe_ctx
, &v
);
2213 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2214 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2215 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2216 dbg_printf("Interpreter warning: loading a NULL string.\n");
2220 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2221 estack_ax(stack
, top
)->u
.s
.literal_type
=
2222 ESTACK_STRING_LITERAL_TYPE_NONE
;
2223 estack_ax_t
= REG_STRING
;
2224 dbg_printf("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2225 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2229 OP(BYTECODE_OP_GET_CONTEXT_REF_S64
):
2231 struct load_op
*insn
= (struct load_op
*) pc
;
2232 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2233 const struct lttng_ust_ctx_field
*ctx_field
;
2234 struct lttng_ust_ctx_value v
;
2236 dbg_printf("get context ref offset %u type s64\n",
2238 ctx_field
= &ctx
->fields
[ref
->offset
];
2239 ctx_field
->get_value(ctx_field
->priv
, probe_ctx
, &v
);
2240 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2241 estack_ax_v
= v
.u
.s64
;
2242 estack_ax_t
= REG_S64
;
2243 dbg_printf("ref get context s64 %" PRIi64
"\n", estack_ax_v
);
2244 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2248 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
):
2250 struct load_op
*insn
= (struct load_op
*) pc
;
2251 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2252 const struct lttng_ust_ctx_field
*ctx_field
;
2253 struct lttng_ust_ctx_value v
;
2255 dbg_printf("get context ref offset %u type double\n",
2257 ctx_field
= &ctx
->fields
[ref
->offset
];
2258 ctx_field
->get_value(ctx_field
->priv
, probe_ctx
, &v
);
2259 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2260 memcpy(&estack_ax(stack
, top
)->u
.d
, &v
.u
.d
, sizeof(struct literal_double
));
2261 estack_ax_t
= REG_DOUBLE
;
2262 dbg_printf("ref get context double %g\n", estack_ax(stack
, top
)->u
.d
);
2263 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2267 OP(BYTECODE_OP_GET_CONTEXT_ROOT
):
2269 dbg_printf("op get context root\n");
2270 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2271 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
2272 /* "field" only needed for variants. */
2273 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2274 estack_ax_t
= REG_PTR
;
2275 next_pc
+= sizeof(struct load_op
);
2279 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT
):
2281 dbg_printf("op get app context root\n");
2282 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2283 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_APP_CONTEXT
;
2284 /* "field" only needed for variants. */
2285 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2286 estack_ax_t
= REG_PTR
;
2287 next_pc
+= sizeof(struct load_op
);
2291 OP(BYTECODE_OP_GET_PAYLOAD_ROOT
):
2293 dbg_printf("op get app payload root\n");
2294 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2295 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
2296 estack_ax(stack
, top
)->u
.ptr
.ptr
= interpreter_stack_data
;
2297 /* "field" only needed for variants. */
2298 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2299 estack_ax_t
= REG_PTR
;
2300 next_pc
+= sizeof(struct load_op
);
2304 OP(BYTECODE_OP_GET_SYMBOL
):
2306 dbg_printf("op get symbol\n");
2307 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
2309 ERR("Nested fields not implemented yet.");
2312 case LOAD_ROOT_CONTEXT
:
2313 case LOAD_ROOT_APP_CONTEXT
:
2314 case LOAD_ROOT_PAYLOAD
:
2316 * symbol lookup is performed by
2322 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
2326 OP(BYTECODE_OP_GET_SYMBOL_FIELD
):
2329 * Used for first variant encountered in a
2330 * traversal. Variants are not implemented yet.
2336 OP(BYTECODE_OP_GET_INDEX_U16
):
2338 struct load_op
*insn
= (struct load_op
*) pc
;
2339 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
2341 dbg_printf("op get index u16\n");
2342 ret
= dynamic_get_index(ctx
, probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2345 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2346 estack_ax_t
= estack_ax(stack
, top
)->type
;
2347 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
2351 OP(BYTECODE_OP_GET_INDEX_U64
):
2353 struct load_op
*insn
= (struct load_op
*) pc
;
2354 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
2356 dbg_printf("op get index u64\n");
2357 ret
= dynamic_get_index(ctx
, probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2360 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2361 estack_ax_t
= estack_ax(stack
, top
)->type
;
2362 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
2366 OP(BYTECODE_OP_LOAD_FIELD
):
2368 dbg_printf("op load field\n");
2369 ret
= dynamic_load_field(estack_ax(stack
, top
));
2372 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2373 estack_ax_t
= estack_ax(stack
, top
)->type
;
2374 next_pc
+= sizeof(struct load_op
);
2378 OP(BYTECODE_OP_LOAD_FIELD_S8
):
2380 dbg_printf("op load field s8\n");
2382 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2383 estack_ax_t
= REG_S64
;
2384 next_pc
+= sizeof(struct load_op
);
2387 OP(BYTECODE_OP_LOAD_FIELD_S16
):
2389 dbg_printf("op load field s16\n");
2391 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2392 estack_ax_t
= REG_S64
;
2393 next_pc
+= sizeof(struct load_op
);
2396 OP(BYTECODE_OP_LOAD_FIELD_S32
):
2398 dbg_printf("op load field s32\n");
2400 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2401 estack_ax_t
= REG_S64
;
2402 next_pc
+= sizeof(struct load_op
);
2405 OP(BYTECODE_OP_LOAD_FIELD_S64
):
2407 dbg_printf("op load field s64\n");
2409 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2410 estack_ax_t
= REG_S64
;
2411 next_pc
+= sizeof(struct load_op
);
2414 OP(BYTECODE_OP_LOAD_FIELD_U8
):
2416 dbg_printf("op load field u8\n");
2418 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2419 estack_ax_t
= REG_U64
;
2420 next_pc
+= sizeof(struct load_op
);
2423 OP(BYTECODE_OP_LOAD_FIELD_U16
):
2425 dbg_printf("op load field u16\n");
2427 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2428 estack_ax_t
= REG_U64
;
2429 next_pc
+= sizeof(struct load_op
);
2432 OP(BYTECODE_OP_LOAD_FIELD_U32
):
2434 dbg_printf("op load field u32\n");
2436 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2437 estack_ax_t
= REG_U64
;
2438 next_pc
+= sizeof(struct load_op
);
2441 OP(BYTECODE_OP_LOAD_FIELD_U64
):
2443 dbg_printf("op load field u64\n");
2445 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2446 estack_ax_t
= REG_U64
;
2447 next_pc
+= sizeof(struct load_op
);
2450 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE
):
2452 dbg_printf("op load field double\n");
2454 memcpy(&estack_ax(stack
, top
)->u
.d
,
2455 estack_ax(stack
, top
)->u
.ptr
.ptr
,
2456 sizeof(struct literal_double
));
2457 estack_ax(stack
, top
)->type
= REG_DOUBLE
;
2458 next_pc
+= sizeof(struct load_op
);
2462 OP(BYTECODE_OP_LOAD_FIELD_STRING
):
2466 dbg_printf("op load field string\n");
2467 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2468 estack_ax(stack
, top
)->u
.s
.str
= str
;
2469 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2470 dbg_printf("Interpreter warning: loading a NULL string.\n");
2474 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2475 estack_ax(stack
, top
)->u
.s
.literal_type
=
2476 ESTACK_STRING_LITERAL_TYPE_NONE
;
2477 estack_ax(stack
, top
)->type
= REG_STRING
;
2478 next_pc
+= sizeof(struct load_op
);
2482 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE
):
2486 dbg_printf("op load field string sequence\n");
2487 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
2488 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
2489 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
2490 estack_ax(stack
, top
)->type
= REG_STRING
;
2491 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2492 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2496 estack_ax(stack
, top
)->u
.s
.literal_type
=
2497 ESTACK_STRING_LITERAL_TYPE_NONE
;
2498 next_pc
+= sizeof(struct load_op
);
2504 /* No need to prepare output if an error occurred. */
2506 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR
;
2508 /* Prepare output. */
2509 switch (ust_bytecode
->type
) {
2510 case LTTNG_UST_BYTECODE_TYPE_FILTER
:
2512 struct lttng_ust_bytecode_filter_ctx
*filter_ctx
=
2513 (struct lttng_ust_bytecode_filter_ctx
*) caller_ctx
;
2515 filter_ctx
->result
= LTTNG_UST_BYTECODE_FILTER_ACCEPT
;
2517 filter_ctx
->result
= LTTNG_UST_BYTECODE_FILTER_REJECT
;
2520 case LTTNG_UST_BYTECODE_TYPE_CAPTURE
:
2521 ret
= lttng_bytecode_interpret_format_output(estack_ax(stack
, top
),
2522 (struct lttng_interpreter_output
*) caller_ctx
);
2529 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR
;
2531 return LTTNG_UST_BYTECODE_INTERPRETER_OK
;
2535 * Return LTTNG_UST_EVENT_FILTER_ACCEPT or LTTNG_UST_EVENT_FILTER_REJECT.
2537 int lttng_ust_interpret_event_filter(const struct lttng_ust_event_common
*event
,
2538 const char *interpreter_stack_data
,
2539 struct lttng_ust_probe_ctx
*probe_ctx
,
2540 void *event_filter_ctx
__attribute__((unused
)))
2542 struct lttng_ust_bytecode_runtime
*filter_bc_runtime
;
2543 struct cds_list_head
*filter_bytecode_runtime_head
= &event
->priv
->filter_bytecode_runtime_head
;
2544 struct lttng_ust_bytecode_filter_ctx bytecode_filter_ctx
;
2545 bool filter_record
= false;
2547 cds_list_for_each_entry_rcu(filter_bc_runtime
, filter_bytecode_runtime_head
, node
) {
2548 if (caa_likely(filter_bc_runtime
->interpreter_func(filter_bc_runtime
,
2549 interpreter_stack_data
, probe_ctx
, &bytecode_filter_ctx
) == LTTNG_UST_BYTECODE_INTERPRETER_OK
)) {
2550 if (caa_unlikely(bytecode_filter_ctx
.result
== LTTNG_UST_BYTECODE_FILTER_ACCEPT
)) {
2551 filter_record
= true;
2557 return LTTNG_UST_EVENT_FILTER_ACCEPT
;
2559 return LTTNG_UST_EVENT_FILTER_REJECT
;