2 * SPDX-License-Identifier: MIT
4 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
6 * LTTng UST bytecode interpreter.
13 #include <lttng/urcu/pointer.h>
14 #include <urcu/rculist.h>
15 #include <lttng/ust-endian.h>
16 #include <lttng/ust-events.h>
17 #include "ust-events-internal.h"
19 #include "lttng-bytecode.h"
20 #include "string-utils.h"
25 * -2: unknown escape char.
30 int parse_char(const char **p
)
50 * Returns SIZE_MAX if the string is null-terminated, or the number of
54 size_t get_str_or_seq_len(const struct estack_entry
*entry
)
56 return entry
->u
.s
.seq_len
;
60 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
63 const char *candidate
;
67 /* Find out which side is the pattern vs. the candidate. */
68 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
69 pattern
= estack_ax(stack
, top
)->u
.s
.str
;
70 pattern_len
= get_str_or_seq_len(estack_ax(stack
, top
));
71 candidate
= estack_bx(stack
, top
)->u
.s
.str
;
72 candidate_len
= get_str_or_seq_len(estack_bx(stack
, top
));
74 pattern
= estack_bx(stack
, top
)->u
.s
.str
;
75 pattern_len
= get_str_or_seq_len(estack_bx(stack
, top
));
76 candidate
= estack_ax(stack
, top
)->u
.s
.str
;
77 candidate_len
= get_str_or_seq_len(estack_ax(stack
, top
));
80 /* Perform the match. Returns 0 when the result is true. */
81 return !strutils_star_glob_match(pattern
, pattern_len
, candidate
,
86 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
88 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
95 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
>= estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
96 if (q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
99 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
100 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
101 ret
= parse_char(&q
);
108 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
109 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
110 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
111 ret
= parse_char(&p
);
117 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
118 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
119 ret
= parse_char(&p
);
122 } else if (ret
== -2) {
125 /* else compare both char */
127 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
128 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
129 ret
= parse_char(&q
);
132 } else if (ret
== -2) {
/*
 * Always-fail interpreter stub: unconditionally reports
 * LTTNG_UST_BYTECODE_INTERPRETER_ERROR, ignoring its arguments.
 * NOTE(review): original lines 154-155 (the remaining parameter(s) and
 * the opening brace) are missing from this extraction, so the full
 * signature cannot be confirmed from here — verify before editing.
 */
152 int lttng_bytecode_interpret_error(struct lttng_ust_bytecode_runtime
*bytecode_runtime
,
153 const char *stack_data
,
156 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR
;
159 #ifdef INTERPRETER_USE_SWITCH
162 * Fallback for compilers that do not support taking address of labels.
166 start_pc = &bytecode->data[0]; \
167 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
169 dbg_printf("Executing op %s (%u)\n", \
170 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
171 (unsigned int) *(bytecode_opcode_t *) pc); \
172 switch (*(bytecode_opcode_t *) pc) {
174 #define OP(name) jump_target_##name: __attribute__((unused)); \
182 #define JUMP_TO(name) \
183 goto jump_target_##name
188 * Dispatch-table based interpreter.
192 start_pc = &bytecode->code[0]; \
193 pc = next_pc = start_pc; \
194 if (unlikely(pc - start_pc >= bytecode->len)) \
196 goto *dispatch[*(bytecode_opcode_t *) pc];
203 goto *dispatch[*(bytecode_opcode_t *) pc];
207 #define JUMP_TO(name) \
212 #define IS_INTEGER_REGISTER(reg_type) \
213 (reg_type == REG_U64 || reg_type == REG_S64)
215 static int context_get_index(struct lttng_ust_ctx
*ctx
,
216 struct load_ptr
*ptr
,
220 struct lttng_ust_ctx_field
*ctx_field
;
221 struct lttng_ust_event_field
*field
;
222 struct lttng_ust_ctx_value v
;
224 ctx_field
= ctx
->fields
[idx
];
225 field
= ctx_field
->event_field
;
226 ptr
->type
= LOAD_OBJECT
;
229 switch (field
->type
->type
) {
230 case lttng_ust_type_integer
:
231 ctx_field
->get_value(ctx_field
, &v
);
232 if (lttng_ust_get_type_integer(field
->type
)->signedness
) {
233 ptr
->object_type
= OBJECT_TYPE_S64
;
234 ptr
->u
.s64
= v
.u
.s64
;
235 ptr
->ptr
= &ptr
->u
.s64
;
237 ptr
->object_type
= OBJECT_TYPE_U64
;
238 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
239 ptr
->ptr
= &ptr
->u
.u64
;
242 case lttng_ust_type_enum
:
244 const struct lttng_ust_type_integer
*itype
;
246 itype
= lttng_ust_get_type_integer(lttng_ust_get_type_enum(field
->type
)->container_type
);
247 ctx_field
->get_value(ctx_field
, &v
);
248 if (itype
->signedness
) {
249 ptr
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
250 ptr
->u
.s64
= v
.u
.s64
;
251 ptr
->ptr
= &ptr
->u
.s64
;
253 ptr
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
254 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
255 ptr
->ptr
= &ptr
->u
.u64
;
259 case lttng_ust_type_array
:
260 if (lttng_ust_get_type_array(field
->type
)->elem_type
->type
!= lttng_ust_type_integer
) {
261 ERR("Array nesting only supports integer types.");
264 if (lttng_ust_get_type_array(field
->type
)->encoding
== lttng_ust_string_encoding_none
) {
265 ERR("Only string arrays are supported for contexts.");
268 ptr
->object_type
= OBJECT_TYPE_STRING
;
269 ctx_field
->get_value(ctx_field
, &v
);
272 case lttng_ust_type_sequence
:
273 if (lttng_ust_get_type_sequence(field
->type
)->elem_type
->type
!= lttng_ust_type_integer
) {
274 ERR("Sequence nesting only supports integer types.");
277 if (lttng_ust_get_type_sequence(field
->type
)->encoding
== lttng_ust_string_encoding_none
) {
278 ERR("Only string sequences are supported for contexts.");
281 ptr
->object_type
= OBJECT_TYPE_STRING
;
282 ctx_field
->get_value(ctx_field
, &v
);
285 case lttng_ust_type_string
:
286 ptr
->object_type
= OBJECT_TYPE_STRING
;
287 ctx_field
->get_value(ctx_field
, &v
);
290 case lttng_ust_type_float
:
291 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
292 ctx_field
->get_value(ctx_field
, &v
);
294 ptr
->ptr
= &ptr
->u
.d
;
296 case lttng_ust_type_dynamic
:
297 ctx_field
->get_value(ctx_field
, &v
);
299 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
301 case LTTNG_UST_DYNAMIC_TYPE_U8
:
302 case LTTNG_UST_DYNAMIC_TYPE_U16
:
303 case LTTNG_UST_DYNAMIC_TYPE_U32
:
304 case LTTNG_UST_DYNAMIC_TYPE_U64
:
305 ptr
->object_type
= OBJECT_TYPE_U64
;
306 ptr
->u
.u64
= v
.u
.u64
;
307 ptr
->ptr
= &ptr
->u
.u64
;
308 dbg_printf("context get index dynamic u64 %" PRIi64
"\n", ptr
->u
.u64
);
310 case LTTNG_UST_DYNAMIC_TYPE_S8
:
311 case LTTNG_UST_DYNAMIC_TYPE_S16
:
312 case LTTNG_UST_DYNAMIC_TYPE_S32
:
313 case LTTNG_UST_DYNAMIC_TYPE_S64
:
314 ptr
->object_type
= OBJECT_TYPE_S64
;
315 ptr
->u
.s64
= v
.u
.s64
;
316 ptr
->ptr
= &ptr
->u
.s64
;
317 dbg_printf("context get index dynamic s64 %" PRIi64
"\n", ptr
->u
.s64
);
319 case LTTNG_UST_DYNAMIC_TYPE_FLOAT
:
320 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
321 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
323 ptr
->ptr
= &ptr
->u
.d
;
324 dbg_printf("context get index dynamic double %g\n", ptr
->u
.d
);
326 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
327 ptr
->object_type
= OBJECT_TYPE_STRING
;
329 dbg_printf("context get index dynamic string %s\n", (const char *) ptr
->ptr
);
332 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
337 ERR("Unknown type: %d", (int) field
->type
->type
);
343 static int dynamic_get_index(struct lttng_ust_ctx
*ctx
,
344 struct bytecode_runtime
*runtime
,
345 uint64_t index
, struct estack_entry
*stack_top
)
348 const struct bytecode_get_index_data
*gid
;
350 gid
= (const struct bytecode_get_index_data
*) &runtime
->data
[index
];
351 switch (stack_top
->u
.ptr
.type
) {
353 switch (stack_top
->u
.ptr
.object_type
) {
354 case OBJECT_TYPE_ARRAY
:
358 assert(gid
->offset
< gid
->array_len
);
359 /* Skip count (unsigned long) */
360 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
361 ptr
= ptr
+ gid
->offset
;
362 stack_top
->u
.ptr
.ptr
= ptr
;
363 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
364 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
365 assert(stack_top
->u
.ptr
.field
->type
->type
== lttng_ust_type_array
);
366 stack_top
->u
.ptr
.field
= NULL
;
369 case OBJECT_TYPE_SEQUENCE
:
374 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
375 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
376 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
380 ptr
= ptr
+ gid
->offset
;
381 stack_top
->u
.ptr
.ptr
= ptr
;
382 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
383 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
384 assert(stack_top
->u
.ptr
.field
->type
->type
== lttng_ust_type_sequence
);
385 stack_top
->u
.ptr
.field
= NULL
;
388 case OBJECT_TYPE_STRUCT
:
389 ERR("Nested structures are not supported yet.");
392 case OBJECT_TYPE_VARIANT
:
394 ERR("Unexpected get index type %d",
395 (int) stack_top
->u
.ptr
.object_type
);
400 case LOAD_ROOT_CONTEXT
:
401 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
403 ret
= context_get_index(ctx
,
411 case LOAD_ROOT_PAYLOAD
:
412 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
413 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
414 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
415 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
416 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
417 stack_top
->u
.ptr
.field
= gid
->field
;
418 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
422 stack_top
->type
= REG_PTR
;
430 static int dynamic_load_field(struct estack_entry
*stack_top
)
434 switch (stack_top
->u
.ptr
.type
) {
437 case LOAD_ROOT_CONTEXT
:
438 case LOAD_ROOT_APP_CONTEXT
:
439 case LOAD_ROOT_PAYLOAD
:
441 dbg_printf("Interpreter warning: cannot load root, missing field name.\n");
445 switch (stack_top
->u
.ptr
.object_type
) {
447 dbg_printf("op load field s8\n");
448 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
449 stack_top
->type
= REG_S64
;
451 case OBJECT_TYPE_S16
:
455 dbg_printf("op load field s16\n");
456 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
457 if (stack_top
->u
.ptr
.rev_bo
)
459 stack_top
->u
.v
= tmp
;
460 stack_top
->type
= REG_S64
;
463 case OBJECT_TYPE_S32
:
467 dbg_printf("op load field s32\n");
468 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
469 if (stack_top
->u
.ptr
.rev_bo
)
471 stack_top
->u
.v
= tmp
;
472 stack_top
->type
= REG_S64
;
475 case OBJECT_TYPE_S64
:
479 dbg_printf("op load field s64\n");
480 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
481 if (stack_top
->u
.ptr
.rev_bo
)
483 stack_top
->u
.v
= tmp
;
484 stack_top
->type
= REG_S64
;
487 case OBJECT_TYPE_SIGNED_ENUM
:
491 dbg_printf("op load field signed enumeration\n");
492 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
493 if (stack_top
->u
.ptr
.rev_bo
)
495 stack_top
->u
.v
= tmp
;
496 stack_top
->type
= REG_S64
;
500 dbg_printf("op load field u8\n");
501 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
502 stack_top
->type
= REG_U64
;
504 case OBJECT_TYPE_U16
:
508 dbg_printf("op load field u16\n");
509 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
510 if (stack_top
->u
.ptr
.rev_bo
)
512 stack_top
->u
.v
= tmp
;
513 stack_top
->type
= REG_U64
;
516 case OBJECT_TYPE_U32
:
520 dbg_printf("op load field u32\n");
521 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
522 if (stack_top
->u
.ptr
.rev_bo
)
524 stack_top
->u
.v
= tmp
;
525 stack_top
->type
= REG_U64
;
528 case OBJECT_TYPE_U64
:
532 dbg_printf("op load field u64\n");
533 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
534 if (stack_top
->u
.ptr
.rev_bo
)
536 stack_top
->u
.v
= tmp
;
537 stack_top
->type
= REG_U64
;
540 case OBJECT_TYPE_UNSIGNED_ENUM
:
544 dbg_printf("op load field unsigned enumeration\n");
545 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
546 if (stack_top
->u
.ptr
.rev_bo
)
548 stack_top
->u
.v
= tmp
;
549 stack_top
->type
= REG_U64
;
552 case OBJECT_TYPE_DOUBLE
:
553 memcpy(&stack_top
->u
.d
,
554 stack_top
->u
.ptr
.ptr
,
555 sizeof(struct literal_double
));
556 stack_top
->type
= REG_DOUBLE
;
558 case OBJECT_TYPE_STRING
:
562 dbg_printf("op load field string\n");
563 str
= (const char *) stack_top
->u
.ptr
.ptr
;
564 stack_top
->u
.s
.str
= str
;
565 if (unlikely(!stack_top
->u
.s
.str
)) {
566 dbg_printf("Interpreter warning: loading a NULL string.\n");
570 stack_top
->u
.s
.seq_len
= SIZE_MAX
;
571 stack_top
->u
.s
.literal_type
=
572 ESTACK_STRING_LITERAL_TYPE_NONE
;
573 stack_top
->type
= REG_STRING
;
576 case OBJECT_TYPE_STRING_SEQUENCE
:
580 dbg_printf("op load field string sequence\n");
581 ptr
= stack_top
->u
.ptr
.ptr
;
582 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
583 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
584 stack_top
->type
= REG_STRING
;
585 if (unlikely(!stack_top
->u
.s
.str
)) {
586 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
590 stack_top
->u
.s
.literal_type
=
591 ESTACK_STRING_LITERAL_TYPE_NONE
;
594 case OBJECT_TYPE_DYNAMIC
:
596 * Dynamic types in context are looked up
597 * by context get index.
601 case OBJECT_TYPE_SEQUENCE
:
602 case OBJECT_TYPE_ARRAY
:
603 case OBJECT_TYPE_STRUCT
:
604 case OBJECT_TYPE_VARIANT
:
605 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
616 int lttng_bytecode_interpret_format_output(struct estack_entry
*ax
,
617 struct lttng_interpreter_output
*output
)
624 output
->type
= LTTNG_INTERPRETER_TYPE_S64
;
625 output
->u
.s
= ax
->u
.v
;
628 output
->type
= LTTNG_INTERPRETER_TYPE_U64
;
629 output
->u
.u
= (uint64_t) ax
->u
.v
;
632 output
->type
= LTTNG_INTERPRETER_TYPE_DOUBLE
;
633 output
->u
.d
= ax
->u
.d
;
636 output
->type
= LTTNG_INTERPRETER_TYPE_STRING
;
637 output
->u
.str
.str
= ax
->u
.s
.str
;
638 output
->u
.str
.len
= ax
->u
.s
.seq_len
;
641 switch (ax
->u
.ptr
.object_type
) {
643 case OBJECT_TYPE_S16
:
644 case OBJECT_TYPE_S32
:
645 case OBJECT_TYPE_S64
:
647 case OBJECT_TYPE_U16
:
648 case OBJECT_TYPE_U32
:
649 case OBJECT_TYPE_U64
:
650 case OBJECT_TYPE_DOUBLE
:
651 case OBJECT_TYPE_STRING
:
652 case OBJECT_TYPE_STRING_SEQUENCE
:
653 ret
= dynamic_load_field(ax
);
656 /* Retry after loading ptr into stack top. */
658 case OBJECT_TYPE_SEQUENCE
:
659 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
660 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
661 output
->u
.sequence
.nr_elem
= *(unsigned long *) ax
->u
.ptr
.ptr
;
662 output
->u
.sequence
.nested_type
= lttng_ust_get_type_sequence(ax
->u
.ptr
.field
->type
)->elem_type
;
664 case OBJECT_TYPE_ARRAY
:
665 /* Skip count (unsigned long) */
666 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
667 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
668 output
->u
.sequence
.nr_elem
= lttng_ust_get_type_array(ax
->u
.ptr
.field
->type
)->length
;
669 output
->u
.sequence
.nested_type
= lttng_ust_get_type_array(ax
->u
.ptr
.field
->type
)->elem_type
;
671 case OBJECT_TYPE_SIGNED_ENUM
:
672 ret
= dynamic_load_field(ax
);
675 output
->type
= LTTNG_INTERPRETER_TYPE_SIGNED_ENUM
;
676 output
->u
.s
= ax
->u
.v
;
678 case OBJECT_TYPE_UNSIGNED_ENUM
:
679 ret
= dynamic_load_field(ax
);
682 output
->type
= LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM
;
683 output
->u
.u
= ax
->u
.v
;
685 case OBJECT_TYPE_STRUCT
:
686 case OBJECT_TYPE_VARIANT
:
692 case REG_STAR_GLOB_STRING
:
702 * Return LTTNG_UST_BYTECODE_INTERPRETER_OK on success.
703 * Return LTTNG_UST_BYTECODE_INTERPRETER_ERROR on error.
705 * For FILTER bytecode: expect a struct lttng_ust_bytecode_filter_ctx *
707 * For CAPTURE bytecode: expect a struct lttng_interpreter_output *
710 int lttng_bytecode_interpret(struct lttng_ust_bytecode_runtime
*ust_bytecode
,
711 const char *interpreter_stack_data
,
714 struct bytecode_runtime
*bytecode
= caa_container_of(ust_bytecode
, struct bytecode_runtime
, p
);
715 struct lttng_ust_ctx
*ctx
= lttng_ust_rcu_dereference(*ust_bytecode
->pctx
);
716 void *pc
, *next_pc
, *start_pc
;
717 int ret
= -EINVAL
, retval
= 0;
718 struct estack _stack
;
719 struct estack
*stack
= &_stack
;
720 register int64_t ax
= 0, bx
= 0;
721 register enum entry_type ax_t
= REG_UNKNOWN
, bx_t
= REG_UNKNOWN
;
722 register int top
= INTERPRETER_STACK_EMPTY
;
723 #ifndef INTERPRETER_USE_SWITCH
724 static void *dispatch
[NR_BYTECODE_OPS
] = {
725 [ BYTECODE_OP_UNKNOWN
] = &&LABEL_BYTECODE_OP_UNKNOWN
,
727 [ BYTECODE_OP_RETURN
] = &&LABEL_BYTECODE_OP_RETURN
,
730 [ BYTECODE_OP_MUL
] = &&LABEL_BYTECODE_OP_MUL
,
731 [ BYTECODE_OP_DIV
] = &&LABEL_BYTECODE_OP_DIV
,
732 [ BYTECODE_OP_MOD
] = &&LABEL_BYTECODE_OP_MOD
,
733 [ BYTECODE_OP_PLUS
] = &&LABEL_BYTECODE_OP_PLUS
,
734 [ BYTECODE_OP_MINUS
] = &&LABEL_BYTECODE_OP_MINUS
,
735 [ BYTECODE_OP_BIT_RSHIFT
] = &&LABEL_BYTECODE_OP_BIT_RSHIFT
,
736 [ BYTECODE_OP_BIT_LSHIFT
] = &&LABEL_BYTECODE_OP_BIT_LSHIFT
,
737 [ BYTECODE_OP_BIT_AND
] = &&LABEL_BYTECODE_OP_BIT_AND
,
738 [ BYTECODE_OP_BIT_OR
] = &&LABEL_BYTECODE_OP_BIT_OR
,
739 [ BYTECODE_OP_BIT_XOR
] = &&LABEL_BYTECODE_OP_BIT_XOR
,
741 /* binary comparators */
742 [ BYTECODE_OP_EQ
] = &&LABEL_BYTECODE_OP_EQ
,
743 [ BYTECODE_OP_NE
] = &&LABEL_BYTECODE_OP_NE
,
744 [ BYTECODE_OP_GT
] = &&LABEL_BYTECODE_OP_GT
,
745 [ BYTECODE_OP_LT
] = &&LABEL_BYTECODE_OP_LT
,
746 [ BYTECODE_OP_GE
] = &&LABEL_BYTECODE_OP_GE
,
747 [ BYTECODE_OP_LE
] = &&LABEL_BYTECODE_OP_LE
,
749 /* string binary comparator */
750 [ BYTECODE_OP_EQ_STRING
] = &&LABEL_BYTECODE_OP_EQ_STRING
,
751 [ BYTECODE_OP_NE_STRING
] = &&LABEL_BYTECODE_OP_NE_STRING
,
752 [ BYTECODE_OP_GT_STRING
] = &&LABEL_BYTECODE_OP_GT_STRING
,
753 [ BYTECODE_OP_LT_STRING
] = &&LABEL_BYTECODE_OP_LT_STRING
,
754 [ BYTECODE_OP_GE_STRING
] = &&LABEL_BYTECODE_OP_GE_STRING
,
755 [ BYTECODE_OP_LE_STRING
] = &&LABEL_BYTECODE_OP_LE_STRING
,
757 /* globbing pattern binary comparator */
758 [ BYTECODE_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING
,
759 [ BYTECODE_OP_NE_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING
,
761 /* s64 binary comparator */
762 [ BYTECODE_OP_EQ_S64
] = &&LABEL_BYTECODE_OP_EQ_S64
,
763 [ BYTECODE_OP_NE_S64
] = &&LABEL_BYTECODE_OP_NE_S64
,
764 [ BYTECODE_OP_GT_S64
] = &&LABEL_BYTECODE_OP_GT_S64
,
765 [ BYTECODE_OP_LT_S64
] = &&LABEL_BYTECODE_OP_LT_S64
,
766 [ BYTECODE_OP_GE_S64
] = &&LABEL_BYTECODE_OP_GE_S64
,
767 [ BYTECODE_OP_LE_S64
] = &&LABEL_BYTECODE_OP_LE_S64
,
769 /* double binary comparator */
770 [ BYTECODE_OP_EQ_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE
,
771 [ BYTECODE_OP_NE_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_DOUBLE
,
772 [ BYTECODE_OP_GT_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_DOUBLE
,
773 [ BYTECODE_OP_LT_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_DOUBLE
,
774 [ BYTECODE_OP_GE_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_DOUBLE
,
775 [ BYTECODE_OP_LE_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_DOUBLE
,
777 /* Mixed S64-double binary comparators */
778 [ BYTECODE_OP_EQ_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64
,
779 [ BYTECODE_OP_NE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64
,
780 [ BYTECODE_OP_GT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64
,
781 [ BYTECODE_OP_LT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64
,
782 [ BYTECODE_OP_GE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64
,
783 [ BYTECODE_OP_LE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64
,
785 [ BYTECODE_OP_EQ_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE
,
786 [ BYTECODE_OP_NE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE
,
787 [ BYTECODE_OP_GT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE
,
788 [ BYTECODE_OP_LT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE
,
789 [ BYTECODE_OP_GE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE
,
790 [ BYTECODE_OP_LE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE
,
793 [ BYTECODE_OP_UNARY_PLUS
] = &&LABEL_BYTECODE_OP_UNARY_PLUS
,
794 [ BYTECODE_OP_UNARY_MINUS
] = &&LABEL_BYTECODE_OP_UNARY_MINUS
,
795 [ BYTECODE_OP_UNARY_NOT
] = &&LABEL_BYTECODE_OP_UNARY_NOT
,
796 [ BYTECODE_OP_UNARY_PLUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64
,
797 [ BYTECODE_OP_UNARY_MINUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64
,
798 [ BYTECODE_OP_UNARY_NOT_S64
] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64
,
799 [ BYTECODE_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE
,
800 [ BYTECODE_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE
,
801 [ BYTECODE_OP_UNARY_NOT_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE
,
804 [ BYTECODE_OP_AND
] = &&LABEL_BYTECODE_OP_AND
,
805 [ BYTECODE_OP_OR
] = &&LABEL_BYTECODE_OP_OR
,
808 [ BYTECODE_OP_LOAD_FIELD_REF
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF
,
809 [ BYTECODE_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING
,
810 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
,
811 [ BYTECODE_OP_LOAD_FIELD_REF_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64
,
812 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
,
814 /* load from immediate operand */
815 [ BYTECODE_OP_LOAD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STRING
,
816 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING
,
817 [ BYTECODE_OP_LOAD_S64
] = &&LABEL_BYTECODE_OP_LOAD_S64
,
818 [ BYTECODE_OP_LOAD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE
,
821 [ BYTECODE_OP_CAST_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_TO_S64
,
822 [ BYTECODE_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64
,
823 [ BYTECODE_OP_CAST_NOP
] = &&LABEL_BYTECODE_OP_CAST_NOP
,
825 /* get context ref */
826 [ BYTECODE_OP_GET_CONTEXT_REF
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF
,
827 [ BYTECODE_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING
,
828 [ BYTECODE_OP_GET_CONTEXT_REF_S64
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64
,
829 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
,
831 /* Instructions for recursive traversal through composed types. */
832 [ BYTECODE_OP_GET_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT
,
833 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT
,
834 [ BYTECODE_OP_GET_PAYLOAD_ROOT
] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT
,
836 [ BYTECODE_OP_GET_SYMBOL
] = &&LABEL_BYTECODE_OP_GET_SYMBOL
,
837 [ BYTECODE_OP_GET_SYMBOL_FIELD
] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD
,
838 [ BYTECODE_OP_GET_INDEX_U16
] = &&LABEL_BYTECODE_OP_GET_INDEX_U16
,
839 [ BYTECODE_OP_GET_INDEX_U64
] = &&LABEL_BYTECODE_OP_GET_INDEX_U64
,
841 [ BYTECODE_OP_LOAD_FIELD
] = &&LABEL_BYTECODE_OP_LOAD_FIELD
,
842 [ BYTECODE_OP_LOAD_FIELD_S8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8
,
843 [ BYTECODE_OP_LOAD_FIELD_S16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16
,
844 [ BYTECODE_OP_LOAD_FIELD_S32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32
,
845 [ BYTECODE_OP_LOAD_FIELD_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64
,
846 [ BYTECODE_OP_LOAD_FIELD_U8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8
,
847 [ BYTECODE_OP_LOAD_FIELD_U16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16
,
848 [ BYTECODE_OP_LOAD_FIELD_U32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32
,
849 [ BYTECODE_OP_LOAD_FIELD_U64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64
,
850 [ BYTECODE_OP_LOAD_FIELD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING
,
851 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE
,
852 [ BYTECODE_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE
,
854 [ BYTECODE_OP_UNARY_BIT_NOT
] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT
,
856 [ BYTECODE_OP_RETURN_S64
] = &&LABEL_BYTECODE_OP_RETURN_S64
,
858 #endif /* #ifndef INTERPRETER_USE_SWITCH */
862 OP(BYTECODE_OP_UNKNOWN
):
863 OP(BYTECODE_OP_LOAD_FIELD_REF
):
864 #ifdef INTERPRETER_USE_SWITCH
866 #endif /* INTERPRETER_USE_SWITCH */
867 ERR("unknown bytecode op %u",
868 (unsigned int) *(bytecode_opcode_t
*) pc
);
872 OP(BYTECODE_OP_RETURN
):
873 /* LTTNG_UST_BYTECODE_INTERPRETER_ERROR or LTTNG_UST_BYTECODE_INTERPRETER_OK */
874 /* Handle dynamic typing. */
875 switch (estack_ax_t
) {
878 retval
= !!estack_ax_v
;
883 if (ust_bytecode
->type
!= LTTNG_UST_BYTECODE_TYPE_CAPTURE
) {
889 case REG_STAR_GLOB_STRING
:
898 OP(BYTECODE_OP_RETURN_S64
):
899 /* LTTNG_UST_BYTECODE_INTERPRETER_ERROR or LTTNG_UST_BYTECODE_INTERPRETER_OK */
900 retval
= !!estack_ax_v
;
908 OP(BYTECODE_OP_PLUS
):
909 OP(BYTECODE_OP_MINUS
):
910 ERR("unsupported bytecode op %u",
911 (unsigned int) *(bytecode_opcode_t
*) pc
);
917 /* Dynamic typing. */
918 switch (estack_ax_t
) {
919 case REG_S64
: /* Fall-through */
921 switch (estack_bx_t
) {
922 case REG_S64
: /* Fall-through */
924 JUMP_TO(BYTECODE_OP_EQ_S64
);
926 JUMP_TO(BYTECODE_OP_EQ_DOUBLE_S64
);
927 case REG_STRING
: /* Fall-through */
928 case REG_STAR_GLOB_STRING
:
932 ERR("Unknown interpreter register type (%d)",
939 switch (estack_bx_t
) {
940 case REG_S64
: /* Fall-through */
942 JUMP_TO(BYTECODE_OP_EQ_S64_DOUBLE
);
944 JUMP_TO(BYTECODE_OP_EQ_DOUBLE
);
945 case REG_STRING
: /* Fall-through */
946 case REG_STAR_GLOB_STRING
:
950 ERR("Unknown interpreter register type (%d)",
957 switch (estack_bx_t
) {
958 case REG_S64
: /* Fall-through */
959 case REG_U64
: /* Fall-through */
964 JUMP_TO(BYTECODE_OP_EQ_STRING
);
965 case REG_STAR_GLOB_STRING
:
966 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
968 ERR("Unknown interpreter register type (%d)",
974 case REG_STAR_GLOB_STRING
:
975 switch (estack_bx_t
) {
976 case REG_S64
: /* Fall-through */
977 case REG_U64
: /* Fall-through */
982 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
983 case REG_STAR_GLOB_STRING
:
987 ERR("Unknown interpreter register type (%d)",
994 ERR("Unknown interpreter register type (%d)",
1002 /* Dynamic typing. */
1003 switch (estack_ax_t
) {
1004 case REG_S64
: /* Fall-through */
1006 switch (estack_bx_t
) {
1007 case REG_S64
: /* Fall-through */
1009 JUMP_TO(BYTECODE_OP_NE_S64
);
1011 JUMP_TO(BYTECODE_OP_NE_DOUBLE_S64
);
1012 case REG_STRING
: /* Fall-through */
1013 case REG_STAR_GLOB_STRING
:
1017 ERR("Unknown interpreter register type (%d)",
1024 switch (estack_bx_t
) {
1025 case REG_S64
: /* Fall-through */
1027 JUMP_TO(BYTECODE_OP_NE_S64_DOUBLE
);
1029 JUMP_TO(BYTECODE_OP_NE_DOUBLE
);
1030 case REG_STRING
: /* Fall-through */
1031 case REG_STAR_GLOB_STRING
:
1035 ERR("Unknown interpreter register type (%d)",
1042 switch (estack_bx_t
) {
1043 case REG_S64
: /* Fall-through */
1049 JUMP_TO(BYTECODE_OP_NE_STRING
);
1050 case REG_STAR_GLOB_STRING
:
1051 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1053 ERR("Unknown interpreter register type (%d)",
1059 case REG_STAR_GLOB_STRING
:
1060 switch (estack_bx_t
) {
1061 case REG_S64
: /* Fall-through */
1067 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1068 case REG_STAR_GLOB_STRING
:
1072 ERR("Unknown interpreter register type (%d)",
1079 ERR("Unknown interpreter register type (%d)",
1087 /* Dynamic typing. */
1088 switch (estack_ax_t
) {
1089 case REG_S64
: /* Fall-through */
1091 switch (estack_bx_t
) {
1092 case REG_S64
: /* Fall-through */
1094 JUMP_TO(BYTECODE_OP_GT_S64
);
1096 JUMP_TO(BYTECODE_OP_GT_DOUBLE_S64
);
1097 case REG_STRING
: /* Fall-through */
1098 case REG_STAR_GLOB_STRING
:
1102 ERR("Unknown interpreter register type (%d)",
1109 switch (estack_bx_t
) {
1110 case REG_S64
: /* Fall-through */
1112 JUMP_TO(BYTECODE_OP_GT_S64_DOUBLE
);
1114 JUMP_TO(BYTECODE_OP_GT_DOUBLE
);
1115 case REG_STRING
: /* Fall-through */
1116 case REG_STAR_GLOB_STRING
:
1120 ERR("Unknown interpreter register type (%d)",
1127 switch (estack_bx_t
) {
1128 case REG_S64
: /* Fall-through */
1129 case REG_U64
: /* Fall-through */
1130 case REG_DOUBLE
: /* Fall-through */
1131 case REG_STAR_GLOB_STRING
:
1135 JUMP_TO(BYTECODE_OP_GT_STRING
);
1137 ERR("Unknown interpreter register type (%d)",
1144 ERR("Unknown interpreter register type (%d)",
1152 /* Dynamic typing. */
1153 switch (estack_ax_t
) {
1154 case REG_S64
: /* Fall-through */
1156 switch (estack_bx_t
) {
1157 case REG_S64
: /* Fall-through */
1159 JUMP_TO(BYTECODE_OP_LT_S64
);
1161 JUMP_TO(BYTECODE_OP_LT_DOUBLE_S64
);
1162 case REG_STRING
: /* Fall-through */
1163 case REG_STAR_GLOB_STRING
:
1167 ERR("Unknown interpreter register type (%d)",
1174 switch (estack_bx_t
) {
1175 case REG_S64
: /* Fall-through */
1177 JUMP_TO(BYTECODE_OP_LT_S64_DOUBLE
);
1179 JUMP_TO(BYTECODE_OP_LT_DOUBLE
);
1180 case REG_STRING
: /* Fall-through */
1181 case REG_STAR_GLOB_STRING
:
1185 ERR("Unknown interpreter register type (%d)",
1192 switch (estack_bx_t
) {
1193 case REG_S64
: /* Fall-through */
1194 case REG_U64
: /* Fall-through */
1195 case REG_DOUBLE
: /* Fall-through */
1196 case REG_STAR_GLOB_STRING
:
1200 JUMP_TO(BYTECODE_OP_LT_STRING
);
1202 ERR("Unknown interpreter register type (%d)",
1209 ERR("Unknown interpreter register type (%d)",
1217 /* Dynamic typing. */
1218 switch (estack_ax_t
) {
1219 case REG_S64
: /* Fall-through */
1221 switch (estack_bx_t
) {
1222 case REG_S64
: /* Fall-through */
1224 JUMP_TO(BYTECODE_OP_GE_S64
);
1226 JUMP_TO(BYTECODE_OP_GE_DOUBLE_S64
);
1227 case REG_STRING
: /* Fall-through */
1228 case REG_STAR_GLOB_STRING
:
1232 ERR("Unknown interpreter register type (%d)",
1239 switch (estack_bx_t
) {
1240 case REG_S64
: /* Fall-through */
1242 JUMP_TO(BYTECODE_OP_GE_S64_DOUBLE
);
1244 JUMP_TO(BYTECODE_OP_GE_DOUBLE
);
1245 case REG_STRING
: /* Fall-through */
1246 case REG_STAR_GLOB_STRING
:
1250 ERR("Unknown interpreter register type (%d)",
1257 switch (estack_bx_t
) {
1258 case REG_S64
: /* Fall-through */
1259 case REG_U64
: /* Fall-through */
1260 case REG_DOUBLE
: /* Fall-through */
1261 case REG_STAR_GLOB_STRING
:
1265 JUMP_TO(BYTECODE_OP_GE_STRING
);
1267 ERR("Unknown interpreter register type (%d)",
1274 ERR("Unknown interpreter register type (%d)",
1282 /* Dynamic typing. */
1283 switch (estack_ax_t
) {
1284 case REG_S64
: /* Fall-through */
1286 switch (estack_bx_t
) {
1287 case REG_S64
: /* Fall-through */
1289 JUMP_TO(BYTECODE_OP_LE_S64
);
1291 JUMP_TO(BYTECODE_OP_LE_DOUBLE_S64
);
1292 case REG_STRING
: /* Fall-through */
1293 case REG_STAR_GLOB_STRING
:
1297 ERR("Unknown interpreter register type (%d)",
1304 switch (estack_bx_t
) {
1305 case REG_S64
: /* Fall-through */
1307 JUMP_TO(BYTECODE_OP_LE_S64_DOUBLE
);
1309 JUMP_TO(BYTECODE_OP_LE_DOUBLE
);
1310 case REG_STRING
: /* Fall-through */
1311 case REG_STAR_GLOB_STRING
:
1315 ERR("Unknown interpreter register type (%d)",
1322 switch (estack_bx_t
) {
1323 case REG_S64
: /* Fall-through */
1324 case REG_U64
: /* Fall-through */
1325 case REG_DOUBLE
: /* Fall-through */
1326 case REG_STAR_GLOB_STRING
:
1330 JUMP_TO(BYTECODE_OP_LE_STRING
);
1332 ERR("Unknown interpreter register type (%d)",
1339 ERR("Unknown interpreter register type (%d)",
1346 OP(BYTECODE_OP_EQ_STRING
):
1350 res
= (stack_strcmp(stack
, top
, "==") == 0);
1351 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1353 estack_ax_t
= REG_S64
;
1354 next_pc
+= sizeof(struct binary_op
);
1357 OP(BYTECODE_OP_NE_STRING
):
1361 res
= (stack_strcmp(stack
, top
, "!=") != 0);
1362 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1364 estack_ax_t
= REG_S64
;
1365 next_pc
+= sizeof(struct binary_op
);
1368 OP(BYTECODE_OP_GT_STRING
):
1372 res
= (stack_strcmp(stack
, top
, ">") > 0);
1373 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1375 estack_ax_t
= REG_S64
;
1376 next_pc
+= sizeof(struct binary_op
);
1379 OP(BYTECODE_OP_LT_STRING
):
1383 res
= (stack_strcmp(stack
, top
, "<") < 0);
1384 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1386 estack_ax_t
= REG_S64
;
1387 next_pc
+= sizeof(struct binary_op
);
1390 OP(BYTECODE_OP_GE_STRING
):
1394 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
1395 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1397 estack_ax_t
= REG_S64
;
1398 next_pc
+= sizeof(struct binary_op
);
1401 OP(BYTECODE_OP_LE_STRING
):
1405 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
1406 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1408 estack_ax_t
= REG_S64
;
1409 next_pc
+= sizeof(struct binary_op
);
1413 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING
):
1417 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
1418 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1420 estack_ax_t
= REG_S64
;
1421 next_pc
+= sizeof(struct binary_op
);
1424 OP(BYTECODE_OP_NE_STAR_GLOB_STRING
):
1428 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
1429 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1431 estack_ax_t
= REG_S64
;
1432 next_pc
+= sizeof(struct binary_op
);
1436 OP(BYTECODE_OP_EQ_S64
):
1440 res
= (estack_bx_v
== estack_ax_v
);
1441 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1443 estack_ax_t
= REG_S64
;
1444 next_pc
+= sizeof(struct binary_op
);
1447 OP(BYTECODE_OP_NE_S64
):
1451 res
= (estack_bx_v
!= estack_ax_v
);
1452 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1454 estack_ax_t
= REG_S64
;
1455 next_pc
+= sizeof(struct binary_op
);
1458 OP(BYTECODE_OP_GT_S64
):
1462 res
= (estack_bx_v
> estack_ax_v
);
1463 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1465 estack_ax_t
= REG_S64
;
1466 next_pc
+= sizeof(struct binary_op
);
1469 OP(BYTECODE_OP_LT_S64
):
1473 res
= (estack_bx_v
< estack_ax_v
);
1474 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1476 estack_ax_t
= REG_S64
;
1477 next_pc
+= sizeof(struct binary_op
);
1480 OP(BYTECODE_OP_GE_S64
):
1484 res
= (estack_bx_v
>= estack_ax_v
);
1485 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1487 estack_ax_t
= REG_S64
;
1488 next_pc
+= sizeof(struct binary_op
);
1491 OP(BYTECODE_OP_LE_S64
):
1495 res
= (estack_bx_v
<= estack_ax_v
);
1496 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1498 estack_ax_t
= REG_S64
;
1499 next_pc
+= sizeof(struct binary_op
);
1503 OP(BYTECODE_OP_EQ_DOUBLE
):
1507 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
1508 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1510 estack_ax_t
= REG_S64
;
1511 next_pc
+= sizeof(struct binary_op
);
1514 OP(BYTECODE_OP_NE_DOUBLE
):
1518 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
1519 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1521 estack_ax_t
= REG_S64
;
1522 next_pc
+= sizeof(struct binary_op
);
1525 OP(BYTECODE_OP_GT_DOUBLE
):
1529 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
1530 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1532 estack_ax_t
= REG_S64
;
1533 next_pc
+= sizeof(struct binary_op
);
1536 OP(BYTECODE_OP_LT_DOUBLE
):
1540 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
1541 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1543 estack_ax_t
= REG_S64
;
1544 next_pc
+= sizeof(struct binary_op
);
1547 OP(BYTECODE_OP_GE_DOUBLE
):
1551 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
1552 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1554 estack_ax_t
= REG_S64
;
1555 next_pc
+= sizeof(struct binary_op
);
1558 OP(BYTECODE_OP_LE_DOUBLE
):
1562 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
1563 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1565 estack_ax_t
= REG_S64
;
1566 next_pc
+= sizeof(struct binary_op
);
1570 /* Mixed S64-double binary comparators */
1571 OP(BYTECODE_OP_EQ_DOUBLE_S64
):
1575 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
1576 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1578 estack_ax_t
= REG_S64
;
1579 next_pc
+= sizeof(struct binary_op
);
1582 OP(BYTECODE_OP_NE_DOUBLE_S64
):
1586 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
1587 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1589 estack_ax_t
= REG_S64
;
1590 next_pc
+= sizeof(struct binary_op
);
1593 OP(BYTECODE_OP_GT_DOUBLE_S64
):
1597 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
1598 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1600 estack_ax_t
= REG_S64
;
1601 next_pc
+= sizeof(struct binary_op
);
1604 OP(BYTECODE_OP_LT_DOUBLE_S64
):
1608 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
1609 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1611 estack_ax_t
= REG_S64
;
1612 next_pc
+= sizeof(struct binary_op
);
1615 OP(BYTECODE_OP_GE_DOUBLE_S64
):
1619 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
1620 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1622 estack_ax_t
= REG_S64
;
1623 next_pc
+= sizeof(struct binary_op
);
1626 OP(BYTECODE_OP_LE_DOUBLE_S64
):
1630 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
1631 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1633 estack_ax_t
= REG_S64
;
1634 next_pc
+= sizeof(struct binary_op
);
1638 OP(BYTECODE_OP_EQ_S64_DOUBLE
):
1642 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
1643 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1645 estack_ax_t
= REG_S64
;
1646 next_pc
+= sizeof(struct binary_op
);
1649 OP(BYTECODE_OP_NE_S64_DOUBLE
):
1653 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
1654 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1656 estack_ax_t
= REG_S64
;
1657 next_pc
+= sizeof(struct binary_op
);
1660 OP(BYTECODE_OP_GT_S64_DOUBLE
):
1664 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
1665 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1667 estack_ax_t
= REG_S64
;
1668 next_pc
+= sizeof(struct binary_op
);
1671 OP(BYTECODE_OP_LT_S64_DOUBLE
):
1675 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
1676 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1678 estack_ax_t
= REG_S64
;
1679 next_pc
+= sizeof(struct binary_op
);
1682 OP(BYTECODE_OP_GE_S64_DOUBLE
):
1686 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
1687 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1689 estack_ax_t
= REG_S64
;
1690 next_pc
+= sizeof(struct binary_op
);
1693 OP(BYTECODE_OP_LE_S64_DOUBLE
):
1697 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
1698 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1700 estack_ax_t
= REG_S64
;
1701 next_pc
+= sizeof(struct binary_op
);
1704 OP(BYTECODE_OP_BIT_RSHIFT
):
1708 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1713 /* Catch undefined behavior. */
1714 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1718 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1719 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1721 estack_ax_t
= REG_U64
;
1722 next_pc
+= sizeof(struct binary_op
);
1725 OP(BYTECODE_OP_BIT_LSHIFT
):
1729 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1734 /* Catch undefined behavior. */
1735 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1739 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1740 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1742 estack_ax_t
= REG_U64
;
1743 next_pc
+= sizeof(struct binary_op
);
1746 OP(BYTECODE_OP_BIT_AND
):
1750 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1755 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1756 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1758 estack_ax_t
= REG_U64
;
1759 next_pc
+= sizeof(struct binary_op
);
1762 OP(BYTECODE_OP_BIT_OR
):
1766 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1771 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1772 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1774 estack_ax_t
= REG_U64
;
1775 next_pc
+= sizeof(struct binary_op
);
1778 OP(BYTECODE_OP_BIT_XOR
):
1782 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1787 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1788 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1790 estack_ax_t
= REG_U64
;
1791 next_pc
+= sizeof(struct binary_op
);
1796 OP(BYTECODE_OP_UNARY_PLUS
):
1798 /* Dynamic typing. */
1799 switch (estack_ax_t
) {
1800 case REG_S64
: /* Fall-through. */
1802 JUMP_TO(BYTECODE_OP_UNARY_PLUS_S64
);
1804 JUMP_TO(BYTECODE_OP_UNARY_PLUS_DOUBLE
);
1805 case REG_STRING
: /* Fall-through */
1806 case REG_STAR_GLOB_STRING
:
1810 ERR("Unknown interpreter register type (%d)",
1816 OP(BYTECODE_OP_UNARY_MINUS
):
1818 /* Dynamic typing. */
1819 switch (estack_ax_t
) {
1820 case REG_S64
: /* Fall-through. */
1822 JUMP_TO(BYTECODE_OP_UNARY_MINUS_S64
);
1824 JUMP_TO(BYTECODE_OP_UNARY_MINUS_DOUBLE
);
1825 case REG_STRING
: /* Fall-through */
1826 case REG_STAR_GLOB_STRING
:
1830 ERR("Unknown interpreter register type (%d)",
1836 OP(BYTECODE_OP_UNARY_NOT
):
1838 /* Dynamic typing. */
1839 switch (estack_ax_t
) {
1840 case REG_S64
: /* Fall-through. */
1842 JUMP_TO(BYTECODE_OP_UNARY_NOT_S64
);
1844 JUMP_TO(BYTECODE_OP_UNARY_NOT_DOUBLE
);
1845 case REG_STRING
: /* Fall-through */
1846 case REG_STAR_GLOB_STRING
:
1850 ERR("Unknown interpreter register type (%d)",
1855 next_pc
+= sizeof(struct unary_op
);
1859 OP(BYTECODE_OP_UNARY_BIT_NOT
):
1861 /* Dynamic typing. */
1862 if (!IS_INTEGER_REGISTER(estack_ax_t
)) {
1867 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1868 estack_ax_t
= REG_U64
;
1869 next_pc
+= sizeof(struct unary_op
);
1873 OP(BYTECODE_OP_UNARY_PLUS_S64
):
1874 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE
):
1876 next_pc
+= sizeof(struct unary_op
);
1879 OP(BYTECODE_OP_UNARY_MINUS_S64
):
1881 estack_ax_v
= -estack_ax_v
;
1882 next_pc
+= sizeof(struct unary_op
);
1885 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE
):
1887 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
1888 next_pc
+= sizeof(struct unary_op
);
1891 OP(BYTECODE_OP_UNARY_NOT_S64
):
1893 estack_ax_v
= !estack_ax_v
;
1894 estack_ax_t
= REG_S64
;
1895 next_pc
+= sizeof(struct unary_op
);
1898 OP(BYTECODE_OP_UNARY_NOT_DOUBLE
):
1900 estack_ax_v
= !estack_ax(stack
, top
)->u
.d
;
1901 estack_ax_t
= REG_S64
;
1902 next_pc
+= sizeof(struct unary_op
);
1907 OP(BYTECODE_OP_AND
):
1909 struct logical_op
*insn
= (struct logical_op
*) pc
;
1911 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1915 /* If AX is 0, skip and evaluate to 0 */
1916 if (unlikely(estack_ax_v
== 0)) {
1917 dbg_printf("Jumping to bytecode offset %u\n",
1918 (unsigned int) insn
->skip_offset
);
1919 next_pc
= start_pc
+ insn
->skip_offset
;
1921 /* Pop 1 when jump not taken */
1922 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1923 next_pc
+= sizeof(struct logical_op
);
1929 struct logical_op
*insn
= (struct logical_op
*) pc
;
1931 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1935 /* If AX is nonzero, skip and evaluate to 1 */
1936 if (unlikely(estack_ax_v
!= 0)) {
1938 dbg_printf("Jumping to bytecode offset %u\n",
1939 (unsigned int) insn
->skip_offset
);
1940 next_pc
= start_pc
+ insn
->skip_offset
;
1942 /* Pop 1 when jump not taken */
1943 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1944 next_pc
+= sizeof(struct logical_op
);
1950 /* load field ref */
1951 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING
):
1953 struct load_op
*insn
= (struct load_op
*) pc
;
1954 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1956 dbg_printf("load field ref offset %u type string\n",
1958 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1959 estack_ax(stack
, top
)->u
.s
.str
=
1960 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1961 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1962 dbg_printf("Interpreter warning: loading a NULL string.\n");
1966 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
1967 estack_ax(stack
, top
)->u
.s
.literal_type
=
1968 ESTACK_STRING_LITERAL_TYPE_NONE
;
1969 estack_ax_t
= REG_STRING
;
1970 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1971 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1975 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
):
1977 struct load_op
*insn
= (struct load_op
*) pc
;
1978 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1980 dbg_printf("load field ref offset %u type sequence\n",
1982 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1983 estack_ax(stack
, top
)->u
.s
.seq_len
=
1984 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
1985 estack_ax(stack
, top
)->u
.s
.str
=
1986 *(const char **) (&interpreter_stack_data
[ref
->offset
1987 + sizeof(unsigned long)]);
1988 estack_ax_t
= REG_STRING
;
1989 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1990 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
1994 estack_ax(stack
, top
)->u
.s
.literal_type
=
1995 ESTACK_STRING_LITERAL_TYPE_NONE
;
1996 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2000 OP(BYTECODE_OP_LOAD_FIELD_REF_S64
):
2002 struct load_op
*insn
= (struct load_op
*) pc
;
2003 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2005 dbg_printf("load field ref offset %u type s64\n",
2007 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2009 ((struct literal_numeric
*) &interpreter_stack_data
[ref
->offset
])->v
;
2010 estack_ax_t
= REG_S64
;
2011 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
2012 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2016 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
):
2018 struct load_op
*insn
= (struct load_op
*) pc
;
2019 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2021 dbg_printf("load field ref offset %u type double\n",
2023 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2024 memcpy(&estack_ax(stack
, top
)->u
.d
, &interpreter_stack_data
[ref
->offset
],
2025 sizeof(struct literal_double
));
2026 estack_ax_t
= REG_DOUBLE
;
2027 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
2028 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2032 /* load from immediate operand */
2033 OP(BYTECODE_OP_LOAD_STRING
):
2035 struct load_op
*insn
= (struct load_op
*) pc
;
2037 dbg_printf("load string %s\n", insn
->data
);
2038 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2039 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2040 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2041 estack_ax(stack
, top
)->u
.s
.literal_type
=
2042 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
2043 estack_ax_t
= REG_STRING
;
2044 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2048 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING
):
2050 struct load_op
*insn
= (struct load_op
*) pc
;
2052 dbg_printf("load globbing pattern %s\n", insn
->data
);
2053 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2054 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2055 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2056 estack_ax(stack
, top
)->u
.s
.literal_type
=
2057 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
2058 estack_ax_t
= REG_STAR_GLOB_STRING
;
2059 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2063 OP(BYTECODE_OP_LOAD_S64
):
2065 struct load_op
*insn
= (struct load_op
*) pc
;
2067 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2068 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
2069 estack_ax_t
= REG_S64
;
2070 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
2071 next_pc
+= sizeof(struct load_op
)
2072 + sizeof(struct literal_numeric
);
2076 OP(BYTECODE_OP_LOAD_DOUBLE
):
2078 struct load_op
*insn
= (struct load_op
*) pc
;
2080 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2081 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
2082 sizeof(struct literal_double
));
2083 estack_ax_t
= REG_DOUBLE
;
2084 dbg_printf("load double %g\n", estack_ax(stack
, top
)->u
.d
);
2085 next_pc
+= sizeof(struct load_op
)
2086 + sizeof(struct literal_double
);
2091 OP(BYTECODE_OP_CAST_TO_S64
):
2093 /* Dynamic typing. */
2094 switch (estack_ax_t
) {
2096 JUMP_TO(BYTECODE_OP_CAST_NOP
);
2098 JUMP_TO(BYTECODE_OP_CAST_DOUBLE_TO_S64
);
2100 estack_ax_t
= REG_S64
;
2101 next_pc
+= sizeof(struct cast_op
);
2102 case REG_STRING
: /* Fall-through */
2103 case REG_STAR_GLOB_STRING
:
2107 ERR("Unknown interpreter register type (%d)",
2114 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64
):
2116 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
2117 estack_ax_t
= REG_S64
;
2118 next_pc
+= sizeof(struct cast_op
);
2122 OP(BYTECODE_OP_CAST_NOP
):
2124 next_pc
+= sizeof(struct cast_op
);
2128 /* get context ref */
2129 OP(BYTECODE_OP_GET_CONTEXT_REF
):
2131 struct load_op
*insn
= (struct load_op
*) pc
;
2132 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2133 struct lttng_ust_ctx_field
*ctx_field
;
2134 struct lttng_ust_ctx_value v
;
2136 dbg_printf("get context ref offset %u type dynamic\n",
2138 ctx_field
= ctx
->fields
[ref
->offset
];
2139 ctx_field
->get_value(ctx_field
, &v
);
2140 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2142 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
2145 case LTTNG_UST_DYNAMIC_TYPE_S64
:
2146 estack_ax_v
= v
.u
.s64
;
2147 estack_ax_t
= REG_S64
;
2148 dbg_printf("ref get context dynamic s64 %" PRIi64
"\n", estack_ax_v
);
2150 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
2151 estack_ax(stack
, top
)->u
.d
= v
.u
.d
;
2152 estack_ax_t
= REG_DOUBLE
;
2153 dbg_printf("ref get context dynamic double %g\n", estack_ax(stack
, top
)->u
.d
);
2155 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
2156 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2157 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2158 dbg_printf("Interpreter warning: loading a NULL string.\n");
2162 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2163 estack_ax(stack
, top
)->u
.s
.literal_type
=
2164 ESTACK_STRING_LITERAL_TYPE_NONE
;
2165 dbg_printf("ref get context dynamic string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2166 estack_ax_t
= REG_STRING
;
2169 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
2173 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2177 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING
):
2179 struct load_op
*insn
= (struct load_op
*) pc
;
2180 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2181 struct lttng_ust_ctx_field
*ctx_field
;
2182 struct lttng_ust_ctx_value v
;
2184 dbg_printf("get context ref offset %u type string\n",
2186 ctx_field
= ctx
->fields
[ref
->offset
];
2187 ctx_field
->get_value(ctx_field
, &v
);
2188 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2189 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2190 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2191 dbg_printf("Interpreter warning: loading a NULL string.\n");
2195 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2196 estack_ax(stack
, top
)->u
.s
.literal_type
=
2197 ESTACK_STRING_LITERAL_TYPE_NONE
;
2198 estack_ax_t
= REG_STRING
;
2199 dbg_printf("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2200 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2204 OP(BYTECODE_OP_GET_CONTEXT_REF_S64
):
2206 struct load_op
*insn
= (struct load_op
*) pc
;
2207 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2208 struct lttng_ust_ctx_field
*ctx_field
;
2209 struct lttng_ust_ctx_value v
;
2211 dbg_printf("get context ref offset %u type s64\n",
2213 ctx_field
= ctx
->fields
[ref
->offset
];
2214 ctx_field
->get_value(ctx_field
, &v
);
2215 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2216 estack_ax_v
= v
.u
.s64
;
2217 estack_ax_t
= REG_S64
;
2218 dbg_printf("ref get context s64 %" PRIi64
"\n", estack_ax_v
);
2219 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2223 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
):
2225 struct load_op
*insn
= (struct load_op
*) pc
;
2226 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2227 struct lttng_ust_ctx_field
*ctx_field
;
2228 struct lttng_ust_ctx_value v
;
2230 dbg_printf("get context ref offset %u type double\n",
2232 ctx_field
= ctx
->fields
[ref
->offset
];
2233 ctx_field
->get_value(ctx_field
, &v
);
2234 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2235 memcpy(&estack_ax(stack
, top
)->u
.d
, &v
.u
.d
, sizeof(struct literal_double
));
2236 estack_ax_t
= REG_DOUBLE
;
2237 dbg_printf("ref get context double %g\n", estack_ax(stack
, top
)->u
.d
);
2238 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2242 OP(BYTECODE_OP_GET_CONTEXT_ROOT
):
2244 dbg_printf("op get context root\n");
2245 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2246 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
2247 /* "field" only needed for variants. */
2248 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2249 estack_ax_t
= REG_PTR
;
2250 next_pc
+= sizeof(struct load_op
);
2254 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT
):
2256 dbg_printf("op get app context root\n");
2257 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2258 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_APP_CONTEXT
;
2259 /* "field" only needed for variants. */
2260 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2261 estack_ax_t
= REG_PTR
;
2262 next_pc
+= sizeof(struct load_op
);
2266 OP(BYTECODE_OP_GET_PAYLOAD_ROOT
):
2268 dbg_printf("op get app payload root\n");
2269 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2270 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
2271 estack_ax(stack
, top
)->u
.ptr
.ptr
= interpreter_stack_data
;
2272 /* "field" only needed for variants. */
2273 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2274 estack_ax_t
= REG_PTR
;
2275 next_pc
+= sizeof(struct load_op
);
2279 OP(BYTECODE_OP_GET_SYMBOL
):
2281 dbg_printf("op get symbol\n");
2282 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
2284 ERR("Nested fields not implemented yet.");
2287 case LOAD_ROOT_CONTEXT
:
2288 case LOAD_ROOT_APP_CONTEXT
:
2289 case LOAD_ROOT_PAYLOAD
:
2291 * symbol lookup is performed by
2297 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
2301 OP(BYTECODE_OP_GET_SYMBOL_FIELD
):
2304 * Used for first variant encountered in a
2305 * traversal. Variants are not implemented yet.
2311 OP(BYTECODE_OP_GET_INDEX_U16
):
2313 struct load_op
*insn
= (struct load_op
*) pc
;
2314 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
2316 dbg_printf("op get index u16\n");
2317 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2320 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2321 estack_ax_t
= estack_ax(stack
, top
)->type
;
2322 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
2326 OP(BYTECODE_OP_GET_INDEX_U64
):
2328 struct load_op
*insn
= (struct load_op
*) pc
;
2329 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
2331 dbg_printf("op get index u64\n");
2332 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2335 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2336 estack_ax_t
= estack_ax(stack
, top
)->type
;
2337 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
2341 OP(BYTECODE_OP_LOAD_FIELD
):
2343 dbg_printf("op load field\n");
2344 ret
= dynamic_load_field(estack_ax(stack
, top
));
2347 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2348 estack_ax_t
= estack_ax(stack
, top
)->type
;
2349 next_pc
+= sizeof(struct load_op
);
2353 OP(BYTECODE_OP_LOAD_FIELD_S8
):
2355 dbg_printf("op load field s8\n");
2357 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2358 estack_ax_t
= REG_S64
;
2359 next_pc
+= sizeof(struct load_op
);
2362 OP(BYTECODE_OP_LOAD_FIELD_S16
):
2364 dbg_printf("op load field s16\n");
2366 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2367 estack_ax_t
= REG_S64
;
2368 next_pc
+= sizeof(struct load_op
);
2371 OP(BYTECODE_OP_LOAD_FIELD_S32
):
2373 dbg_printf("op load field s32\n");
2375 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2376 estack_ax_t
= REG_S64
;
2377 next_pc
+= sizeof(struct load_op
);
2380 OP(BYTECODE_OP_LOAD_FIELD_S64
):
2382 dbg_printf("op load field s64\n");
2384 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2385 estack_ax_t
= REG_S64
;
2386 next_pc
+= sizeof(struct load_op
);
2389 OP(BYTECODE_OP_LOAD_FIELD_U8
):
2391 dbg_printf("op load field u8\n");
2393 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2394 estack_ax_t
= REG_U64
;
2395 next_pc
+= sizeof(struct load_op
);
2398 OP(BYTECODE_OP_LOAD_FIELD_U16
):
2400 dbg_printf("op load field u16\n");
2402 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2403 estack_ax_t
= REG_U64
;
2404 next_pc
+= sizeof(struct load_op
);
2407 OP(BYTECODE_OP_LOAD_FIELD_U32
):
2409 dbg_printf("op load field u32\n");
2411 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2412 estack_ax_t
= REG_U64
;
2413 next_pc
+= sizeof(struct load_op
);
2416 OP(BYTECODE_OP_LOAD_FIELD_U64
):
2418 dbg_printf("op load field u64\n");
2420 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2421 estack_ax_t
= REG_U64
;
2422 next_pc
+= sizeof(struct load_op
);
2425 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE
):
2427 dbg_printf("op load field double\n");
2429 memcpy(&estack_ax(stack
, top
)->u
.d
,
2430 estack_ax(stack
, top
)->u
.ptr
.ptr
,
2431 sizeof(struct literal_double
));
2432 estack_ax(stack
, top
)->type
= REG_DOUBLE
;
2433 next_pc
+= sizeof(struct load_op
);
2437 OP(BYTECODE_OP_LOAD_FIELD_STRING
):
2441 dbg_printf("op load field string\n");
2442 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2443 estack_ax(stack
, top
)->u
.s
.str
= str
;
2444 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2445 dbg_printf("Interpreter warning: loading a NULL string.\n");
2449 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2450 estack_ax(stack
, top
)->u
.s
.literal_type
=
2451 ESTACK_STRING_LITERAL_TYPE_NONE
;
2452 estack_ax(stack
, top
)->type
= REG_STRING
;
2453 next_pc
+= sizeof(struct load_op
);
2457 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE
):
2461 dbg_printf("op load field string sequence\n");
2462 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
2463 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
2464 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
2465 estack_ax(stack
, top
)->type
= REG_STRING
;
2466 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2467 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2471 estack_ax(stack
, top
)->u
.s
.literal_type
=
2472 ESTACK_STRING_LITERAL_TYPE_NONE
;
2473 next_pc
+= sizeof(struct load_op
);
2479 /* No need to prepare output if an error occurred. */
2481 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR
;
2483 /* Prepare output. */
2484 switch (ust_bytecode
->type
) {
2485 case LTTNG_UST_BYTECODE_TYPE_FILTER
:
2487 struct lttng_ust_bytecode_filter_ctx
*filter_ctx
=
2488 (struct lttng_ust_bytecode_filter_ctx
*) caller_ctx
;
2490 filter_ctx
->result
= LTTNG_UST_BYTECODE_FILTER_ACCEPT
;
2492 filter_ctx
->result
= LTTNG_UST_BYTECODE_FILTER_REJECT
;
2495 case LTTNG_UST_BYTECODE_TYPE_CAPTURE
:
2496 ret
= lttng_bytecode_interpret_format_output(estack_ax(stack
, top
),
2497 (struct lttng_interpreter_output
*) caller_ctx
);
2504 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR
;
2506 return LTTNG_UST_BYTECODE_INTERPRETER_OK
;
2510 * Return LTTNG_UST_EVENT_FILTER_ACCEPT or LTTNG_UST_EVENT_FILTER_REJECT.
2512 int lttng_ust_interpret_event_filter(struct lttng_ust_event_common
*event
,
2513 const char *interpreter_stack_data
,
2514 void *event_filter_ctx
)
2516 struct lttng_ust_bytecode_runtime
*filter_bc_runtime
;
2517 struct cds_list_head
*filter_bytecode_runtime_head
= &event
->priv
->filter_bytecode_runtime_head
;
2518 struct lttng_ust_bytecode_filter_ctx bytecode_filter_ctx
;
2519 bool filter_record
= false;
2521 cds_list_for_each_entry_rcu(filter_bc_runtime
, filter_bytecode_runtime_head
, node
) {
2522 if (caa_likely(filter_bc_runtime
->interpreter_func(filter_bc_runtime
,
2523 interpreter_stack_data
, &bytecode_filter_ctx
) == LTTNG_UST_BYTECODE_INTERPRETER_OK
)) {
2524 if (caa_unlikely(bytecode_filter_ctx
.result
== LTTNG_UST_BYTECODE_FILTER_ACCEPT
)) {
2525 filter_record
= true;
2531 return LTTNG_UST_EVENT_FILTER_ACCEPT
;
2533 return LTTNG_UST_EVENT_FILTER_REJECT
;