/* SPDX-License-Identifier: MIT
 *
 * lttng-bytecode-interpreter.c
 *
 * LTTng modules bytecode interpreter.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 */
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
15 #include <lttng/lttng-bytecode.h>
16 #include <lttng/string-utils.h>
19 * get_char should be called with page fault handler disabled if it is expected
20 * to handle user-space read.
23 char get_char(const struct estack_entry
*reg
, size_t offset
)
25 if (unlikely(offset
>= reg
->u
.s
.seq_len
))
30 /* Handle invalid access as end of string. */
31 if (unlikely(!lttng_access_ok(VERIFY_READ
,
32 reg
->u
.s
.user_str
+ offset
,
35 /* Handle fault (nonzero return value) as end of string. */
36 if (unlikely(__copy_from_user_inatomic(&c
,
37 reg
->u
.s
.user_str
+ offset
,
42 return reg
->u
.s
.str
[offset
];
48 * -2: unknown escape char.
52 int parse_char(struct estack_entry
*reg
, char *c
, size_t *offset
)
57 *c
= get_char(reg
, *offset
);
73 char get_char_at_cb(size_t at
, void *data
)
75 return get_char(data
, at
);
79 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
81 bool has_user
= false;
83 struct estack_entry
*pattern_reg
;
84 struct estack_entry
*candidate_reg
;
86 /* Disable the page fault handler when reading from userspace. */
87 if (estack_bx(stack
, top
)->u
.s
.user
88 || estack_ax(stack
, top
)->u
.s
.user
) {
93 /* Find out which side is the pattern vs. the candidate. */
94 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
95 pattern_reg
= estack_ax(stack
, top
);
96 candidate_reg
= estack_bx(stack
, top
);
98 pattern_reg
= estack_bx(stack
, top
);
99 candidate_reg
= estack_ax(stack
, top
);
102 /* Perform the match operation. */
103 result
= !strutils_star_glob_match_char_cb(get_char_at_cb
,
104 pattern_reg
, get_char_at_cb
, candidate_reg
);
112 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
114 size_t offset_bx
= 0, offset_ax
= 0;
115 int diff
, has_user
= 0;
117 if (estack_bx(stack
, top
)->u
.s
.user
118 || estack_ax(stack
, top
)->u
.s
.user
) {
126 char char_bx
, char_ax
;
128 char_bx
= get_char(estack_bx(stack
, top
), offset_bx
);
129 char_ax
= get_char(estack_ax(stack
, top
), offset_ax
);
131 if (unlikely(char_bx
== '\0')) {
132 if (char_ax
== '\0') {
136 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
137 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
138 ret
= parse_char(estack_ax(stack
, top
),
139 &char_ax
, &offset_ax
);
149 if (unlikely(char_ax
== '\0')) {
150 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
151 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
152 ret
= parse_char(estack_bx(stack
, top
),
153 &char_bx
, &offset_bx
);
162 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
163 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
164 ret
= parse_char(estack_bx(stack
, top
),
165 &char_bx
, &offset_bx
);
169 } else if (ret
== -2) {
172 /* else compare both char */
174 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
175 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
176 ret
= parse_char(estack_ax(stack
, top
),
177 &char_ax
, &offset_ax
);
181 } else if (ret
== -2) {
198 diff
= char_bx
- char_ax
;
210 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data
,
211 struct lttng_probe_ctx
*lttng_probe_ctx
,
212 const char *filter_stack_data
)
214 return LTTNG_INTERPRETER_DISCARD
;
217 uint64_t lttng_bytecode_capture_interpret_false(void *filter_data
,
218 struct lttng_probe_ctx
*lttng_probe_ctx
,
219 const char *capture_stack_data
,
220 struct lttng_interpreter_output
*output
)
222 return LTTNG_INTERPRETER_DISCARD
;
225 #ifdef INTERPRETER_USE_SWITCH
228 * Fallback for compilers that do not support taking address of labels.
232 start_pc = &bytecode->data[0]; \
233 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
235 dbg_printk("LTTng: Executing op %s (%u)\n", \
236 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
237 (unsigned int) *(bytecode_opcode_t *) pc); \
238 switch (*(bytecode_opcode_t *) pc) {
240 #define OP(name) case name
250 * Dispatch-table based interpreter.
254 start_pc = &bytecode->code[0]; \
255 pc = next_pc = start_pc; \
256 if (unlikely(pc - start_pc >= bytecode->len)) \
258 goto *dispatch[*(bytecode_opcode_t *) pc];
265 goto *dispatch[*(bytecode_opcode_t *) pc];
/*
 * True when the estack register type holds an integer value.
 * The macro parameter is parenthesized to stay correct when expanded
 * with a compound expression argument.
 */
#define IS_INTEGER_REGISTER(reg_type) \
	((reg_type) == REG_S64 || (reg_type) == REG_U64)
274 static int context_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
275 struct load_ptr
*ptr
,
279 struct lttng_ctx_field
*ctx_field
;
280 struct lttng_event_field
*field
;
281 union lttng_ctx_value v
;
283 ctx_field
= <tng_static_ctx
->fields
[idx
];
284 field
= &ctx_field
->event_field
;
285 ptr
->type
= LOAD_OBJECT
;
286 /* field is only used for types nested within variants. */
289 switch (field
->type
.type
) {
290 case lttng_kernel_type_integer
:
291 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
292 if (field
->type
.u
.integer
.signedness
) {
293 ptr
->object_type
= OBJECT_TYPE_S64
;
295 ptr
->ptr
= &ptr
->u
.s64
;
297 ptr
->object_type
= OBJECT_TYPE_U64
;
298 ptr
->u
.u64
= v
.s64
; /* Cast. */
299 ptr
->ptr
= &ptr
->u
.u64
;
302 case lttng_kernel_type_enum_nestable
:
304 const struct lttng_integer_type
*itype
=
305 &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
307 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
308 if (itype
->signedness
) {
309 ptr
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
311 ptr
->ptr
= &ptr
->u
.s64
;
313 ptr
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
314 ptr
->u
.u64
= v
.s64
; /* Cast. */
315 ptr
->ptr
= &ptr
->u
.u64
;
319 case lttng_kernel_type_array_nestable
:
320 if (!lttng_is_bytewise_integer(field
->type
.u
.array_nestable
.elem_type
)) {
321 printk(KERN_WARNING
"LTTng: bytecode: Array nesting only supports integer types.\n");
324 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
325 printk(KERN_WARNING
"LTTng: bytecode: Only string arrays are supported for contexts.\n");
328 ptr
->object_type
= OBJECT_TYPE_STRING
;
329 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
332 case lttng_kernel_type_sequence_nestable
:
333 if (!lttng_is_bytewise_integer(field
->type
.u
.sequence_nestable
.elem_type
)) {
334 printk(KERN_WARNING
"LTTng: bytecode: Sequence nesting only supports integer types.\n");
337 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
338 printk(KERN_WARNING
"LTTng: bytecode: Only string sequences are supported for contexts.\n");
341 ptr
->object_type
= OBJECT_TYPE_STRING
;
342 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
345 case lttng_kernel_type_string
:
346 ptr
->object_type
= OBJECT_TYPE_STRING
;
347 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
350 case lttng_kernel_type_struct_nestable
:
351 printk(KERN_WARNING
"LTTng: bytecode: Structure type cannot be loaded.\n");
353 case lttng_kernel_type_variant_nestable
:
354 printk(KERN_WARNING
"LTTng: bytecode: Variant type cannot be loaded.\n");
357 printk(KERN_WARNING
"LTTng: bytecode: Unknown type: %d", (int) field
->type
.type
);
363 static int dynamic_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
364 struct bytecode_runtime
*runtime
,
365 uint64_t index
, struct estack_entry
*stack_top
)
368 const struct bytecode_get_index_data
*gid
;
370 gid
= (const struct bytecode_get_index_data
*) &runtime
->data
[index
];
371 switch (stack_top
->u
.ptr
.type
) {
373 switch (stack_top
->u
.ptr
.object_type
) {
374 case OBJECT_TYPE_ARRAY
:
378 WARN_ON_ONCE(gid
->offset
>= gid
->array_len
);
379 /* Skip count (unsigned long) */
380 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
381 ptr
= ptr
+ gid
->offset
;
382 stack_top
->u
.ptr
.ptr
= ptr
;
383 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
384 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
385 BUG_ON(stack_top
->u
.ptr
.field
->type
.type
!= lttng_kernel_type_array_nestable
);
386 stack_top
->u
.ptr
.field
= NULL
;
389 case OBJECT_TYPE_SEQUENCE
:
394 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
395 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
396 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
400 ptr
= ptr
+ gid
->offset
;
401 stack_top
->u
.ptr
.ptr
= ptr
;
402 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
403 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
404 BUG_ON(stack_top
->u
.ptr
.field
->type
.type
!= lttng_kernel_type_sequence_nestable
);
405 stack_top
->u
.ptr
.field
= NULL
;
408 case OBJECT_TYPE_STRUCT
:
409 printk(KERN_WARNING
"LTTng: bytecode: Nested structures are not supported yet.\n");
412 case OBJECT_TYPE_VARIANT
:
414 printk(KERN_WARNING
"LTTng: bytecode: Unexpected get index type %d",
415 (int) stack_top
->u
.ptr
.object_type
);
420 case LOAD_ROOT_CONTEXT
:
421 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
423 ret
= context_get_index(lttng_probe_ctx
,
431 case LOAD_ROOT_PAYLOAD
:
432 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
433 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
434 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
435 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
436 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
437 stack_top
->u
.ptr
.field
= gid
->field
;
438 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
442 stack_top
->type
= REG_PTR
;
450 static int dynamic_load_field(struct estack_entry
*stack_top
)
454 switch (stack_top
->u
.ptr
.type
) {
457 case LOAD_ROOT_CONTEXT
:
458 case LOAD_ROOT_APP_CONTEXT
:
459 case LOAD_ROOT_PAYLOAD
:
461 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
465 switch (stack_top
->u
.ptr
.object_type
) {
467 dbg_printk("op load field s8\n");
468 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
469 stack_top
->type
= REG_S64
;
471 case OBJECT_TYPE_S16
:
475 dbg_printk("op load field s16\n");
476 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
477 if (stack_top
->u
.ptr
.rev_bo
)
479 stack_top
->u
.v
= tmp
;
480 stack_top
->type
= REG_S64
;
483 case OBJECT_TYPE_S32
:
487 dbg_printk("op load field s32\n");
488 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
489 if (stack_top
->u
.ptr
.rev_bo
)
491 stack_top
->u
.v
= tmp
;
492 stack_top
->type
= REG_S64
;
495 case OBJECT_TYPE_S64
:
499 dbg_printk("op load field s64\n");
500 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
501 if (stack_top
->u
.ptr
.rev_bo
)
503 stack_top
->u
.v
= tmp
;
504 stack_top
->type
= REG_S64
;
507 case OBJECT_TYPE_SIGNED_ENUM
:
511 dbg_printk("op load field signed enumeration\n");
512 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
513 if (stack_top
->u
.ptr
.rev_bo
)
515 stack_top
->u
.v
= tmp
;
516 stack_top
->type
= REG_S64
;
520 dbg_printk("op load field u8\n");
521 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
522 stack_top
->type
= REG_U64
;
524 case OBJECT_TYPE_U16
:
528 dbg_printk("op load field u16\n");
529 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
530 if (stack_top
->u
.ptr
.rev_bo
)
532 stack_top
->u
.v
= tmp
;
533 stack_top
->type
= REG_U64
;
536 case OBJECT_TYPE_U32
:
540 dbg_printk("op load field u32\n");
541 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
542 if (stack_top
->u
.ptr
.rev_bo
)
544 stack_top
->u
.v
= tmp
;
545 stack_top
->type
= REG_U64
;
548 case OBJECT_TYPE_U64
:
552 dbg_printk("op load field u64\n");
553 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
554 if (stack_top
->u
.ptr
.rev_bo
)
556 stack_top
->u
.v
= tmp
;
557 stack_top
->type
= REG_U64
;
560 case OBJECT_TYPE_UNSIGNED_ENUM
:
564 dbg_printk("op load field unsigned enumeration\n");
565 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
566 if (stack_top
->u
.ptr
.rev_bo
)
568 stack_top
->u
.v
= tmp
;
569 stack_top
->type
= REG_U64
;
572 case OBJECT_TYPE_STRING
:
576 dbg_printk("op load field string\n");
577 str
= (const char *) stack_top
->u
.ptr
.ptr
;
578 stack_top
->u
.s
.str
= str
;
579 if (unlikely(!stack_top
->u
.s
.str
)) {
580 dbg_printk("Bytecode warning: loading a NULL string.\n");
584 stack_top
->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
585 stack_top
->u
.s
.literal_type
=
586 ESTACK_STRING_LITERAL_TYPE_NONE
;
587 stack_top
->type
= REG_STRING
;
590 case OBJECT_TYPE_STRING_SEQUENCE
:
594 dbg_printk("op load field string sequence\n");
595 ptr
= stack_top
->u
.ptr
.ptr
;
596 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
597 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
598 if (unlikely(!stack_top
->u
.s
.str
)) {
599 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
603 stack_top
->u
.s
.literal_type
=
604 ESTACK_STRING_LITERAL_TYPE_NONE
;
605 stack_top
->type
= REG_STRING
;
608 case OBJECT_TYPE_DYNAMIC
:
610 * Dynamic types in context are looked up
611 * by context get index.
615 case OBJECT_TYPE_DOUBLE
:
618 case OBJECT_TYPE_SEQUENCE
:
619 case OBJECT_TYPE_ARRAY
:
620 case OBJECT_TYPE_STRUCT
:
621 case OBJECT_TYPE_VARIANT
:
622 printk(KERN_WARNING
"LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
633 int lttng_bytecode_interpret_format_output(struct estack_entry
*ax
,
634 struct lttng_interpreter_output
*output
)
641 output
->type
= LTTNG_INTERPRETER_TYPE_S64
;
642 output
->u
.s
= ax
->u
.v
;
645 output
->type
= LTTNG_INTERPRETER_TYPE_U64
;
646 output
->u
.u
= (uint64_t) ax
->u
.v
;
649 output
->type
= LTTNG_INTERPRETER_TYPE_STRING
;
650 output
->u
.str
.str
= ax
->u
.s
.str
;
651 output
->u
.str
.len
= ax
->u
.s
.seq_len
;
654 switch (ax
->u
.ptr
.object_type
) {
656 case OBJECT_TYPE_S16
:
657 case OBJECT_TYPE_S32
:
658 case OBJECT_TYPE_S64
:
660 case OBJECT_TYPE_U16
:
661 case OBJECT_TYPE_U32
:
662 case OBJECT_TYPE_U64
:
663 case OBJECT_TYPE_DOUBLE
:
664 case OBJECT_TYPE_STRING
:
665 case OBJECT_TYPE_STRING_SEQUENCE
:
666 ret
= dynamic_load_field(ax
);
669 /* Retry after loading ptr into stack top. */
671 case OBJECT_TYPE_SEQUENCE
:
672 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
673 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
674 output
->u
.sequence
.nr_elem
= *(unsigned long *) ax
->u
.ptr
.ptr
;
675 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.sequence_nestable
.elem_type
;
677 case OBJECT_TYPE_ARRAY
:
678 /* Skip count (unsigned long) */
679 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
680 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
681 output
->u
.sequence
.nr_elem
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.length
;
682 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.elem_type
;
684 case OBJECT_TYPE_SIGNED_ENUM
:
685 ret
= dynamic_load_field(ax
);
688 output
->type
= LTTNG_INTERPRETER_TYPE_SIGNED_ENUM
;
689 output
->u
.s
= ax
->u
.v
;
691 case OBJECT_TYPE_UNSIGNED_ENUM
:
692 ret
= dynamic_load_field(ax
);
695 output
->type
= LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM
;
696 output
->u
.u
= ax
->u
.v
;
698 case OBJECT_TYPE_STRUCT
:
699 case OBJECT_TYPE_VARIANT
:
705 case REG_STAR_GLOB_STRING
:
706 case REG_TYPE_UNKNOWN
:
711 return LTTNG_INTERPRETER_RECORD_FLAG
;
#ifdef DEBUG
#define DBG_USER_STR_CUTOFF 32

/*
 * In debug mode, print user string (truncated, if necessary).
 * Page faults are disabled around the copy since @user_str_reg may
 * reference userspace memory.
 */
static inline
void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
{
	size_t pos = 0;
	char last_char;
	char user_str[DBG_USER_STR_CUTOFF];

	pagefault_disable();
	do {
		last_char = get_char(user_str_reg, pos);
		user_str[pos] = last_char;
		pos++;
	} while (last_char != '\0' && pos < sizeof(user_str));
	pagefault_enable();

	user_str[sizeof(user_str) - 1] = '\0';
	dbg_printk("load field ref user string: '%s%s'\n", user_str,
		last_char != '\0' ? "[...]" : "");
}
#else
/* No-op stub when debug output is compiled out. */
static inline
void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
{
}
#endif
748 * Return 0 (discard), or raise the 0x1 flag (log event).
749 * Currently, other flags are kept for future extensions and have no
753 uint64_t bytecode_interpret(void *interpreter_data
,
754 struct lttng_probe_ctx
*lttng_probe_ctx
,
755 const char *interpreter_stack_data
,
756 struct lttng_interpreter_output
*output
)
758 struct bytecode_runtime
*bytecode
= interpreter_data
;
759 void *pc
, *next_pc
, *start_pc
;
762 struct estack _stack
;
763 struct estack
*stack
= &_stack
;
764 register int64_t ax
= 0, bx
= 0;
765 register enum entry_type ax_t
= REG_TYPE_UNKNOWN
, bx_t
= REG_TYPE_UNKNOWN
;
766 register int top
= INTERPRETER_STACK_EMPTY
;
767 #ifndef INTERPRETER_USE_SWITCH
768 static void *dispatch
[NR_BYTECODE_OPS
] = {
769 [ BYTECODE_OP_UNKNOWN
] = &&LABEL_BYTECODE_OP_UNKNOWN
,
771 [ BYTECODE_OP_RETURN
] = &&LABEL_BYTECODE_OP_RETURN
,
774 [ BYTECODE_OP_MUL
] = &&LABEL_BYTECODE_OP_MUL
,
775 [ BYTECODE_OP_DIV
] = &&LABEL_BYTECODE_OP_DIV
,
776 [ BYTECODE_OP_MOD
] = &&LABEL_BYTECODE_OP_MOD
,
777 [ BYTECODE_OP_PLUS
] = &&LABEL_BYTECODE_OP_PLUS
,
778 [ BYTECODE_OP_MINUS
] = &&LABEL_BYTECODE_OP_MINUS
,
779 [ BYTECODE_OP_BIT_RSHIFT
] = &&LABEL_BYTECODE_OP_BIT_RSHIFT
,
780 [ BYTECODE_OP_BIT_LSHIFT
] = &&LABEL_BYTECODE_OP_BIT_LSHIFT
,
781 [ BYTECODE_OP_BIT_AND
] = &&LABEL_BYTECODE_OP_BIT_AND
,
782 [ BYTECODE_OP_BIT_OR
] = &&LABEL_BYTECODE_OP_BIT_OR
,
783 [ BYTECODE_OP_BIT_XOR
] = &&LABEL_BYTECODE_OP_BIT_XOR
,
785 /* binary comparators */
786 [ BYTECODE_OP_EQ
] = &&LABEL_BYTECODE_OP_EQ
,
787 [ BYTECODE_OP_NE
] = &&LABEL_BYTECODE_OP_NE
,
788 [ BYTECODE_OP_GT
] = &&LABEL_BYTECODE_OP_GT
,
789 [ BYTECODE_OP_LT
] = &&LABEL_BYTECODE_OP_LT
,
790 [ BYTECODE_OP_GE
] = &&LABEL_BYTECODE_OP_GE
,
791 [ BYTECODE_OP_LE
] = &&LABEL_BYTECODE_OP_LE
,
793 /* string binary comparator */
794 [ BYTECODE_OP_EQ_STRING
] = &&LABEL_BYTECODE_OP_EQ_STRING
,
795 [ BYTECODE_OP_NE_STRING
] = &&LABEL_BYTECODE_OP_NE_STRING
,
796 [ BYTECODE_OP_GT_STRING
] = &&LABEL_BYTECODE_OP_GT_STRING
,
797 [ BYTECODE_OP_LT_STRING
] = &&LABEL_BYTECODE_OP_LT_STRING
,
798 [ BYTECODE_OP_GE_STRING
] = &&LABEL_BYTECODE_OP_GE_STRING
,
799 [ BYTECODE_OP_LE_STRING
] = &&LABEL_BYTECODE_OP_LE_STRING
,
801 /* globbing pattern binary comparator */
802 [ BYTECODE_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING
,
803 [ BYTECODE_OP_NE_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING
,
805 /* s64 binary comparator */
806 [ BYTECODE_OP_EQ_S64
] = &&LABEL_BYTECODE_OP_EQ_S64
,
807 [ BYTECODE_OP_NE_S64
] = &&LABEL_BYTECODE_OP_NE_S64
,
808 [ BYTECODE_OP_GT_S64
] = &&LABEL_BYTECODE_OP_GT_S64
,
809 [ BYTECODE_OP_LT_S64
] = &&LABEL_BYTECODE_OP_LT_S64
,
810 [ BYTECODE_OP_GE_S64
] = &&LABEL_BYTECODE_OP_GE_S64
,
811 [ BYTECODE_OP_LE_S64
] = &&LABEL_BYTECODE_OP_LE_S64
,
813 /* double binary comparator */
814 [ BYTECODE_OP_EQ_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE
,
815 [ BYTECODE_OP_NE_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_DOUBLE
,
816 [ BYTECODE_OP_GT_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_DOUBLE
,
817 [ BYTECODE_OP_LT_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_DOUBLE
,
818 [ BYTECODE_OP_GE_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_DOUBLE
,
819 [ BYTECODE_OP_LE_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_DOUBLE
,
821 /* Mixed S64-double binary comparators */
822 [ BYTECODE_OP_EQ_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64
,
823 [ BYTECODE_OP_NE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64
,
824 [ BYTECODE_OP_GT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64
,
825 [ BYTECODE_OP_LT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64
,
826 [ BYTECODE_OP_GE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64
,
827 [ BYTECODE_OP_LE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64
,
829 [ BYTECODE_OP_EQ_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE
,
830 [ BYTECODE_OP_NE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE
,
831 [ BYTECODE_OP_GT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE
,
832 [ BYTECODE_OP_LT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE
,
833 [ BYTECODE_OP_GE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE
,
834 [ BYTECODE_OP_LE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE
,
837 [ BYTECODE_OP_UNARY_PLUS
] = &&LABEL_BYTECODE_OP_UNARY_PLUS
,
838 [ BYTECODE_OP_UNARY_MINUS
] = &&LABEL_BYTECODE_OP_UNARY_MINUS
,
839 [ BYTECODE_OP_UNARY_NOT
] = &&LABEL_BYTECODE_OP_UNARY_NOT
,
840 [ BYTECODE_OP_UNARY_PLUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64
,
841 [ BYTECODE_OP_UNARY_MINUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64
,
842 [ BYTECODE_OP_UNARY_NOT_S64
] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64
,
843 [ BYTECODE_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE
,
844 [ BYTECODE_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE
,
845 [ BYTECODE_OP_UNARY_NOT_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE
,
848 [ BYTECODE_OP_AND
] = &&LABEL_BYTECODE_OP_AND
,
849 [ BYTECODE_OP_OR
] = &&LABEL_BYTECODE_OP_OR
,
852 [ BYTECODE_OP_LOAD_FIELD_REF
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF
,
853 [ BYTECODE_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING
,
854 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
,
855 [ BYTECODE_OP_LOAD_FIELD_REF_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64
,
856 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
,
858 /* load from immediate operand */
859 [ BYTECODE_OP_LOAD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STRING
,
860 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING
,
861 [ BYTECODE_OP_LOAD_S64
] = &&LABEL_BYTECODE_OP_LOAD_S64
,
862 [ BYTECODE_OP_LOAD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE
,
865 [ BYTECODE_OP_CAST_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_TO_S64
,
866 [ BYTECODE_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64
,
867 [ BYTECODE_OP_CAST_NOP
] = &&LABEL_BYTECODE_OP_CAST_NOP
,
869 /* get context ref */
870 [ BYTECODE_OP_GET_CONTEXT_REF
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF
,
871 [ BYTECODE_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING
,
872 [ BYTECODE_OP_GET_CONTEXT_REF_S64
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64
,
873 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
,
875 /* load userspace field ref */
876 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
,
877 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
,
879 /* Instructions for recursive traversal through composed types. */
880 [ BYTECODE_OP_GET_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT
,
881 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT
,
882 [ BYTECODE_OP_GET_PAYLOAD_ROOT
] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT
,
884 [ BYTECODE_OP_GET_SYMBOL
] = &&LABEL_BYTECODE_OP_GET_SYMBOL
,
885 [ BYTECODE_OP_GET_SYMBOL_FIELD
] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD
,
886 [ BYTECODE_OP_GET_INDEX_U16
] = &&LABEL_BYTECODE_OP_GET_INDEX_U16
,
887 [ BYTECODE_OP_GET_INDEX_U64
] = &&LABEL_BYTECODE_OP_GET_INDEX_U64
,
889 [ BYTECODE_OP_LOAD_FIELD
] = &&LABEL_BYTECODE_OP_LOAD_FIELD
,
890 [ BYTECODE_OP_LOAD_FIELD_S8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8
,
891 [ BYTECODE_OP_LOAD_FIELD_S16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16
,
892 [ BYTECODE_OP_LOAD_FIELD_S32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32
,
893 [ BYTECODE_OP_LOAD_FIELD_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64
,
894 [ BYTECODE_OP_LOAD_FIELD_U8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8
,
895 [ BYTECODE_OP_LOAD_FIELD_U16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16
,
896 [ BYTECODE_OP_LOAD_FIELD_U32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32
,
897 [ BYTECODE_OP_LOAD_FIELD_U64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64
,
898 [ BYTECODE_OP_LOAD_FIELD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING
,
899 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE
,
900 [ BYTECODE_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE
,
902 [ BYTECODE_OP_UNARY_BIT_NOT
] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT
,
904 [ BYTECODE_OP_RETURN_S64
] = &&LABEL_BYTECODE_OP_RETURN_S64
,
906 #endif /* #ifndef INTERPRETER_USE_SWITCH */
910 OP(BYTECODE_OP_UNKNOWN
):
911 OP(BYTECODE_OP_LOAD_FIELD_REF
):
912 OP(BYTECODE_OP_GET_CONTEXT_REF
):
913 #ifdef INTERPRETER_USE_SWITCH
915 #endif /* INTERPRETER_USE_SWITCH */
916 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
917 (unsigned int) *(bytecode_opcode_t
*) pc
);
921 OP(BYTECODE_OP_RETURN
):
922 OP(BYTECODE_OP_RETURN_S64
):
923 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
924 switch (estack_ax_t
) {
927 retval
= !!estack_ax_v
;
938 case REG_STAR_GLOB_STRING
:
939 case REG_TYPE_UNKNOWN
:
950 OP(BYTECODE_OP_PLUS
):
951 OP(BYTECODE_OP_MINUS
):
952 printk(KERN_WARNING
"LTTng: bytecode: unsupported bytecode op %u\n",
953 (unsigned int) *(bytecode_opcode_t
*) pc
);
963 printk(KERN_WARNING
"LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
964 (unsigned int) *(bytecode_opcode_t
*) pc
);
968 OP(BYTECODE_OP_EQ_STRING
):
972 res
= (stack_strcmp(stack
, top
, "==") == 0);
973 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
975 estack_ax_t
= REG_S64
;
976 next_pc
+= sizeof(struct binary_op
);
979 OP(BYTECODE_OP_NE_STRING
):
983 res
= (stack_strcmp(stack
, top
, "!=") != 0);
984 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
986 estack_ax_t
= REG_S64
;
987 next_pc
+= sizeof(struct binary_op
);
990 OP(BYTECODE_OP_GT_STRING
):
994 res
= (stack_strcmp(stack
, top
, ">") > 0);
995 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
997 estack_ax_t
= REG_S64
;
998 next_pc
+= sizeof(struct binary_op
);
1001 OP(BYTECODE_OP_LT_STRING
):
1005 res
= (stack_strcmp(stack
, top
, "<") < 0);
1006 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1008 estack_ax_t
= REG_S64
;
1009 next_pc
+= sizeof(struct binary_op
);
1012 OP(BYTECODE_OP_GE_STRING
):
1016 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
1017 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1019 estack_ax_t
= REG_S64
;
1020 next_pc
+= sizeof(struct binary_op
);
1023 OP(BYTECODE_OP_LE_STRING
):
1027 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
1028 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1030 estack_ax_t
= REG_S64
;
1031 next_pc
+= sizeof(struct binary_op
);
1035 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING
):
1039 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
1040 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1042 estack_ax_t
= REG_S64
;
1043 next_pc
+= sizeof(struct binary_op
);
1046 OP(BYTECODE_OP_NE_STAR_GLOB_STRING
):
1050 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
1051 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1053 estack_ax_t
= REG_S64
;
1054 next_pc
+= sizeof(struct binary_op
);
1058 OP(BYTECODE_OP_EQ_S64
):
1062 res
= (estack_bx_v
== estack_ax_v
);
1063 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1065 estack_ax_t
= REG_S64
;
1066 next_pc
+= sizeof(struct binary_op
);
1069 OP(BYTECODE_OP_NE_S64
):
1073 res
= (estack_bx_v
!= estack_ax_v
);
1074 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1076 estack_ax_t
= REG_S64
;
1077 next_pc
+= sizeof(struct binary_op
);
1080 OP(BYTECODE_OP_GT_S64
):
1084 res
= (estack_bx_v
> estack_ax_v
);
1085 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1087 estack_ax_t
= REG_S64
;
1088 next_pc
+= sizeof(struct binary_op
);
1091 OP(BYTECODE_OP_LT_S64
):
1095 res
= (estack_bx_v
< estack_ax_v
);
1096 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1098 estack_ax_t
= REG_S64
;
1099 next_pc
+= sizeof(struct binary_op
);
1102 OP(BYTECODE_OP_GE_S64
):
1106 res
= (estack_bx_v
>= estack_ax_v
);
1107 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1109 estack_ax_t
= REG_S64
;
1110 next_pc
+= sizeof(struct binary_op
);
1113 OP(BYTECODE_OP_LE_S64
):
1117 res
= (estack_bx_v
<= estack_ax_v
);
1118 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1120 estack_ax_t
= REG_S64
;
1121 next_pc
+= sizeof(struct binary_op
);
1125 OP(BYTECODE_OP_EQ_DOUBLE
):
1126 OP(BYTECODE_OP_NE_DOUBLE
):
1127 OP(BYTECODE_OP_GT_DOUBLE
):
1128 OP(BYTECODE_OP_LT_DOUBLE
):
1129 OP(BYTECODE_OP_GE_DOUBLE
):
1130 OP(BYTECODE_OP_LE_DOUBLE
):
1136 /* Mixed S64-double binary comparators */
1137 OP(BYTECODE_OP_EQ_DOUBLE_S64
):
1138 OP(BYTECODE_OP_NE_DOUBLE_S64
):
1139 OP(BYTECODE_OP_GT_DOUBLE_S64
):
1140 OP(BYTECODE_OP_LT_DOUBLE_S64
):
1141 OP(BYTECODE_OP_GE_DOUBLE_S64
):
1142 OP(BYTECODE_OP_LE_DOUBLE_S64
):
1143 OP(BYTECODE_OP_EQ_S64_DOUBLE
):
1144 OP(BYTECODE_OP_NE_S64_DOUBLE
):
1145 OP(BYTECODE_OP_GT_S64_DOUBLE
):
1146 OP(BYTECODE_OP_LT_S64_DOUBLE
):
1147 OP(BYTECODE_OP_GE_S64_DOUBLE
):
1148 OP(BYTECODE_OP_LE_S64_DOUBLE
):
1153 OP(BYTECODE_OP_BIT_RSHIFT
):
1157 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1162 /* Catch undefined behavior. */
1163 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1167 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1168 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1170 estack_ax_t
= REG_U64
;
1171 next_pc
+= sizeof(struct binary_op
);
1174 OP(BYTECODE_OP_BIT_LSHIFT
):
1178 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1183 /* Catch undefined behavior. */
1184 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1188 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1189 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1191 estack_ax_t
= REG_U64
;
1192 next_pc
+= sizeof(struct binary_op
);
1195 OP(BYTECODE_OP_BIT_AND
):
1199 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1204 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1205 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1207 estack_ax_t
= REG_U64
;
1208 next_pc
+= sizeof(struct binary_op
);
1211 OP(BYTECODE_OP_BIT_OR
):
1215 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1220 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1221 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1223 estack_ax_t
= REG_U64
;
1224 next_pc
+= sizeof(struct binary_op
);
1227 OP(BYTECODE_OP_BIT_XOR
):
1231 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1236 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1237 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1239 estack_ax_t
= REG_U64
;
1240 next_pc
+= sizeof(struct binary_op
);
1245 OP(BYTECODE_OP_UNARY_PLUS
):
1246 OP(BYTECODE_OP_UNARY_MINUS
):
1247 OP(BYTECODE_OP_UNARY_NOT
):
1248 printk(KERN_WARNING
"LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1249 (unsigned int) *(bytecode_opcode_t
*) pc
);
1254 OP(BYTECODE_OP_UNARY_BIT_NOT
):
1256 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1257 estack_ax_t
= REG_S64
;
1258 next_pc
+= sizeof(struct unary_op
);
1262 OP(BYTECODE_OP_UNARY_PLUS_S64
):
1264 next_pc
+= sizeof(struct unary_op
);
1267 OP(BYTECODE_OP_UNARY_MINUS_S64
):
1269 estack_ax_v
= -estack_ax_v
;
1270 estack_ax_t
= REG_S64
;
1271 next_pc
+= sizeof(struct unary_op
);
1274 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE
):
1275 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE
):
1280 OP(BYTECODE_OP_UNARY_NOT_S64
):
1282 estack_ax_v
= !estack_ax_v
;
1283 estack_ax_t
= REG_S64
;
1284 next_pc
+= sizeof(struct unary_op
);
1287 OP(BYTECODE_OP_UNARY_NOT_DOUBLE
):
1294 OP(BYTECODE_OP_AND
):
1296 struct logical_op
*insn
= (struct logical_op
*) pc
;
1298 /* If AX is 0, skip and evaluate to 0 */
1299 if (unlikely(estack_ax_v
== 0)) {
1300 dbg_printk("Jumping to bytecode offset %u\n",
1301 (unsigned int) insn
->skip_offset
);
1302 next_pc
= start_pc
+ insn
->skip_offset
;
1304 /* Pop 1 when jump not taken */
1305 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1306 next_pc
+= sizeof(struct logical_op
);
1312 struct logical_op
*insn
= (struct logical_op
*) pc
;
1314 /* If AX is nonzero, skip and evaluate to 1 */
1316 if (unlikely(estack_ax_v
!= 0)) {
1318 dbg_printk("Jumping to bytecode offset %u\n",
1319 (unsigned int) insn
->skip_offset
);
1320 next_pc
= start_pc
+ insn
->skip_offset
;
1322 /* Pop 1 when jump not taken */
1323 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1324 next_pc
+= sizeof(struct logical_op
);
1330 /* load field ref */
1331 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING
):
1333 struct load_op
*insn
= (struct load_op
*) pc
;
1334 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1336 dbg_printk("load field ref offset %u type string\n",
1338 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1339 estack_ax(stack
, top
)->u
.s
.str
=
1340 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1341 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1342 dbg_printk("Bytecode warning: loading a NULL string.\n");
1346 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1347 estack_ax(stack
, top
)->u
.s
.literal_type
=
1348 ESTACK_STRING_LITERAL_TYPE_NONE
;
1349 estack_ax(stack
, top
)->u
.s
.user
= 0;
1350 estack_ax(stack
, top
)->type
= REG_STRING
;
1351 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1352 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1356 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
):
1358 struct load_op
*insn
= (struct load_op
*) pc
;
1359 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1361 dbg_printk("load field ref offset %u type sequence\n",
1363 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1364 estack_ax(stack
, top
)->u
.s
.seq_len
=
1365 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
1366 estack_ax(stack
, top
)->u
.s
.str
=
1367 *(const char **) (&interpreter_stack_data
[ref
->offset
1368 + sizeof(unsigned long)]);
1369 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1370 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1374 estack_ax(stack
, top
)->u
.s
.literal_type
=
1375 ESTACK_STRING_LITERAL_TYPE_NONE
;
1376 estack_ax(stack
, top
)->u
.s
.user
= 0;
1377 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1381 OP(BYTECODE_OP_LOAD_FIELD_REF_S64
):
1383 struct load_op
*insn
= (struct load_op
*) pc
;
1384 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1386 dbg_printk("load field ref offset %u type s64\n",
1388 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1390 ((struct literal_numeric
*) &interpreter_stack_data
[ref
->offset
])->v
;
1391 estack_ax_t
= REG_S64
;
1392 dbg_printk("ref load s64 %lld\n",
1393 (long long) estack_ax_v
);
1394 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1398 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
):
1404 /* load from immediate operand */
1405 OP(BYTECODE_OP_LOAD_STRING
):
1407 struct load_op
*insn
= (struct load_op
*) pc
;
1409 dbg_printk("load string %s\n", insn
->data
);
1410 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1411 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1412 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1413 estack_ax(stack
, top
)->u
.s
.literal_type
=
1414 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
1415 estack_ax(stack
, top
)->u
.s
.user
= 0;
1416 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1420 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING
):
1422 struct load_op
*insn
= (struct load_op
*) pc
;
1424 dbg_printk("load globbing pattern %s\n", insn
->data
);
1425 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1426 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1427 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1428 estack_ax(stack
, top
)->u
.s
.literal_type
=
1429 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
1430 estack_ax(stack
, top
)->u
.s
.user
= 0;
1431 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1435 OP(BYTECODE_OP_LOAD_S64
):
1437 struct load_op
*insn
= (struct load_op
*) pc
;
1439 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1440 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
1441 estack_ax_t
= REG_S64
;
1442 dbg_printk("load s64 %lld\n",
1443 (long long) estack_ax_v
);
1444 next_pc
+= sizeof(struct load_op
)
1445 + sizeof(struct literal_numeric
);
1449 OP(BYTECODE_OP_LOAD_DOUBLE
):
1456 OP(BYTECODE_OP_CAST_TO_S64
):
1457 printk(KERN_WARNING
"LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1458 (unsigned int) *(bytecode_opcode_t
*) pc
);
1462 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64
):
1468 OP(BYTECODE_OP_CAST_NOP
):
1470 next_pc
+= sizeof(struct cast_op
);
1474 /* get context ref */
1475 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING
):
1477 struct load_op
*insn
= (struct load_op
*) pc
;
1478 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1479 struct lttng_ctx_field
*ctx_field
;
1480 union lttng_ctx_value v
;
1482 dbg_printk("get context ref offset %u type string\n",
1484 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1485 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1486 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1487 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
1488 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1489 dbg_printk("Bytecode warning: loading a NULL string.\n");
1493 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1494 estack_ax(stack
, top
)->u
.s
.literal_type
=
1495 ESTACK_STRING_LITERAL_TYPE_NONE
;
1496 estack_ax(stack
, top
)->u
.s
.user
= 0;
1497 estack_ax(stack
, top
)->type
= REG_STRING
;
1498 dbg_printk("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1499 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1503 OP(BYTECODE_OP_GET_CONTEXT_REF_S64
):
1505 struct load_op
*insn
= (struct load_op
*) pc
;
1506 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1507 struct lttng_ctx_field
*ctx_field
;
1508 union lttng_ctx_value v
;
1510 dbg_printk("get context ref offset %u type s64\n",
1512 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1513 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1514 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1515 estack_ax_v
= v
.s64
;
1516 estack_ax_t
= REG_S64
;
1517 dbg_printk("ref get context s64 %lld\n",
1518 (long long) estack_ax_v
);
1519 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1523 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
):
1529 /* load userspace field ref */
1530 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
):
1532 struct load_op
*insn
= (struct load_op
*) pc
;
1533 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1535 dbg_printk("load field ref offset %u type user string\n",
1537 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1538 estack_ax(stack
, top
)->u
.s
.user_str
=
1539 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1540 if (unlikely(!estack_ax(stack
, top
)->u
.s
.user_str
)) {
1541 dbg_printk("Bytecode warning: loading a NULL string.\n");
1545 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1546 estack_ax(stack
, top
)->u
.s
.literal_type
=
1547 ESTACK_STRING_LITERAL_TYPE_NONE
;
1548 estack_ax(stack
, top
)->u
.s
.user
= 1;
1549 estack_ax(stack
, top
)->type
= REG_STRING
;
1550 dbg_load_ref_user_str_printk(estack_ax(stack
, top
));
1551 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1555 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
):
1557 struct load_op
*insn
= (struct load_op
*) pc
;
1558 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1560 dbg_printk("load field ref offset %u type user sequence\n",
1562 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1563 estack_ax(stack
, top
)->u
.s
.seq_len
=
1564 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
1565 estack_ax(stack
, top
)->u
.s
.user_str
=
1566 *(const char **) (&interpreter_stack_data
[ref
->offset
1567 + sizeof(unsigned long)]);
1568 if (unlikely(!estack_ax(stack
, top
)->u
.s
.user_str
)) {
1569 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1573 estack_ax(stack
, top
)->u
.s
.literal_type
=
1574 ESTACK_STRING_LITERAL_TYPE_NONE
;
1575 estack_ax(stack
, top
)->u
.s
.user
= 1;
1576 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1580 OP(BYTECODE_OP_GET_CONTEXT_ROOT
):
1582 dbg_printk("op get context root\n");
1583 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1584 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
1585 /* "field" only needed for variants. */
1586 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1587 estack_ax(stack
, top
)->type
= REG_PTR
;
1588 next_pc
+= sizeof(struct load_op
);
1592 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT
):
1598 OP(BYTECODE_OP_GET_PAYLOAD_ROOT
):
1600 dbg_printk("op get app payload root\n");
1601 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1602 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
1603 estack_ax(stack
, top
)->u
.ptr
.ptr
= interpreter_stack_data
;
1604 /* "field" only needed for variants. */
1605 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1606 estack_ax(stack
, top
)->type
= REG_PTR
;
1607 next_pc
+= sizeof(struct load_op
);
1611 OP(BYTECODE_OP_GET_SYMBOL
):
1613 dbg_printk("op get symbol\n");
1614 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
1616 printk(KERN_WARNING
"LTTng: bytecode: Nested fields not implemented yet.\n");
1619 case LOAD_ROOT_CONTEXT
:
1620 case LOAD_ROOT_APP_CONTEXT
:
1621 case LOAD_ROOT_PAYLOAD
:
1623 * symbol lookup is performed by
1629 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1633 OP(BYTECODE_OP_GET_SYMBOL_FIELD
):
1636 * Used for first variant encountered in a
1637 * traversal. Variants are not implemented yet.
1643 OP(BYTECODE_OP_GET_INDEX_U16
):
1645 struct load_op
*insn
= (struct load_op
*) pc
;
1646 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1648 dbg_printk("op get index u16\n");
1649 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1652 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1653 estack_ax_t
= estack_ax(stack
, top
)->type
;
1654 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1658 OP(BYTECODE_OP_GET_INDEX_U64
):
1660 struct load_op
*insn
= (struct load_op
*) pc
;
1661 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1663 dbg_printk("op get index u64\n");
1664 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1667 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1668 estack_ax_t
= estack_ax(stack
, top
)->type
;
1669 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1673 OP(BYTECODE_OP_LOAD_FIELD
):
1675 dbg_printk("op load field\n");
1676 ret
= dynamic_load_field(estack_ax(stack
, top
));
1679 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1680 estack_ax_t
= estack_ax(stack
, top
)->type
;
1681 next_pc
+= sizeof(struct load_op
);
1685 OP(BYTECODE_OP_LOAD_FIELD_S8
):
1687 dbg_printk("op load field s8\n");
1689 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1690 estack_ax_t
= REG_S64
;
1691 next_pc
+= sizeof(struct load_op
);
1694 OP(BYTECODE_OP_LOAD_FIELD_S16
):
1696 dbg_printk("op load field s16\n");
1698 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1699 estack_ax_t
= REG_S64
;
1700 next_pc
+= sizeof(struct load_op
);
1703 OP(BYTECODE_OP_LOAD_FIELD_S32
):
1705 dbg_printk("op load field s32\n");
1707 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1708 estack_ax_t
= REG_S64
;
1709 next_pc
+= sizeof(struct load_op
);
1712 OP(BYTECODE_OP_LOAD_FIELD_S64
):
1714 dbg_printk("op load field s64\n");
1716 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1717 estack_ax_t
= REG_S64
;
1718 next_pc
+= sizeof(struct load_op
);
1721 OP(BYTECODE_OP_LOAD_FIELD_U8
):
1723 dbg_printk("op load field u8\n");
1725 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1726 estack_ax_t
= REG_S64
;
1727 next_pc
+= sizeof(struct load_op
);
1730 OP(BYTECODE_OP_LOAD_FIELD_U16
):
1732 dbg_printk("op load field u16\n");
1734 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1735 estack_ax_t
= REG_S64
;
1736 next_pc
+= sizeof(struct load_op
);
1739 OP(BYTECODE_OP_LOAD_FIELD_U32
):
1741 dbg_printk("op load field u32\n");
1743 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1744 estack_ax_t
= REG_S64
;
1745 next_pc
+= sizeof(struct load_op
);
1748 OP(BYTECODE_OP_LOAD_FIELD_U64
):
1750 dbg_printk("op load field u64\n");
1752 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1753 estack_ax_t
= REG_S64
;
1754 next_pc
+= sizeof(struct load_op
);
1757 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE
):
1763 OP(BYTECODE_OP_LOAD_FIELD_STRING
):
1767 dbg_printk("op load field string\n");
1768 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1769 estack_ax(stack
, top
)->u
.s
.str
= str
;
1770 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1771 dbg_printk("Bytecode warning: loading a NULL string.\n");
1775 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1776 estack_ax(stack
, top
)->u
.s
.literal_type
=
1777 ESTACK_STRING_LITERAL_TYPE_NONE
;
1778 estack_ax(stack
, top
)->type
= REG_STRING
;
1779 next_pc
+= sizeof(struct load_op
);
1783 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE
):
1787 dbg_printk("op load field string sequence\n");
1788 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
1789 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
1790 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
1791 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1792 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1796 estack_ax(stack
, top
)->u
.s
.literal_type
=
1797 ESTACK_STRING_LITERAL_TYPE_NONE
;
1798 estack_ax(stack
, top
)->type
= REG_STRING
;
1799 next_pc
+= sizeof(struct load_op
);
1805 /* Return _DISCARD on error. */
1807 return LTTNG_INTERPRETER_DISCARD
;
1810 return lttng_bytecode_interpret_format_output(
1811 estack_ax(stack
, top
), output
);
/*
 * Exempt bytecode_interpret from objtool stack-frame validation
 * (annotation provided by wrapper/objtool.h, included at the top of
 * this file).
 * NOTE(review): presumably needed because the OP() dispatch in the
 * interpreter uses computed gotos, which objtool cannot follow —
 * confirm against the OP()/START_OP macro definitions.
 */
LTTNG_STACK_FRAME_NON_STANDARD(bytecode_interpret);
1818 uint64_t lttng_bytecode_filter_interpret(void *filter_data
,
1819 struct lttng_probe_ctx
*lttng_probe_ctx
,
1820 const char *filter_stack_data
)
1822 return bytecode_interpret(filter_data
, lttng_probe_ctx
,
1823 filter_stack_data
, NULL
);
/*
 * Capture-bytecode entry point.
 *
 * Runs the bytecode interpreter over @capture_stack_data for the
 * capture program held in @capture_data. Unlike the filter entry
 * point, a capture passes @output so the interpreter can hand back the
 * captured value (via lttng_bytecode_interpret_format_output).
 *
 * Returns the interpreter's result as a uint64_t.
 */
uint64_t lttng_bytecode_capture_interpret(void *capture_data,
		struct lttng_probe_ctx *lttng_probe_ctx,
		const char *capture_stack_data,
		struct lttng_interpreter_output *output)
{
	uint64_t result;

	result = bytecode_interpret(capture_data, lttng_probe_ctx,
			capture_stack_data, output);
	return result;
}