1 /* SPDX-License-Identifier: MIT
3 * lttng-bytecode-interpreter.c
5 * LTTng modules bytecode interpreter.
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
15 #include <lttng/lttng-bytecode.h>
16 #include <lttng/string-utils.h>
19 * get_char should be called with page fault handler disabled if it is expected
20 * to handle user-space read.
23 char get_char(struct estack_entry
*reg
, size_t offset
)
25 if (unlikely(offset
>= reg
->u
.s
.seq_len
))
30 /* Handle invalid access as end of string. */
31 if (unlikely(!lttng_access_ok(VERIFY_READ
,
32 reg
->u
.s
.user_str
+ offset
,
35 /* Handle fault (nonzero return value) as end of string. */
36 if (unlikely(__copy_from_user_inatomic(&c
,
37 reg
->u
.s
.user_str
+ offset
,
42 return reg
->u
.s
.str
[offset
];
48 * -2: unknown escape char.
52 int parse_char(struct estack_entry
*reg
, char *c
, size_t *offset
)
57 *c
= get_char(reg
, *offset
);
73 char get_char_at_cb(size_t at
, void *data
)
75 return get_char(data
, at
);
79 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
81 bool has_user
= false;
83 struct estack_entry
*pattern_reg
;
84 struct estack_entry
*candidate_reg
;
86 /* Disable the page fault handler when reading from userspace. */
87 if (estack_bx(stack
, top
)->u
.s
.user
88 || estack_ax(stack
, top
)->u
.s
.user
) {
93 /* Find out which side is the pattern vs. the candidate. */
94 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
95 pattern_reg
= estack_ax(stack
, top
);
96 candidate_reg
= estack_bx(stack
, top
);
98 pattern_reg
= estack_bx(stack
, top
);
99 candidate_reg
= estack_ax(stack
, top
);
102 /* Perform the match operation. */
103 result
= !strutils_star_glob_match_char_cb(get_char_at_cb
,
104 pattern_reg
, get_char_at_cb
, candidate_reg
);
112 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
114 size_t offset_bx
= 0, offset_ax
= 0;
115 int diff
, has_user
= 0;
117 if (estack_bx(stack
, top
)->u
.s
.user
118 || estack_ax(stack
, top
)->u
.s
.user
) {
126 char char_bx
, char_ax
;
128 char_bx
= get_char(estack_bx(stack
, top
), offset_bx
);
129 char_ax
= get_char(estack_ax(stack
, top
), offset_ax
);
131 if (unlikely(char_bx
== '\0')) {
132 if (char_ax
== '\0') {
136 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
137 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
138 ret
= parse_char(estack_ax(stack
, top
),
139 &char_ax
, &offset_ax
);
149 if (unlikely(char_ax
== '\0')) {
150 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
151 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
152 ret
= parse_char(estack_bx(stack
, top
),
153 &char_bx
, &offset_bx
);
162 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
163 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
164 ret
= parse_char(estack_bx(stack
, top
),
165 &char_bx
, &offset_bx
);
169 } else if (ret
== -2) {
172 /* else compare both char */
174 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
175 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
176 ret
= parse_char(estack_ax(stack
, top
),
177 &char_ax
, &offset_ax
);
181 } else if (ret
== -2) {
198 diff
= char_bx
- char_ax
;
210 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data
,
211 struct lttng_probe_ctx
*lttng_probe_ctx
,
212 const char *filter_stack_data
)
214 return LTTNG_INTERPRETER_DISCARD
;
/*
 * NOTE(review): this region is a damaged extract of the two interpreter
 * dispatch implementations: a switch-based fallback (when the compiler
 * cannot take the address of labels) and a computed-goto dispatch table.
 * The "#define START_OP" / "#define END_OP" header lines were lost in
 * extraction, so the backslash-continued lines below are orphaned macro
 * bodies — restore them from the pristine file before building.
 */
217 #ifdef INTERPRETER_USE_SWITCH
220  * Fallback for compilers that do not support taking address of labels.
/*
 * NOTE(review): the switch variant below indexes bytecode->data[0] while
 * the dispatch-table variant indexes bytecode->code[0] — confirm against
 * struct bytecode_runtime which member is correct here.
 */
224 start_pc = &bytecode->data[0]; \
225 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
227 dbg_printk("LTTng: Executing op %s (%u)\n", \
228 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
229 (unsigned int) *(bytecode_opcode_t *) pc); \
230 switch (*(bytecode_opcode_t *) pc) {
/* In the switch fallback, OP(name) is simply a case label. */
232 #define OP(name) case name
242  * Dispatch-table based interpreter.
/* Computed-goto variant: jump through the dispatch[] label table. */
246 start_pc = &bytecode->code[0]; \
247 pc = next_pc = start_pc; \
248 if (unlikely(pc - start_pc >= bytecode->len)) \
250 goto *dispatch[*(bytecode_opcode_t *) pc];
257 goto *dispatch[*(bytecode_opcode_t *) pc];
/* True for the two integer register types used by the estack. */
263 #define IS_INTEGER_REGISTER(reg_type) \
264 (reg_type == REG_S64 || reg_type == REG_U64)
266 static int context_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
267 struct load_ptr
*ptr
,
271 struct lttng_ctx_field
*ctx_field
;
272 struct lttng_event_field
*field
;
273 union lttng_ctx_value v
;
275 ctx_field
= <tng_static_ctx
->fields
[idx
];
276 field
= &ctx_field
->event_field
;
277 ptr
->type
= LOAD_OBJECT
;
278 /* field is only used for types nested within variants. */
281 switch (field
->type
.atype
) {
283 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
284 if (field
->type
.u
.integer
.signedness
) {
285 ptr
->object_type
= OBJECT_TYPE_S64
;
287 ptr
->ptr
= &ptr
->u
.s64
;
289 ptr
->object_type
= OBJECT_TYPE_U64
;
290 ptr
->u
.u64
= v
.s64
; /* Cast. */
291 ptr
->ptr
= &ptr
->u
.u64
;
294 case atype_enum_nestable
:
296 const struct lttng_integer_type
*itype
=
297 &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
299 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
300 if (itype
->signedness
) {
301 ptr
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
303 ptr
->ptr
= &ptr
->u
.s64
;
305 ptr
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
306 ptr
->u
.u64
= v
.s64
; /* Cast. */
307 ptr
->ptr
= &ptr
->u
.u64
;
311 case atype_array_nestable
:
312 if (!lttng_is_bytewise_integer(field
->type
.u
.array_nestable
.elem_type
)) {
313 printk(KERN_WARNING
"LTTng: bytecode: Array nesting only supports integer types.\n");
316 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
317 printk(KERN_WARNING
"LTTng: bytecode: Only string arrays are supported for contexts.\n");
320 ptr
->object_type
= OBJECT_TYPE_STRING
;
321 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
324 case atype_sequence_nestable
:
325 if (!lttng_is_bytewise_integer(field
->type
.u
.sequence_nestable
.elem_type
)) {
326 printk(KERN_WARNING
"LTTng: bytecode: Sequence nesting only supports integer types.\n");
329 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
330 printk(KERN_WARNING
"LTTng: bytecode: Only string sequences are supported for contexts.\n");
333 ptr
->object_type
= OBJECT_TYPE_STRING
;
334 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
338 ptr
->object_type
= OBJECT_TYPE_STRING
;
339 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
342 case atype_struct_nestable
:
343 printk(KERN_WARNING
"LTTng: bytecode: Structure type cannot be loaded.\n");
345 case atype_variant_nestable
:
346 printk(KERN_WARNING
"LTTng: bytecode: Variant type cannot be loaded.\n");
349 printk(KERN_WARNING
"LTTng: bytecode: Unknown type: %d", (int) field
->type
.atype
);
355 static int dynamic_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
356 struct bytecode_runtime
*runtime
,
357 uint64_t index
, struct estack_entry
*stack_top
)
360 const struct bytecode_get_index_data
*gid
;
362 gid
= (const struct bytecode_get_index_data
*) &runtime
->data
[index
];
363 switch (stack_top
->u
.ptr
.type
) {
365 switch (stack_top
->u
.ptr
.object_type
) {
366 case OBJECT_TYPE_ARRAY
:
370 WARN_ON_ONCE(gid
->offset
>= gid
->array_len
);
371 /* Skip count (unsigned long) */
372 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
373 ptr
= ptr
+ gid
->offset
;
374 stack_top
->u
.ptr
.ptr
= ptr
;
375 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
376 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
377 BUG_ON(stack_top
->u
.ptr
.field
->type
.atype
!= atype_array_nestable
);
378 stack_top
->u
.ptr
.field
= NULL
;
381 case OBJECT_TYPE_SEQUENCE
:
386 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
387 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
388 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
392 ptr
= ptr
+ gid
->offset
;
393 stack_top
->u
.ptr
.ptr
= ptr
;
394 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
395 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
396 BUG_ON(stack_top
->u
.ptr
.field
->type
.atype
!= atype_sequence_nestable
);
397 stack_top
->u
.ptr
.field
= NULL
;
400 case OBJECT_TYPE_STRUCT
:
401 printk(KERN_WARNING
"LTTng: bytecode: Nested structures are not supported yet.\n");
404 case OBJECT_TYPE_VARIANT
:
406 printk(KERN_WARNING
"LTTng: bytecode: Unexpected get index type %d",
407 (int) stack_top
->u
.ptr
.object_type
);
412 case LOAD_ROOT_CONTEXT
:
413 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
415 ret
= context_get_index(lttng_probe_ctx
,
423 case LOAD_ROOT_PAYLOAD
:
424 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
425 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
426 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
427 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
428 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
429 stack_top
->u
.ptr
.field
= gid
->field
;
430 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
434 stack_top
->type
= REG_PTR
;
442 static int dynamic_load_field(struct estack_entry
*stack_top
)
446 switch (stack_top
->u
.ptr
.type
) {
449 case LOAD_ROOT_CONTEXT
:
450 case LOAD_ROOT_APP_CONTEXT
:
451 case LOAD_ROOT_PAYLOAD
:
453 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
457 switch (stack_top
->u
.ptr
.object_type
) {
459 dbg_printk("op load field s8\n");
460 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
461 stack_top
->type
= REG_S64
;
463 case OBJECT_TYPE_S16
:
467 dbg_printk("op load field s16\n");
468 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
469 if (stack_top
->u
.ptr
.rev_bo
)
471 stack_top
->u
.v
= tmp
;
472 stack_top
->type
= REG_S64
;
475 case OBJECT_TYPE_S32
:
479 dbg_printk("op load field s32\n");
480 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
481 if (stack_top
->u
.ptr
.rev_bo
)
483 stack_top
->u
.v
= tmp
;
484 stack_top
->type
= REG_S64
;
487 case OBJECT_TYPE_S64
:
491 dbg_printk("op load field s64\n");
492 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
493 if (stack_top
->u
.ptr
.rev_bo
)
495 stack_top
->u
.v
= tmp
;
496 stack_top
->type
= REG_S64
;
499 case OBJECT_TYPE_SIGNED_ENUM
:
503 dbg_printk("op load field signed enumeration\n");
504 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
505 if (stack_top
->u
.ptr
.rev_bo
)
507 stack_top
->u
.v
= tmp
;
508 stack_top
->type
= REG_S64
;
512 dbg_printk("op load field u8\n");
513 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
514 stack_top
->type
= REG_U64
;
516 case OBJECT_TYPE_U16
:
520 dbg_printk("op load field u16\n");
521 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
522 if (stack_top
->u
.ptr
.rev_bo
)
524 stack_top
->u
.v
= tmp
;
525 stack_top
->type
= REG_U64
;
528 case OBJECT_TYPE_U32
:
532 dbg_printk("op load field u32\n");
533 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
534 if (stack_top
->u
.ptr
.rev_bo
)
536 stack_top
->u
.v
= tmp
;
537 stack_top
->type
= REG_U64
;
540 case OBJECT_TYPE_U64
:
544 dbg_printk("op load field u64\n");
545 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
546 if (stack_top
->u
.ptr
.rev_bo
)
548 stack_top
->u
.v
= tmp
;
549 stack_top
->type
= REG_U64
;
552 case OBJECT_TYPE_UNSIGNED_ENUM
:
556 dbg_printk("op load field unsigned enumeration\n");
557 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
558 if (stack_top
->u
.ptr
.rev_bo
)
560 stack_top
->u
.v
= tmp
;
561 stack_top
->type
= REG_U64
;
564 case OBJECT_TYPE_STRING
:
568 dbg_printk("op load field string\n");
569 str
= (const char *) stack_top
->u
.ptr
.ptr
;
570 stack_top
->u
.s
.str
= str
;
571 if (unlikely(!stack_top
->u
.s
.str
)) {
572 dbg_printk("Bytecode warning: loading a NULL string.\n");
576 stack_top
->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
577 stack_top
->u
.s
.literal_type
=
578 ESTACK_STRING_LITERAL_TYPE_NONE
;
579 stack_top
->type
= REG_STRING
;
582 case OBJECT_TYPE_STRING_SEQUENCE
:
586 dbg_printk("op load field string sequence\n");
587 ptr
= stack_top
->u
.ptr
.ptr
;
588 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
589 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
590 if (unlikely(!stack_top
->u
.s
.str
)) {
591 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
595 stack_top
->u
.s
.literal_type
=
596 ESTACK_STRING_LITERAL_TYPE_NONE
;
597 stack_top
->type
= REG_STRING
;
600 case OBJECT_TYPE_DYNAMIC
:
602 * Dynamic types in context are looked up
603 * by context get index.
607 case OBJECT_TYPE_DOUBLE
:
610 case OBJECT_TYPE_SEQUENCE
:
611 case OBJECT_TYPE_ARRAY
:
612 case OBJECT_TYPE_STRUCT
:
613 case OBJECT_TYPE_VARIANT
:
614 printk(KERN_WARNING
"LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
625 int lttng_bytecode_interpret_format_output(struct estack_entry
*ax
,
626 struct lttng_interpreter_output
*output
)
633 output
->type
= LTTNG_INTERPRETER_TYPE_S64
;
634 output
->u
.s
= ax
->u
.v
;
637 output
->type
= LTTNG_INTERPRETER_TYPE_U64
;
638 output
->u
.u
= (uint64_t) ax
->u
.v
;
641 output
->type
= LTTNG_INTERPRETER_TYPE_STRING
;
642 output
->u
.str
.str
= ax
->u
.s
.str
;
643 output
->u
.str
.len
= ax
->u
.s
.seq_len
;
646 switch (ax
->u
.ptr
.object_type
) {
648 case OBJECT_TYPE_S16
:
649 case OBJECT_TYPE_S32
:
650 case OBJECT_TYPE_S64
:
652 case OBJECT_TYPE_U16
:
653 case OBJECT_TYPE_U32
:
654 case OBJECT_TYPE_U64
:
655 case OBJECT_TYPE_DOUBLE
:
656 case OBJECT_TYPE_STRING
:
657 case OBJECT_TYPE_STRING_SEQUENCE
:
658 ret
= dynamic_load_field(ax
);
661 /* Retry after loading ptr into stack top. */
663 case OBJECT_TYPE_SEQUENCE
:
664 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
665 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
666 output
->u
.sequence
.nr_elem
= *(unsigned long *) ax
->u
.ptr
.ptr
;
667 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.sequence_nestable
.elem_type
;
669 case OBJECT_TYPE_ARRAY
:
670 /* Skip count (unsigned long) */
671 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
672 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
673 output
->u
.sequence
.nr_elem
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.length
;
674 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.elem_type
;
676 case OBJECT_TYPE_SIGNED_ENUM
:
677 ret
= dynamic_load_field(ax
);
680 output
->type
= LTTNG_INTERPRETER_TYPE_SIGNED_ENUM
;
681 output
->u
.s
= ax
->u
.v
;
683 case OBJECT_TYPE_UNSIGNED_ENUM
:
684 ret
= dynamic_load_field(ax
);
687 output
->type
= LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM
;
688 output
->u
.u
= ax
->u
.v
;
690 case OBJECT_TYPE_STRUCT
:
691 case OBJECT_TYPE_VARIANT
:
697 case REG_STAR_GLOB_STRING
:
698 case REG_TYPE_UNKNOWN
:
703 return LTTNG_INTERPRETER_RECORD_FLAG
;
707 * Return 0 (discard), or raise the 0x1 flag (log event).
708 * Currently, other flags are kept for future extensions and have no
712 uint64_t bytecode_interpret(void *interpreter_data
,
713 struct lttng_probe_ctx
*lttng_probe_ctx
,
714 const char *interpreter_stack_data
,
715 struct lttng_interpreter_output
*output
)
717 struct bytecode_runtime
*bytecode
= interpreter_data
;
718 void *pc
, *next_pc
, *start_pc
;
721 struct estack _stack
;
722 struct estack
*stack
= &_stack
;
723 register int64_t ax
= 0, bx
= 0;
724 register enum entry_type ax_t
= REG_TYPE_UNKNOWN
, bx_t
= REG_TYPE_UNKNOWN
;
725 register int top
= INTERPRETER_STACK_EMPTY
;
726 #ifndef INTERPRETER_USE_SWITCH
727 static void *dispatch
[NR_BYTECODE_OPS
] = {
728 [ BYTECODE_OP_UNKNOWN
] = &&LABEL_BYTECODE_OP_UNKNOWN
,
730 [ BYTECODE_OP_RETURN
] = &&LABEL_BYTECODE_OP_RETURN
,
733 [ BYTECODE_OP_MUL
] = &&LABEL_BYTECODE_OP_MUL
,
734 [ BYTECODE_OP_DIV
] = &&LABEL_BYTECODE_OP_DIV
,
735 [ BYTECODE_OP_MOD
] = &&LABEL_BYTECODE_OP_MOD
,
736 [ BYTECODE_OP_PLUS
] = &&LABEL_BYTECODE_OP_PLUS
,
737 [ BYTECODE_OP_MINUS
] = &&LABEL_BYTECODE_OP_MINUS
,
738 [ BYTECODE_OP_BIT_RSHIFT
] = &&LABEL_BYTECODE_OP_BIT_RSHIFT
,
739 [ BYTECODE_OP_BIT_LSHIFT
] = &&LABEL_BYTECODE_OP_BIT_LSHIFT
,
740 [ BYTECODE_OP_BIT_AND
] = &&LABEL_BYTECODE_OP_BIT_AND
,
741 [ BYTECODE_OP_BIT_OR
] = &&LABEL_BYTECODE_OP_BIT_OR
,
742 [ BYTECODE_OP_BIT_XOR
] = &&LABEL_BYTECODE_OP_BIT_XOR
,
744 /* binary comparators */
745 [ BYTECODE_OP_EQ
] = &&LABEL_BYTECODE_OP_EQ
,
746 [ BYTECODE_OP_NE
] = &&LABEL_BYTECODE_OP_NE
,
747 [ BYTECODE_OP_GT
] = &&LABEL_BYTECODE_OP_GT
,
748 [ BYTECODE_OP_LT
] = &&LABEL_BYTECODE_OP_LT
,
749 [ BYTECODE_OP_GE
] = &&LABEL_BYTECODE_OP_GE
,
750 [ BYTECODE_OP_LE
] = &&LABEL_BYTECODE_OP_LE
,
752 /* string binary comparator */
753 [ BYTECODE_OP_EQ_STRING
] = &&LABEL_BYTECODE_OP_EQ_STRING
,
754 [ BYTECODE_OP_NE_STRING
] = &&LABEL_BYTECODE_OP_NE_STRING
,
755 [ BYTECODE_OP_GT_STRING
] = &&LABEL_BYTECODE_OP_GT_STRING
,
756 [ BYTECODE_OP_LT_STRING
] = &&LABEL_BYTECODE_OP_LT_STRING
,
757 [ BYTECODE_OP_GE_STRING
] = &&LABEL_BYTECODE_OP_GE_STRING
,
758 [ BYTECODE_OP_LE_STRING
] = &&LABEL_BYTECODE_OP_LE_STRING
,
760 /* globbing pattern binary comparator */
761 [ BYTECODE_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING
,
762 [ BYTECODE_OP_NE_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING
,
764 /* s64 binary comparator */
765 [ BYTECODE_OP_EQ_S64
] = &&LABEL_BYTECODE_OP_EQ_S64
,
766 [ BYTECODE_OP_NE_S64
] = &&LABEL_BYTECODE_OP_NE_S64
,
767 [ BYTECODE_OP_GT_S64
] = &&LABEL_BYTECODE_OP_GT_S64
,
768 [ BYTECODE_OP_LT_S64
] = &&LABEL_BYTECODE_OP_LT_S64
,
769 [ BYTECODE_OP_GE_S64
] = &&LABEL_BYTECODE_OP_GE_S64
,
770 [ BYTECODE_OP_LE_S64
] = &&LABEL_BYTECODE_OP_LE_S64
,
772 /* double binary comparator */
773 [ BYTECODE_OP_EQ_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE
,
774 [ BYTECODE_OP_NE_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_DOUBLE
,
775 [ BYTECODE_OP_GT_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_DOUBLE
,
776 [ BYTECODE_OP_LT_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_DOUBLE
,
777 [ BYTECODE_OP_GE_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_DOUBLE
,
778 [ BYTECODE_OP_LE_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_DOUBLE
,
780 /* Mixed S64-double binary comparators */
781 [ BYTECODE_OP_EQ_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64
,
782 [ BYTECODE_OP_NE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64
,
783 [ BYTECODE_OP_GT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64
,
784 [ BYTECODE_OP_LT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64
,
785 [ BYTECODE_OP_GE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64
,
786 [ BYTECODE_OP_LE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64
,
788 [ BYTECODE_OP_EQ_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE
,
789 [ BYTECODE_OP_NE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE
,
790 [ BYTECODE_OP_GT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE
,
791 [ BYTECODE_OP_LT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE
,
792 [ BYTECODE_OP_GE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE
,
793 [ BYTECODE_OP_LE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE
,
796 [ BYTECODE_OP_UNARY_PLUS
] = &&LABEL_BYTECODE_OP_UNARY_PLUS
,
797 [ BYTECODE_OP_UNARY_MINUS
] = &&LABEL_BYTECODE_OP_UNARY_MINUS
,
798 [ BYTECODE_OP_UNARY_NOT
] = &&LABEL_BYTECODE_OP_UNARY_NOT
,
799 [ BYTECODE_OP_UNARY_PLUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64
,
800 [ BYTECODE_OP_UNARY_MINUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64
,
801 [ BYTECODE_OP_UNARY_NOT_S64
] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64
,
802 [ BYTECODE_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE
,
803 [ BYTECODE_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE
,
804 [ BYTECODE_OP_UNARY_NOT_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE
,
807 [ BYTECODE_OP_AND
] = &&LABEL_BYTECODE_OP_AND
,
808 [ BYTECODE_OP_OR
] = &&LABEL_BYTECODE_OP_OR
,
811 [ BYTECODE_OP_LOAD_FIELD_REF
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF
,
812 [ BYTECODE_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING
,
813 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
,
814 [ BYTECODE_OP_LOAD_FIELD_REF_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64
,
815 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
,
817 /* load from immediate operand */
818 [ BYTECODE_OP_LOAD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STRING
,
819 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING
,
820 [ BYTECODE_OP_LOAD_S64
] = &&LABEL_BYTECODE_OP_LOAD_S64
,
821 [ BYTECODE_OP_LOAD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE
,
824 [ BYTECODE_OP_CAST_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_TO_S64
,
825 [ BYTECODE_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64
,
826 [ BYTECODE_OP_CAST_NOP
] = &&LABEL_BYTECODE_OP_CAST_NOP
,
828 /* get context ref */
829 [ BYTECODE_OP_GET_CONTEXT_REF
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF
,
830 [ BYTECODE_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING
,
831 [ BYTECODE_OP_GET_CONTEXT_REF_S64
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64
,
832 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
,
834 /* load userspace field ref */
835 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
,
836 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
,
838 /* Instructions for recursive traversal through composed types. */
839 [ BYTECODE_OP_GET_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT
,
840 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT
,
841 [ BYTECODE_OP_GET_PAYLOAD_ROOT
] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT
,
843 [ BYTECODE_OP_GET_SYMBOL
] = &&LABEL_BYTECODE_OP_GET_SYMBOL
,
844 [ BYTECODE_OP_GET_SYMBOL_FIELD
] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD
,
845 [ BYTECODE_OP_GET_INDEX_U16
] = &&LABEL_BYTECODE_OP_GET_INDEX_U16
,
846 [ BYTECODE_OP_GET_INDEX_U64
] = &&LABEL_BYTECODE_OP_GET_INDEX_U64
,
848 [ BYTECODE_OP_LOAD_FIELD
] = &&LABEL_BYTECODE_OP_LOAD_FIELD
,
849 [ BYTECODE_OP_LOAD_FIELD_S8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8
,
850 [ BYTECODE_OP_LOAD_FIELD_S16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16
,
851 [ BYTECODE_OP_LOAD_FIELD_S32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32
,
852 [ BYTECODE_OP_LOAD_FIELD_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64
,
853 [ BYTECODE_OP_LOAD_FIELD_U8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8
,
854 [ BYTECODE_OP_LOAD_FIELD_U16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16
,
855 [ BYTECODE_OP_LOAD_FIELD_U32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32
,
856 [ BYTECODE_OP_LOAD_FIELD_U64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64
,
857 [ BYTECODE_OP_LOAD_FIELD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING
,
858 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE
,
859 [ BYTECODE_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE
,
861 [ BYTECODE_OP_UNARY_BIT_NOT
] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT
,
863 [ BYTECODE_OP_RETURN_S64
] = &&LABEL_BYTECODE_OP_RETURN_S64
,
865 #endif /* #ifndef INTERPRETER_USE_SWITCH */
869 OP(BYTECODE_OP_UNKNOWN
):
870 OP(BYTECODE_OP_LOAD_FIELD_REF
):
871 OP(BYTECODE_OP_GET_CONTEXT_REF
):
872 #ifdef INTERPRETER_USE_SWITCH
874 #endif /* INTERPRETER_USE_SWITCH */
875 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
876 (unsigned int) *(bytecode_opcode_t
*) pc
);
880 OP(BYTECODE_OP_RETURN
):
881 OP(BYTECODE_OP_RETURN_S64
):
882 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
883 switch (estack_ax_t
) {
886 retval
= !!estack_ax_v
;
897 case REG_STAR_GLOB_STRING
:
898 case REG_TYPE_UNKNOWN
:
909 OP(BYTECODE_OP_PLUS
):
910 OP(BYTECODE_OP_MINUS
):
911 printk(KERN_WARNING
"LTTng: bytecode: unsupported bytecode op %u\n",
912 (unsigned int) *(bytecode_opcode_t
*) pc
);
922 printk(KERN_WARNING
"LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
923 (unsigned int) *(bytecode_opcode_t
*) pc
);
927 OP(BYTECODE_OP_EQ_STRING
):
931 res
= (stack_strcmp(stack
, top
, "==") == 0);
932 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
934 estack_ax_t
= REG_S64
;
935 next_pc
+= sizeof(struct binary_op
);
938 OP(BYTECODE_OP_NE_STRING
):
942 res
= (stack_strcmp(stack
, top
, "!=") != 0);
943 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
945 estack_ax_t
= REG_S64
;
946 next_pc
+= sizeof(struct binary_op
);
949 OP(BYTECODE_OP_GT_STRING
):
953 res
= (stack_strcmp(stack
, top
, ">") > 0);
954 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
956 estack_ax_t
= REG_S64
;
957 next_pc
+= sizeof(struct binary_op
);
960 OP(BYTECODE_OP_LT_STRING
):
964 res
= (stack_strcmp(stack
, top
, "<") < 0);
965 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
967 estack_ax_t
= REG_S64
;
968 next_pc
+= sizeof(struct binary_op
);
971 OP(BYTECODE_OP_GE_STRING
):
975 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
976 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
978 estack_ax_t
= REG_S64
;
979 next_pc
+= sizeof(struct binary_op
);
982 OP(BYTECODE_OP_LE_STRING
):
986 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
987 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
989 estack_ax_t
= REG_S64
;
990 next_pc
+= sizeof(struct binary_op
);
994 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING
):
998 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
999 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1001 estack_ax_t
= REG_S64
;
1002 next_pc
+= sizeof(struct binary_op
);
1005 OP(BYTECODE_OP_NE_STAR_GLOB_STRING
):
1009 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
1010 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1012 estack_ax_t
= REG_S64
;
1013 next_pc
+= sizeof(struct binary_op
);
1017 OP(BYTECODE_OP_EQ_S64
):
1021 res
= (estack_bx_v
== estack_ax_v
);
1022 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1024 estack_ax_t
= REG_S64
;
1025 next_pc
+= sizeof(struct binary_op
);
1028 OP(BYTECODE_OP_NE_S64
):
1032 res
= (estack_bx_v
!= estack_ax_v
);
1033 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1035 estack_ax_t
= REG_S64
;
1036 next_pc
+= sizeof(struct binary_op
);
1039 OP(BYTECODE_OP_GT_S64
):
1043 res
= (estack_bx_v
> estack_ax_v
);
1044 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1046 estack_ax_t
= REG_S64
;
1047 next_pc
+= sizeof(struct binary_op
);
1050 OP(BYTECODE_OP_LT_S64
):
1054 res
= (estack_bx_v
< estack_ax_v
);
1055 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1057 estack_ax_t
= REG_S64
;
1058 next_pc
+= sizeof(struct binary_op
);
1061 OP(BYTECODE_OP_GE_S64
):
1065 res
= (estack_bx_v
>= estack_ax_v
);
1066 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1068 estack_ax_t
= REG_S64
;
1069 next_pc
+= sizeof(struct binary_op
);
1072 OP(BYTECODE_OP_LE_S64
):
1076 res
= (estack_bx_v
<= estack_ax_v
);
1077 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1079 estack_ax_t
= REG_S64
;
1080 next_pc
+= sizeof(struct binary_op
);
1084 OP(BYTECODE_OP_EQ_DOUBLE
):
1085 OP(BYTECODE_OP_NE_DOUBLE
):
1086 OP(BYTECODE_OP_GT_DOUBLE
):
1087 OP(BYTECODE_OP_LT_DOUBLE
):
1088 OP(BYTECODE_OP_GE_DOUBLE
):
1089 OP(BYTECODE_OP_LE_DOUBLE
):
1095 /* Mixed S64-double binary comparators */
1096 OP(BYTECODE_OP_EQ_DOUBLE_S64
):
1097 OP(BYTECODE_OP_NE_DOUBLE_S64
):
1098 OP(BYTECODE_OP_GT_DOUBLE_S64
):
1099 OP(BYTECODE_OP_LT_DOUBLE_S64
):
1100 OP(BYTECODE_OP_GE_DOUBLE_S64
):
1101 OP(BYTECODE_OP_LE_DOUBLE_S64
):
1102 OP(BYTECODE_OP_EQ_S64_DOUBLE
):
1103 OP(BYTECODE_OP_NE_S64_DOUBLE
):
1104 OP(BYTECODE_OP_GT_S64_DOUBLE
):
1105 OP(BYTECODE_OP_LT_S64_DOUBLE
):
1106 OP(BYTECODE_OP_GE_S64_DOUBLE
):
1107 OP(BYTECODE_OP_LE_S64_DOUBLE
):
1112 OP(BYTECODE_OP_BIT_RSHIFT
):
1116 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1121 /* Catch undefined behavior. */
1122 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1126 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1127 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1129 estack_ax_t
= REG_U64
;
1130 next_pc
+= sizeof(struct binary_op
);
1133 OP(BYTECODE_OP_BIT_LSHIFT
):
1137 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1142 /* Catch undefined behavior. */
1143 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1147 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1148 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1150 estack_ax_t
= REG_U64
;
1151 next_pc
+= sizeof(struct binary_op
);
1154 OP(BYTECODE_OP_BIT_AND
):
1158 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1163 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1164 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1166 estack_ax_t
= REG_U64
;
1167 next_pc
+= sizeof(struct binary_op
);
1170 OP(BYTECODE_OP_BIT_OR
):
1174 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1179 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1180 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1182 estack_ax_t
= REG_U64
;
1183 next_pc
+= sizeof(struct binary_op
);
1186 OP(BYTECODE_OP_BIT_XOR
):
1190 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1195 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1196 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1198 estack_ax_t
= REG_U64
;
1199 next_pc
+= sizeof(struct binary_op
);
1204 OP(BYTECODE_OP_UNARY_PLUS
):
1205 OP(BYTECODE_OP_UNARY_MINUS
):
1206 OP(BYTECODE_OP_UNARY_NOT
):
1207 printk(KERN_WARNING
"LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1208 (unsigned int) *(bytecode_opcode_t
*) pc
);
1213 OP(BYTECODE_OP_UNARY_BIT_NOT
):
1215 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1216 estack_ax_t
= REG_S64
;
1217 next_pc
+= sizeof(struct unary_op
);
1221 OP(BYTECODE_OP_UNARY_PLUS_S64
):
1223 next_pc
+= sizeof(struct unary_op
);
1226 OP(BYTECODE_OP_UNARY_MINUS_S64
):
1228 estack_ax_v
= -estack_ax_v
;
1229 estack_ax_t
= REG_S64
;
1230 next_pc
+= sizeof(struct unary_op
);
1233 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE
):
1234 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE
):
1239 OP(BYTECODE_OP_UNARY_NOT_S64
):
1241 estack_ax_v
= !estack_ax_v
;
1242 estack_ax_t
= REG_S64
;
1243 next_pc
+= sizeof(struct unary_op
);
1246 OP(BYTECODE_OP_UNARY_NOT_DOUBLE
):
1253 OP(BYTECODE_OP_AND
):
1255 struct logical_op
*insn
= (struct logical_op
*) pc
;
1257 /* If AX is 0, skip and evaluate to 0 */
1258 if (unlikely(estack_ax_v
== 0)) {
1259 dbg_printk("Jumping to bytecode offset %u\n",
1260 (unsigned int) insn
->skip_offset
);
1261 next_pc
= start_pc
+ insn
->skip_offset
;
1263 /* Pop 1 when jump not taken */
1264 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1265 next_pc
+= sizeof(struct logical_op
);
1271 struct logical_op
*insn
= (struct logical_op
*) pc
;
1273 /* If AX is nonzero, skip and evaluate to 1 */
1275 if (unlikely(estack_ax_v
!= 0)) {
1277 dbg_printk("Jumping to bytecode offset %u\n",
1278 (unsigned int) insn
->skip_offset
);
1279 next_pc
= start_pc
+ insn
->skip_offset
;
1281 /* Pop 1 when jump not taken */
1282 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1283 next_pc
+= sizeof(struct logical_op
);
1289 /* load field ref */
1290 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING
):
1292 struct load_op
*insn
= (struct load_op
*) pc
;
1293 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1295 dbg_printk("load field ref offset %u type string\n",
1297 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1298 estack_ax(stack
, top
)->u
.s
.str
=
1299 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1300 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1301 dbg_printk("Bytecode warning: loading a NULL string.\n");
1305 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1306 estack_ax(stack
, top
)->u
.s
.literal_type
=
1307 ESTACK_STRING_LITERAL_TYPE_NONE
;
1308 estack_ax(stack
, top
)->u
.s
.user
= 0;
1309 estack_ax(stack
, top
)->type
= REG_STRING
;
1310 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1311 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1315 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
):
1317 struct load_op
*insn
= (struct load_op
*) pc
;
1318 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1320 dbg_printk("load field ref offset %u type sequence\n",
1322 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1323 estack_ax(stack
, top
)->u
.s
.seq_len
=
1324 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
1325 estack_ax(stack
, top
)->u
.s
.str
=
1326 *(const char **) (&interpreter_stack_data
[ref
->offset
1327 + sizeof(unsigned long)]);
1328 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1329 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1333 estack_ax(stack
, top
)->u
.s
.literal_type
=
1334 ESTACK_STRING_LITERAL_TYPE_NONE
;
1335 estack_ax(stack
, top
)->u
.s
.user
= 0;
1336 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1340 OP(BYTECODE_OP_LOAD_FIELD_REF_S64
):
1342 struct load_op
*insn
= (struct load_op
*) pc
;
1343 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1345 dbg_printk("load field ref offset %u type s64\n",
1347 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1349 ((struct literal_numeric
*) &interpreter_stack_data
[ref
->offset
])->v
;
1350 estack_ax_t
= REG_S64
;
1351 dbg_printk("ref load s64 %lld\n",
1352 (long long) estack_ax_v
);
1353 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1357 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
):
1363 /* load from immediate operand */
1364 OP(BYTECODE_OP_LOAD_STRING
):
1366 struct load_op
*insn
= (struct load_op
*) pc
;
1368 dbg_printk("load string %s\n", insn
->data
);
1369 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1370 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1371 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1372 estack_ax(stack
, top
)->u
.s
.literal_type
=
1373 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
1374 estack_ax(stack
, top
)->u
.s
.user
= 0;
1375 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1379 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING
):
1381 struct load_op
*insn
= (struct load_op
*) pc
;
1383 dbg_printk("load globbing pattern %s\n", insn
->data
);
1384 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1385 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1386 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1387 estack_ax(stack
, top
)->u
.s
.literal_type
=
1388 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
1389 estack_ax(stack
, top
)->u
.s
.user
= 0;
1390 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1394 OP(BYTECODE_OP_LOAD_S64
):
1396 struct load_op
*insn
= (struct load_op
*) pc
;
1398 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1399 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
1400 estack_ax_t
= REG_S64
;
1401 dbg_printk("load s64 %lld\n",
1402 (long long) estack_ax_v
);
1403 next_pc
+= sizeof(struct load_op
)
1404 + sizeof(struct literal_numeric
);
1408 OP(BYTECODE_OP_LOAD_DOUBLE
):
1415 OP(BYTECODE_OP_CAST_TO_S64
):
1416 printk(KERN_WARNING
"LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1417 (unsigned int) *(bytecode_opcode_t
*) pc
);
1421 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64
):
1427 OP(BYTECODE_OP_CAST_NOP
):
1429 next_pc
+= sizeof(struct cast_op
);
1433 /* get context ref */
1434 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING
):
1436 struct load_op
*insn
= (struct load_op
*) pc
;
1437 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1438 struct lttng_ctx_field
*ctx_field
;
1439 union lttng_ctx_value v
;
1441 dbg_printk("get context ref offset %u type string\n",
1443 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1444 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1445 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1446 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
1447 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1448 dbg_printk("Bytecode warning: loading a NULL string.\n");
1452 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1453 estack_ax(stack
, top
)->u
.s
.literal_type
=
1454 ESTACK_STRING_LITERAL_TYPE_NONE
;
1455 estack_ax(stack
, top
)->u
.s
.user
= 0;
1456 estack_ax(stack
, top
)->type
= REG_STRING
;
1457 dbg_printk("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1458 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1462 OP(BYTECODE_OP_GET_CONTEXT_REF_S64
):
1464 struct load_op
*insn
= (struct load_op
*) pc
;
1465 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1466 struct lttng_ctx_field
*ctx_field
;
1467 union lttng_ctx_value v
;
1469 dbg_printk("get context ref offset %u type s64\n",
1471 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1472 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1473 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1474 estack_ax_v
= v
.s64
;
1475 estack_ax_t
= REG_S64
;
1476 dbg_printk("ref get context s64 %lld\n",
1477 (long long) estack_ax_v
);
1478 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1482 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
):
1488 /* load userspace field ref */
1489 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
):
1491 struct load_op
*insn
= (struct load_op
*) pc
;
1492 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1494 dbg_printk("load field ref offset %u type user string\n",
1496 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1497 estack_ax(stack
, top
)->u
.s
.user_str
=
1498 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1499 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1500 dbg_printk("Bytecode warning: loading a NULL string.\n");
1504 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1505 estack_ax(stack
, top
)->u
.s
.literal_type
=
1506 ESTACK_STRING_LITERAL_TYPE_NONE
;
1507 estack_ax(stack
, top
)->u
.s
.user
= 1;
1508 estack_ax(stack
, top
)->type
= REG_STRING
;
1509 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1510 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1514 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
):
1516 struct load_op
*insn
= (struct load_op
*) pc
;
1517 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1519 dbg_printk("load field ref offset %u type user sequence\n",
1521 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1522 estack_ax(stack
, top
)->u
.s
.seq_len
=
1523 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
1524 estack_ax(stack
, top
)->u
.s
.user_str
=
1525 *(const char **) (&interpreter_stack_data
[ref
->offset
1526 + sizeof(unsigned long)]);
1527 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1528 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1532 estack_ax(stack
, top
)->u
.s
.literal_type
=
1533 ESTACK_STRING_LITERAL_TYPE_NONE
;
1534 estack_ax(stack
, top
)->u
.s
.user
= 1;
1535 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1539 OP(BYTECODE_OP_GET_CONTEXT_ROOT
):
1541 dbg_printk("op get context root\n");
1542 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1543 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
1544 /* "field" only needed for variants. */
1545 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1546 estack_ax(stack
, top
)->type
= REG_PTR
;
1547 next_pc
+= sizeof(struct load_op
);
1551 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT
):
1557 OP(BYTECODE_OP_GET_PAYLOAD_ROOT
):
1559 dbg_printk("op get app payload root\n");
1560 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1561 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
1562 estack_ax(stack
, top
)->u
.ptr
.ptr
= interpreter_stack_data
;
1563 /* "field" only needed for variants. */
1564 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1565 estack_ax(stack
, top
)->type
= REG_PTR
;
1566 next_pc
+= sizeof(struct load_op
);
1570 OP(BYTECODE_OP_GET_SYMBOL
):
1572 dbg_printk("op get symbol\n");
1573 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
1575 printk(KERN_WARNING
"LTTng: bytecode: Nested fields not implemented yet.\n");
1578 case LOAD_ROOT_CONTEXT
:
1579 case LOAD_ROOT_APP_CONTEXT
:
1580 case LOAD_ROOT_PAYLOAD
:
1582 * symbol lookup is performed by
1588 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1592 OP(BYTECODE_OP_GET_SYMBOL_FIELD
):
1595 * Used for first variant encountered in a
1596 * traversal. Variants are not implemented yet.
1602 OP(BYTECODE_OP_GET_INDEX_U16
):
1604 struct load_op
*insn
= (struct load_op
*) pc
;
1605 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1607 dbg_printk("op get index u16\n");
1608 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1611 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1612 estack_ax_t
= estack_ax(stack
, top
)->type
;
1613 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1617 OP(BYTECODE_OP_GET_INDEX_U64
):
1619 struct load_op
*insn
= (struct load_op
*) pc
;
1620 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1622 dbg_printk("op get index u64\n");
1623 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1626 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1627 estack_ax_t
= estack_ax(stack
, top
)->type
;
1628 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1632 OP(BYTECODE_OP_LOAD_FIELD
):
1634 dbg_printk("op load field\n");
1635 ret
= dynamic_load_field(estack_ax(stack
, top
));
1638 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1639 estack_ax_t
= estack_ax(stack
, top
)->type
;
1640 next_pc
+= sizeof(struct load_op
);
1644 OP(BYTECODE_OP_LOAD_FIELD_S8
):
1646 dbg_printk("op load field s8\n");
1648 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1649 estack_ax_t
= REG_S64
;
1650 next_pc
+= sizeof(struct load_op
);
1653 OP(BYTECODE_OP_LOAD_FIELD_S16
):
1655 dbg_printk("op load field s16\n");
1657 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1658 estack_ax_t
= REG_S64
;
1659 next_pc
+= sizeof(struct load_op
);
1662 OP(BYTECODE_OP_LOAD_FIELD_S32
):
1664 dbg_printk("op load field s32\n");
1666 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1667 estack_ax_t
= REG_S64
;
1668 next_pc
+= sizeof(struct load_op
);
1671 OP(BYTECODE_OP_LOAD_FIELD_S64
):
1673 dbg_printk("op load field s64\n");
1675 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1676 estack_ax_t
= REG_S64
;
1677 next_pc
+= sizeof(struct load_op
);
1680 OP(BYTECODE_OP_LOAD_FIELD_U8
):
1682 dbg_printk("op load field u8\n");
1684 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1685 estack_ax_t
= REG_S64
;
1686 next_pc
+= sizeof(struct load_op
);
1689 OP(BYTECODE_OP_LOAD_FIELD_U16
):
1691 dbg_printk("op load field u16\n");
1693 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1694 estack_ax_t
= REG_S64
;
1695 next_pc
+= sizeof(struct load_op
);
1698 OP(BYTECODE_OP_LOAD_FIELD_U32
):
1700 dbg_printk("op load field u32\n");
1702 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1703 estack_ax_t
= REG_S64
;
1704 next_pc
+= sizeof(struct load_op
);
1707 OP(BYTECODE_OP_LOAD_FIELD_U64
):
1709 dbg_printk("op load field u64\n");
1711 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1712 estack_ax_t
= REG_S64
;
1713 next_pc
+= sizeof(struct load_op
);
1716 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE
):
1722 OP(BYTECODE_OP_LOAD_FIELD_STRING
):
1726 dbg_printk("op load field string\n");
1727 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1728 estack_ax(stack
, top
)->u
.s
.str
= str
;
1729 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1730 dbg_printk("Bytecode warning: loading a NULL string.\n");
1734 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1735 estack_ax(stack
, top
)->u
.s
.literal_type
=
1736 ESTACK_STRING_LITERAL_TYPE_NONE
;
1737 estack_ax(stack
, top
)->type
= REG_STRING
;
1738 next_pc
+= sizeof(struct load_op
);
1742 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE
):
1746 dbg_printk("op load field string sequence\n");
1747 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
1748 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
1749 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
1750 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1751 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1755 estack_ax(stack
, top
)->u
.s
.literal_type
=
1756 ESTACK_STRING_LITERAL_TYPE_NONE
;
1757 estack_ax(stack
, top
)->type
= REG_STRING
;
1758 next_pc
+= sizeof(struct load_op
);
1764 /* Return _DISCARD on error. */
1766 return LTTNG_INTERPRETER_DISCARD
;
1769 return lttng_bytecode_interpret_format_output(
1770 estack_ax(stack
, top
), output
);
/*
 * Exempt bytecode_interpret() from objtool stack-frame validation
 * (wrapper macro from <wrapper/objtool.h>, included at the top of this
 * file).  NOTE(review): presumably needed because the interpreter's
 * OP() dispatch uses computed gotos, which objtool cannot follow —
 * confirm against the OP()/PO definitions in lttng-bytecode.h.
 */
1775 LTTNG_STACK_FRAME_NON_STANDARD(bytecode_interpret
);
/*
 * Filter-bytecode entry point: thin wrapper around bytecode_interpret()
 * that passes a NULL output pointer, i.e. the caller wants only the
 * filter verdict and no captured output.
 *
 * @filter_data: opaque bytecode runtime handle, forwarded as-is.
 * @lttng_probe_ctx: probe context forwarded to the interpreter.
 * @filter_stack_data: serialized event payload the bytecode reads from.
 *
 * Returns the interpreter verdict; on interpreter error the tail of
 * bytecode_interpret() above returns LTTNG_INTERPRETER_DISCARD.
 * NOTE(review): the success-path flags (record/discard bits) are set
 * inside bytecode_interpret(), outside this excerpt — see
 * lttng-bytecode.h for the verdict bit definitions.
 */
1777 uint64_t lttng_bytecode_filter_interpret(void *filter_data
,
1778 struct lttng_probe_ctx
*lttng_probe_ctx
,
1779 const char *filter_stack_data
)
1781 return bytecode_interpret(filter_data
, lttng_probe_ctx
,
1782 filter_stack_data
, NULL
);