/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng UST bytecode code.
 */
13 #include <urcu/rculist.h>
15 #include "context-internal.h"
16 #include "lttng-bytecode.h"
17 #include "lib/lttng-ust/events.h"
18 #include "common/macros.h"
19 #include "common/tracer.h"
/*
 * Human-readable names for each bytecode opcode, indexed by enum
 * bytecode_op.  Used by lttng_bytecode_print_op() to produce debug
 * output; any opcode without an entry resolves to a NULL pointer.
 *
 * NOTE(review): this chunk is extraction-mangled — statements are split
 * across lines, original line numbers are fused into the text, and the
 * closing "};" of this initializer is missing from view.  Restore the
 * exact text from upstream lttng-ust src/lib/lttng-ust/lttng-bytecode.c
 * before building.
 */
21 static const char *opnames
[] = {
22 [ BYTECODE_OP_UNKNOWN
] = "UNKNOWN",
24 [ BYTECODE_OP_RETURN
] = "RETURN",
/* binary arithmetic and bitwise operators */
27 [ BYTECODE_OP_MUL
] = "MUL",
28 [ BYTECODE_OP_DIV
] = "DIV",
29 [ BYTECODE_OP_MOD
] = "MOD",
30 [ BYTECODE_OP_PLUS
] = "PLUS",
31 [ BYTECODE_OP_MINUS
] = "MINUS",
32 [ BYTECODE_OP_BIT_RSHIFT
] = "BIT_RSHIFT",
33 [ BYTECODE_OP_BIT_LSHIFT
] = "BIT_LSHIFT",
34 [ BYTECODE_OP_BIT_AND
] = "BIT_AND",
35 [ BYTECODE_OP_BIT_OR
] = "BIT_OR",
36 [ BYTECODE_OP_BIT_XOR
] = "BIT_XOR",
38 /* binary comparators */
39 [ BYTECODE_OP_EQ
] = "EQ",
40 [ BYTECODE_OP_NE
] = "NE",
41 [ BYTECODE_OP_GT
] = "GT",
42 [ BYTECODE_OP_LT
] = "LT",
43 [ BYTECODE_OP_GE
] = "GE",
44 [ BYTECODE_OP_LE
] = "LE",
46 /* string binary comparators */
47 [ BYTECODE_OP_EQ_STRING
] = "EQ_STRING",
48 [ BYTECODE_OP_NE_STRING
] = "NE_STRING",
49 [ BYTECODE_OP_GT_STRING
] = "GT_STRING",
50 [ BYTECODE_OP_LT_STRING
] = "LT_STRING",
51 [ BYTECODE_OP_GE_STRING
] = "GE_STRING",
52 [ BYTECODE_OP_LE_STRING
] = "LE_STRING",
54 /* s64 binary comparators */
55 [ BYTECODE_OP_EQ_S64
] = "EQ_S64",
56 [ BYTECODE_OP_NE_S64
] = "NE_S64",
57 [ BYTECODE_OP_GT_S64
] = "GT_S64",
58 [ BYTECODE_OP_LT_S64
] = "LT_S64",
59 [ BYTECODE_OP_GE_S64
] = "GE_S64",
60 [ BYTECODE_OP_LE_S64
] = "LE_S64",
62 /* double binary comparators */
63 [ BYTECODE_OP_EQ_DOUBLE
] = "EQ_DOUBLE",
64 [ BYTECODE_OP_NE_DOUBLE
] = "NE_DOUBLE",
65 [ BYTECODE_OP_GT_DOUBLE
] = "GT_DOUBLE",
66 [ BYTECODE_OP_LT_DOUBLE
] = "LT_DOUBLE",
67 [ BYTECODE_OP_GE_DOUBLE
] = "GE_DOUBLE",
68 [ BYTECODE_OP_LE_DOUBLE
] = "LE_DOUBLE",
70 /* Mixed S64-double binary comparators */
71 [ BYTECODE_OP_EQ_DOUBLE_S64
] = "EQ_DOUBLE_S64",
72 [ BYTECODE_OP_NE_DOUBLE_S64
] = "NE_DOUBLE_S64",
73 [ BYTECODE_OP_GT_DOUBLE_S64
] = "GT_DOUBLE_S64",
74 [ BYTECODE_OP_LT_DOUBLE_S64
] = "LT_DOUBLE_S64",
75 [ BYTECODE_OP_GE_DOUBLE_S64
] = "GE_DOUBLE_S64",
76 [ BYTECODE_OP_LE_DOUBLE_S64
] = "LE_DOUBLE_S64",
78 [ BYTECODE_OP_EQ_S64_DOUBLE
] = "EQ_S64_DOUBLE",
79 [ BYTECODE_OP_NE_S64_DOUBLE
] = "NE_S64_DOUBLE",
80 [ BYTECODE_OP_GT_S64_DOUBLE
] = "GT_S64_DOUBLE",
81 [ BYTECODE_OP_LT_S64_DOUBLE
] = "LT_S64_DOUBLE",
82 [ BYTECODE_OP_GE_S64_DOUBLE
] = "GE_S64_DOUBLE",
83 [ BYTECODE_OP_LE_S64_DOUBLE
] = "LE_S64_DOUBLE",
/* unary operators, plus their type-specialized variants */
86 [ BYTECODE_OP_UNARY_PLUS
] = "UNARY_PLUS",
87 [ BYTECODE_OP_UNARY_MINUS
] = "UNARY_MINUS",
88 [ BYTECODE_OP_UNARY_NOT
] = "UNARY_NOT",
89 [ BYTECODE_OP_UNARY_PLUS_S64
] = "UNARY_PLUS_S64",
90 [ BYTECODE_OP_UNARY_MINUS_S64
] = "UNARY_MINUS_S64",
91 [ BYTECODE_OP_UNARY_NOT_S64
] = "UNARY_NOT_S64",
92 [ BYTECODE_OP_UNARY_PLUS_DOUBLE
] = "UNARY_PLUS_DOUBLE",
93 [ BYTECODE_OP_UNARY_MINUS_DOUBLE
] = "UNARY_MINUS_DOUBLE",
94 [ BYTECODE_OP_UNARY_NOT_DOUBLE
] = "UNARY_NOT_DOUBLE",
/* logical operators */
97 [ BYTECODE_OP_AND
] = "AND",
98 [ BYTECODE_OP_OR
] = "OR",
/* load field ref, plus type-specialized variants */
101 [ BYTECODE_OP_LOAD_FIELD_REF
] = "LOAD_FIELD_REF",
102 [ BYTECODE_OP_LOAD_FIELD_REF_STRING
] = "LOAD_FIELD_REF_STRING",
103 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
] = "LOAD_FIELD_REF_SEQUENCE",
104 [ BYTECODE_OP_LOAD_FIELD_REF_S64
] = "LOAD_FIELD_REF_S64",
105 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
] = "LOAD_FIELD_REF_DOUBLE",
107 /* load from immediate operand */
108 [ BYTECODE_OP_LOAD_STRING
] = "LOAD_STRING",
109 [ BYTECODE_OP_LOAD_S64
] = "LOAD_S64",
110 [ BYTECODE_OP_LOAD_DOUBLE
] = "LOAD_DOUBLE",
/* cast operations */
113 [ BYTECODE_OP_CAST_TO_S64
] = "CAST_TO_S64",
114 [ BYTECODE_OP_CAST_DOUBLE_TO_S64
] = "CAST_DOUBLE_TO_S64",
115 [ BYTECODE_OP_CAST_NOP
] = "CAST_NOP",
117 /* get context ref */
118 [ BYTECODE_OP_GET_CONTEXT_REF
] = "GET_CONTEXT_REF",
119 [ BYTECODE_OP_GET_CONTEXT_REF_STRING
] = "GET_CONTEXT_REF_STRING",
120 [ BYTECODE_OP_GET_CONTEXT_REF_S64
] = "GET_CONTEXT_REF_S64",
121 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
] = "GET_CONTEXT_REF_DOUBLE",
123 /* load userspace field ref */
124 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
] = "LOAD_FIELD_REF_USER_STRING",
125 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
] = "LOAD_FIELD_REF_USER_SEQUENCE",
128 * load immediate star globbing pattern (literal string)
131 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING
] = "LOAD_STAR_GLOB_STRING",
133 /* globbing pattern binary operator: apply to */
134 [ BYTECODE_OP_EQ_STAR_GLOB_STRING
] = "EQ_STAR_GLOB_STRING",
135 [ BYTECODE_OP_NE_STAR_GLOB_STRING
] = "NE_STAR_GLOB_STRING",
138 * Instructions for recursive traversal through composed types.
140 [ BYTECODE_OP_GET_CONTEXT_ROOT
] = "GET_CONTEXT_ROOT",
141 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT
] = "GET_APP_CONTEXT_ROOT",
142 [ BYTECODE_OP_GET_PAYLOAD_ROOT
] = "GET_PAYLOAD_ROOT",
144 [ BYTECODE_OP_GET_SYMBOL
] = "GET_SYMBOL",
145 [ BYTECODE_OP_GET_SYMBOL_FIELD
] = "GET_SYMBOL_FIELD",
146 [ BYTECODE_OP_GET_INDEX_U16
] = "GET_INDEX_U16",
147 [ BYTECODE_OP_GET_INDEX_U64
] = "GET_INDEX_U64",
149 [ BYTECODE_OP_LOAD_FIELD
] = "LOAD_FIELD",
150 [ BYTECODE_OP_LOAD_FIELD_S8
] = "LOAD_FIELD_S8",
151 [ BYTECODE_OP_LOAD_FIELD_S16
] = "LOAD_FIELD_S16",
152 [ BYTECODE_OP_LOAD_FIELD_S32
] = "LOAD_FIELD_S32",
153 [ BYTECODE_OP_LOAD_FIELD_S64
] = "LOAD_FIELD_S64",
154 [ BYTECODE_OP_LOAD_FIELD_U8
] = "LOAD_FIELD_U8",
155 [ BYTECODE_OP_LOAD_FIELD_U16
] = "LOAD_FIELD_U16",
156 [ BYTECODE_OP_LOAD_FIELD_U32
] = "LOAD_FIELD_U32",
157 [ BYTECODE_OP_LOAD_FIELD_U64
] = "LOAD_FIELD_U64",
158 [ BYTECODE_OP_LOAD_FIELD_STRING
] = "LOAD_FIELD_STRING",
159 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE
] = "LOAD_FIELD_SEQUENCE",
160 [ BYTECODE_OP_LOAD_FIELD_DOUBLE
] = "LOAD_FIELD_DOUBLE",
162 [ BYTECODE_OP_UNARY_BIT_NOT
] = "UNARY_BIT_NOT",
164 [ BYTECODE_OP_RETURN_S64
] = "RETURN_S64",
/*
 * Return a printable name for bytecode opcode @op, bounds-checking it
 * against NR_BYTECODE_OPS before indexing the opnames[] table.
 *
 * NOTE(review): the function body is truncated in this chunk — the
 * out-of-range branch and the return statements are missing.
 * Presumably it returns "UNKNOWN" for out-of-range ops and
 * opnames[op] otherwise — confirm against upstream lttng-bytecode.c.
 */
167 const char *lttng_bytecode_print_op(enum bytecode_op op
)
169 if (op
>= NR_BYTECODE_OPS
)
/*
 * Resolve a field-reference relocation: patch the load instruction at
 * runtime->code[reloc_offset] so that it refers to the event payload
 * field named @field_name.
 *
 * Walks event_desc->tp_class->fields, accumulating field_offset by the
 * interpreter-stack size of each preceding field's type, then rewrites
 * the generic BYTECODE_OP_LOAD_FIELD_REF opcode into the variant
 * specialized for the matched field's type, and stores the 16-bit
 * offset into the instruction's struct field_ref payload.
 * @runtime_len is explicitly unused.
 *
 * NOTE(review): this chunk is extraction-mangled and incomplete — the
 * `break`/`return` statements between switch cases, the error paths,
 * and the closing braces are missing from view; without them the
 * fallthrough structure cannot be judged here.  Restore from upstream
 * lttng-bytecode.c.
 */
176 int apply_field_reloc(const struct lttng_ust_event_desc
*event_desc
,
177 struct bytecode_runtime
*runtime
,
178 uint32_t runtime_len
__attribute__((unused
)),
179 uint32_t reloc_offset
,
180 const char *field_name
,
181 enum bytecode_op bytecode_op
)
183 const struct lttng_ust_event_field
* const *fields
, *field
= NULL
;
184 unsigned int nr_fields
, i
;
186 uint32_t field_offset
= 0;
188 dbg_printf("Apply field reloc: %u %s\n", reloc_offset
, field_name
);
190 /* Lookup event by name */
193 fields
= event_desc
->tp_class
->fields
;
196 nr_fields
= event_desc
->tp_class
->nr_fields
;
/* Scan payload fields; nofilter fields are not addressable by filters. */
197 for (i
= 0; i
< nr_fields
; i
++) {
198 if (fields
[i
]->nofilter
) {
201 if (!strcmp(fields
[i
]->name
, field_name
)) {
205 /* compute field offset */
/* Each type contributes its interpreter-stack footprint to the offset. */
206 switch (fields
[i
]->type
->type
) {
207 case lttng_ust_type_integer
:
208 case lttng_ust_type_enum
:
209 field_offset
+= sizeof(int64_t);
211 case lttng_ust_type_array
:
212 case lttng_ust_type_sequence
:
/* sequence/array: length word plus data pointer */
213 field_offset
+= sizeof(unsigned long);
214 field_offset
+= sizeof(void *);
216 case lttng_ust_type_string
:
217 field_offset
+= sizeof(void *);
219 case lttng_ust_type_float
:
220 field_offset
+= sizeof(double);
229 /* Check if field offset is too large for 16-bit offset */
230 if (field_offset
> LTTNG_UST_ABI_FILTER_BYTECODE_MAX_LEN
- 1)
/* Point at the instruction to patch inside the runtime code buffer. */
234 op
= (struct load_op
*) &runtime
->code
[reloc_offset
];
236 switch (bytecode_op
) {
237 case BYTECODE_OP_LOAD_FIELD_REF
:
239 struct field_ref
*field_ref
;
241 field_ref
= (struct field_ref
*) op
->data
;
/* Specialize the opcode according to the matched field's type. */
242 switch (field
->type
->type
) {
243 case lttng_ust_type_integer
:
244 case lttng_ust_type_enum
:
245 op
->op
= BYTECODE_OP_LOAD_FIELD_REF_S64
;
247 case lttng_ust_type_array
:
249 struct lttng_ust_type_array
*array
= (struct lttng_ust_type_array
*) field
->type
;
/* Arrays are filterable only with a string encoding. */
251 if (array
->encoding
== lttng_ust_string_encoding_none
)
253 op
->op
= BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
;
256 case lttng_ust_type_sequence
:
258 struct lttng_ust_type_sequence
*sequence
= (struct lttng_ust_type_sequence
*) field
->type
;
/* Sequences are filterable only with a string encoding. */
260 if (sequence
->encoding
== lttng_ust_string_encoding_none
)
262 op
->op
= BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
;
265 case lttng_ust_type_string
:
266 op
->op
= BYTECODE_OP_LOAD_FIELD_REF_STRING
;
268 case lttng_ust_type_float
:
269 op
->op
= BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
;
/* Narrowing cast is safe: range-checked against the 16-bit max above. */
275 field_ref
->offset
= (uint16_t) field_offset
;
/*
 * Resolve a context-reference relocation: patch the load instruction at
 * runtime->code[reloc_offset] so that it refers to the context field
 * named @context_name.
 *
 * Looks up the context index in *runtime->p.pctx; for application
 * contexts not yet present, registers them via
 * lttng_ust_add_app_context_to_ctx_rcu() and retries the lookup.  Then
 * rewrites the generic BYTECODE_OP_GET_CONTEXT_REF opcode into the
 * variant specialized for the context field's type and stores the
 * 16-bit context index into the instruction's struct field_ref.
 * @runtime_len is explicitly unused.
 *
 * NOTE(review): extraction-mangled and incomplete — declarations for
 * idx/op/ret, the `break`/`return` statements, error paths and closing
 * braces are missing from view.  Restore from upstream
 * lttng-bytecode.c.
 */
285 int apply_context_reloc(struct bytecode_runtime
*runtime
,
286 uint32_t runtime_len
__attribute__((unused
)),
287 uint32_t reloc_offset
,
288 const char *context_name
,
289 enum bytecode_op bytecode_op
)
292 const struct lttng_ust_ctx_field
*ctx_field
;
294 struct lttng_ust_ctx
**pctx
= runtime
->p
.pctx
;
296 dbg_printf("Apply context reloc: %u %s\n", reloc_offset
, context_name
);
298 /* Get context index */
299 idx
= lttng_get_context_index(*pctx
, context_name
);
/* App-provided context: register it, then look the index up again. */
301 if (lttng_context_is_app(context_name
)) {
304 ret
= lttng_ust_add_app_context_to_ctx_rcu(context_name
,
308 idx
= lttng_get_context_index(*pctx
, context_name
);
315 /* Check if idx is too large for 16-bit offset */
316 if (idx
> LTTNG_UST_ABI_FILTER_BYTECODE_MAX_LEN
- 1)
319 /* Get context return type */
320 ctx_field
= &(*pctx
)->fields
[idx
];
321 op
= (struct load_op
*) &runtime
->code
[reloc_offset
];
323 switch (bytecode_op
) {
324 case BYTECODE_OP_GET_CONTEXT_REF
:
326 struct field_ref
*field_ref
;
328 field_ref
= (struct field_ref
*) op
->data
;
/* Specialize the opcode according to the context field's type. */
329 switch (ctx_field
->event_field
->type
->type
) {
330 case lttng_ust_type_integer
:
331 case lttng_ust_type_enum
:
332 op
->op
= BYTECODE_OP_GET_CONTEXT_REF_S64
;
334 /* Sequence and array supported only as string */
335 case lttng_ust_type_array
:
337 struct lttng_ust_type_array
*array
= (struct lttng_ust_type_array
*) ctx_field
->event_field
->type
;
339 if (array
->encoding
== lttng_ust_string_encoding_none
)
341 op
->op
= BYTECODE_OP_GET_CONTEXT_REF_STRING
;
344 case lttng_ust_type_sequence
:
346 struct lttng_ust_type_sequence
*sequence
= (struct lttng_ust_type_sequence
*) ctx_field
->event_field
->type
;
348 if (sequence
->encoding
== lttng_ust_string_encoding_none
)
350 op
->op
= BYTECODE_OP_GET_CONTEXT_REF_STRING
;
353 case lttng_ust_type_string
:
354 op
->op
= BYTECODE_OP_GET_CONTEXT_REF_STRING
;
356 case lttng_ust_type_float
:
357 op
->op
= BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
;
/* Dynamic type: keep the generic op, resolved at interpretation time. */
359 case lttng_ust_type_dynamic
:
360 op
->op
= BYTECODE_OP_GET_CONTEXT_REF
;
365 /* set offset to context index within channel contexts */
/* Narrowing cast is safe: idx was range-checked against 16-bit max above. */
366 field_ref
->offset
= (uint16_t) idx
;
/*
 * Apply one relocation entry to the runtime code buffer: bounds-check
 * @reloc_offset against @runtime_len, read the opcode of the target
 * instruction, and dispatch to apply_field_reloc() or
 * apply_context_reloc() accordingly.  GET_SYMBOL /
 * GET_SYMBOL_FIELD relocs need no patching here (handled later by the
 * specialize phase or the interpreter); any other opcode is an error.
 *
 * NOTE(review): extraction-mangled and incomplete — the `name`
 * parameter, op declaration, the `switch (op->op)` header, return
 * statements and closing braces are missing from view.  Restore from
 * upstream lttng-bytecode.c.
 */
376 int apply_reloc(const struct lttng_ust_event_desc
*event_desc
,
377 struct bytecode_runtime
*runtime
,
378 uint32_t runtime_len
,
379 uint32_t reloc_offset
,
384 dbg_printf("Apply reloc: %u %s\n", reloc_offset
, name
);
386 /* Ensure that the reloc is within the code */
387 if (runtime_len
- reloc_offset
< sizeof(uint16_t))
390 op
= (struct load_op
*) &runtime
->code
[reloc_offset
];
392 case BYTECODE_OP_LOAD_FIELD_REF
:
393 return apply_field_reloc(event_desc
, runtime
, runtime_len
,
394 reloc_offset
, name
, op
->op
);
395 case BYTECODE_OP_GET_CONTEXT_REF
:
396 return apply_context_reloc(runtime
, runtime_len
,
397 reloc_offset
, name
, op
->op
);
398 case BYTECODE_OP_GET_SYMBOL
:
399 case BYTECODE_OP_GET_SYMBOL_FIELD
:
401 * Will be handled by load specialize phase or
402 * dynamically by interpreter.
406 ERR("Unknown reloc op type %u\n", op
->op
);
/*
 * Return whether @bytecode is already linked into the given runtime
 * list: iterate @bytecode_runtime_head and compare each runtime's ->bc
 * pointer against @bytecode.
 *
 * NOTE(review): the return statements and closing braces are missing
 * from this chunk — presumably returns nonzero on match, zero
 * otherwise; confirm against upstream lttng-bytecode.c.
 */
413 int bytecode_is_linked(struct lttng_ust_bytecode_node
*bytecode
,
414 struct cds_list_head
*bytecode_runtime_head
)
416 struct lttng_ust_bytecode_runtime
*bc_runtime
;
418 cds_list_for_each_entry(bc_runtime
, bytecode_runtime_head
, node
) {
419 if (bc_runtime
->bc
== bytecode
)
/*
 * Take a bytecode with reloc table and link it to an event to create a
 * bytecode runtime.
 */
/*
 * Link one bytecode program to an instance: allocate a struct
 * bytecode_runtime sized to hold the instructions only (the reloc
 * table, which starts at bc.reloc_offset, is not copied), apply all
 * relocations, validate and specialize the program, then publish the
 * runtime on the RCU list at @insert_loc.
 *
 * On success the runtime's interpreter_func is set to
 * lttng_bytecode_interpret; on any link failure it is set to
 * lttng_bytecode_interpret_error and link_failed = 1, and the runtime
 * is still added to the list so lttng_bytecode_sync_state() can manage
 * it.  A bytecode already linked on @bytecode_runtime_head is skipped.
 *
 * NOTE(review): extraction-mangled and incomplete — the early-return
 * for the already-linked case, zmalloc NULL check, `goto` error
 * labels, the `name` declaration in the reloc loop, and closing braces
 * are missing from view.  Restore from upstream lttng-bytecode.c.
 */
430 int link_bytecode(const struct lttng_ust_event_desc
*event_desc
,
431 struct lttng_ust_ctx
**ctx
,
432 struct lttng_ust_bytecode_node
*bytecode
,
433 struct cds_list_head
*bytecode_runtime_head
,
434 struct cds_list_head
*insert_loc
)
436 int ret
, offset
, next_offset
;
437 struct bytecode_runtime
*runtime
= NULL
;
438 size_t runtime_alloc_len
;
442 /* Bytecode already linked */
443 if (bytecode_is_linked(bytecode
, bytecode_runtime_head
))
446 dbg_printf("Linking...\n");
448 /* We don't need the reloc table in the runtime */
449 runtime_alloc_len
= sizeof(*runtime
) + bytecode
->bc
.reloc_offset
;
450 runtime
= zmalloc(runtime_alloc_len
);
455 runtime
->p
.type
= bytecode
->type
;
456 runtime
->p
.bc
= bytecode
;
457 runtime
->p
.pctx
= ctx
;
/* Instruction stream length == start of the reloc table. */
458 runtime
->len
= bytecode
->bc
.reloc_offset
;
459 /* copy original bytecode */
460 memcpy(runtime
->code
, bytecode
->bc
.data
, runtime
->len
);
461 /* Validate bytecode load instructions before relocs. */
462 ret
= lttng_bytecode_validate_load(runtime
);
467 * apply relocs. Those are a uint16_t (offset in bytecode)
468 * followed by a string (field name).
470 for (offset
= bytecode
->bc
.reloc_offset
;
471 offset
< bytecode
->bc
.len
;
472 offset
= next_offset
) {
473 uint16_t reloc_offset
=
474 *(uint16_t *) &bytecode
->bc
.data
[offset
];
476 (const char *) &bytecode
->bc
.data
[offset
+ sizeof(uint16_t)];
478 ret
= apply_reloc(event_desc
, runtime
, runtime
->len
, reloc_offset
, name
);
/* Advance past this entry: u16 offset + NUL-terminated name. */
482 next_offset
= offset
+ sizeof(uint16_t) + strlen(name
) + 1;
484 /* Validate bytecode */
485 ret
= lttng_bytecode_validate(runtime
);
489 /* Specialize bytecode */
490 ret
= lttng_bytecode_specialize(event_desc
, runtime
);
/* Success path: install the real interpreter and publish (RCU). */
495 runtime
->p
.interpreter_func
= lttng_bytecode_interpret
;
496 runtime
->p
.link_failed
= 0;
497 cds_list_add_rcu(&runtime
->p
.node
, insert_loc
);
498 dbg_printf("Linking successful.\n");
/* Failure path: install the error interpreter, still publish (RCU). */
502 runtime
->p
.interpreter_func
= lttng_bytecode_interpret_error
;
503 runtime
->p
.link_failed
= 1;
504 cds_list_add_rcu(&runtime
->p
.node
, insert_loc
);
506 dbg_printf("Linking failed.\n");
/*
 * Synchronize the runtime's interpreter function with the current
 * enabler state: use lttng_bytecode_interpret_error when the owning
 * enabler is disabled or linking failed, lttng_bytecode_interpret
 * otherwise.
 *
 * NOTE(review): the `else` between the two assignments is missing from
 * this mangled chunk — as shown the second assignment would always
 * run; confirm structure against upstream lttng-bytecode.c.
 */
510 void lttng_bytecode_sync_state(struct lttng_ust_bytecode_runtime
*runtime
)
512 struct lttng_ust_bytecode_node
*bc
= runtime
->bc
;
514 if (!bc
->enabler
->enabled
|| runtime
->link_failed
)
515 runtime
->interpreter_func
= lttng_bytecode_interpret_error
;
517 runtime
->interpreter_func
= lttng_bytecode_interpret
;
/*
 * Given the lists of bytecode programs of an instance (trigger or event) and
 * of a matching enabler, try to link all the enabler's bytecode programs with
 * the instance.
 *
 * This function is called after we confirmed that the enabler and the
 * instance are name matching (or glob pattern matching).
 */
/*
 * Link every bytecode program of a matching enabler with an instance.
 * For each enabler bytecode: skip it if already linked to the
 * instance; otherwise find the insertion point that keeps
 * @instance_bytecode_head sorted by seqnum (ties insert after the
 * existing entry), defaulting to the list head, and call
 * link_bytecode().  A link failure is only logged — remaining
 * programs are still processed.
 *
 * NOTE(review): extraction-mangled and incomplete — the `int ret`
 * declaration, `continue`/`goto`/`break` statements and closing braces
 * are missing from view.  Restore from upstream lttng-bytecode.c.
 */
528 void lttng_enabler_link_bytecode(const struct lttng_ust_event_desc
*event_desc
,
529 struct lttng_ust_ctx
**ctx
,
530 struct cds_list_head
*instance_bytecode_head
,
531 struct cds_list_head
*enabler_bytecode_head
)
533 struct lttng_ust_bytecode_node
*enabler_bc
;
534 struct lttng_ust_bytecode_runtime
*runtime
;
538 /* Go over all the bytecode programs of the enabler. */
539 cds_list_for_each_entry(enabler_bc
, enabler_bytecode_head
, node
) {
541 struct cds_list_head
*insert_loc
;
544 * Check if the current enabler bytecode program is already
545 * linked with the instance.
547 cds_list_for_each_entry(runtime
, instance_bytecode_head
, node
) {
548 if (runtime
->bc
== enabler_bc
) {
555 * Skip bytecode already linked, go to the next enabler
562 * Insert at specified priority (seqnum) in increasing
563 * order. If there already is a bytecode of the same priority,
564 * insert the new bytecode right after it.
/* Reverse scan: first entry with seqnum <= ours marks the spot. */
566 cds_list_for_each_entry_reverse(runtime
,
567 instance_bytecode_head
, node
) {
568 if (runtime
->bc
->bc
.seqnum
<= enabler_bc
->bc
.seqnum
) {
570 insert_loc
= &runtime
->node
;
575 /* Add to head to list */
576 insert_loc
= instance_bytecode_head
;
578 dbg_printf("linking bytecode\n");
579 ret
= link_bytecode(event_desc
, ctx
, enabler_bc
, instance_bytecode_head
, insert_loc
);
581 dbg_printf("[lttng filter] warning: cannot link event bytecode\n");
/*
 * Free every bytecode runtime on @bytecode_runtime_head, using the
 * _safe list iterator so entries can be removed while walking.
 *
 * NOTE(review): the iterator's member argument and the loop body
 * (presumably freeing each runtime) are missing from this mangled
 * chunk — confirm against upstream lttng-bytecode.c.
 */
587 void free_filter_runtime(struct cds_list_head
*bytecode_runtime_head
)
589 struct bytecode_runtime
*runtime
, *tmp
;
591 cds_list_for_each_entry_safe(runtime
, tmp
, bytecode_runtime_head
,
/*
 * Free all filter bytecode runtimes attached to @event by releasing
 * its private filter_bytecode_runtime_head list.
 */
598 void lttng_free_event_filter_runtime(struct lttng_ust_event_common
*event
)
600 free_filter_runtime(&event
->priv
->filter_bytecode_runtime_head
);