+/*
+ * Integer field serializer dispatcher: token-paste the _user flag (expected
+ * to be the literal 0 or 1) into the macro name to select the kernel-space
+ * vs user-space variant (_ctf_integer_ext_isuser0 / _ctf_integer_ext_isuser1,
+ * defined elsewhere).  _user itself is consumed by the paste and not
+ * forwarded to the selected macro.
+ */
+#undef _ctf_integer_ext
+#define _ctf_integer_ext(_type, _item, _user_src, _byte_order, _base, _user, _nowrite) \
+ _ctf_integer_ext_isuser##_user(_type, _item, _user_src, _byte_order, _base, _nowrite)
+
+/*
+ * Fixed-length array serializer: align the ring-buffer context to the
+ * element type's natural alignment, then copy sizeof(_type) * (_length)
+ * bytes of payload.  _user selects the copy-from-user-space write path
+ * (event_write_from_user) vs the plain kernel write (event_write).
+ * _item, _encoding and _nowrite do not affect this payload-writing
+ * expansion (they matter to the metadata/size passes defined elsewhere).
+ */
+#undef _ctf_array_encoded
+#define _ctf_array_encoded(_type, _item, _src, _length, _encoding, _user, _nowrite) \
+ lib_ring_buffer_align_ctx(&__ctx, lttng_alignof(_type)); \
+ if (_user) { \
+ __chan->ops->event_write_from_user(&__ctx, _src, sizeof(_type) * (_length)); \
+ } else { \
+ __chan->ops->event_write(&__ctx, _src, sizeof(_type) * (_length)); \
+ }
+
+#if (__BYTE_ORDER == __LITTLE_ENDIAN)
+/*
+ * Bitfield array serializer, little-endian host: the trace bitfield stream
+ * is emitted little-endian, so the host representation can be copied
+ * byte-for-byte, same as a plain array.
+ */
+#undef _ctf_array_bitfield
+#define _ctf_array_bitfield(_type, _item, _src, _length, _user, _nowrite) \
+ lib_ring_buffer_align_ctx(&__ctx, lttng_alignof(_type)); \
+ if (_user) { \
+ __chan->ops->event_write_from_user(&__ctx, _src, sizeof(_type) * (_length)); \
+ } else { \
+ __chan->ops->event_write(&__ctx, _src, sizeof(_type) * (_length)); \
+ }
+#else /* #if (__BYTE_ORDER == __LITTLE_ENDIAN) */
+/*
+ * For big endian, we need to byteswap into little endian.
+ * Each element is fetched — get_user() for user-space sources, with a
+ * faulting read substituting 0 for the element — then byteswapped with
+ * cpu_to_le{16,32,64}() according to sizeof(_type) and written out one
+ * element at a time.  1-byte elements need no swap; any other element
+ * size trips BUG_ON(1).
+ */
+#undef _ctf_array_bitfield
+#define _ctf_array_bitfield(_type, _item, _src, _length, _user, _nowrite) \
+ lib_ring_buffer_align_ctx(&__ctx, lttng_alignof(_type)); \
+ { \
+ size_t _i; \
+ \
+ for (_i = 0; _i < (_length); _i++) { \
+ _type _tmp; \
+ \
+ if (_user) { \
+ if (get_user(_tmp, (_type *) _src + _i)) \
+ _tmp = 0; \
+ } else { \
+ _tmp = ((_type *) _src)[_i]; \
+ } \
+ switch (sizeof(_type)) { \
+ case 1: \
+ break; \
+ case 2: \
+ _tmp = cpu_to_le16(_tmp); \
+ break; \
+ case 4: \
+ _tmp = cpu_to_le32(_tmp); \
+ break; \
+ case 8: \
+ _tmp = cpu_to_le64(_tmp); \
+ break; \
+ default: \
+ BUG_ON(1); \
+ } \
+ __chan->ops->event_write(&__ctx, &_tmp, sizeof(_type)); \
+ } \
+ }
+#endif /* #else #if (__BYTE_ORDER == __LITTLE_ENDIAN) */
+
+/*
+ * Variable-length (sequence) serializer: first emit a length header of type
+ * _length_type, its value read from the dynamic-length stack
+ * (__stackvar.__dynamic_len[__dynamic_len_idx]); then align for the element
+ * type and copy sizeof(_type) * __get_dynamic_len(dest) bytes of payload,
+ * from user or kernel space depending on _user.
+ * NOTE(review): __get_dynamic_len() presumably consumes the element count
+ * precomputed by the size-computation pass for this field — confirm against
+ * the surrounding probe-generation code.
+ */
+#undef _ctf_sequence_encoded
+#define _ctf_sequence_encoded(_type, _item, _src, _length_type, \
+ _src_length, _encoding, _byte_order, _base, _user, _nowrite) \
+ { \
+ _length_type __tmpl = __stackvar.__dynamic_len[__dynamic_len_idx]; \
+ lib_ring_buffer_align_ctx(&__ctx, lttng_alignof(_length_type));\
+ __chan->ops->event_write(&__ctx, &__tmpl, sizeof(_length_type));\
+ } \
+ lib_ring_buffer_align_ctx(&__ctx, lttng_alignof(_type)); \
+ if (_user) { \
+ __chan->ops->event_write_from_user(&__ctx, _src, \
+ sizeof(_type) * __get_dynamic_len(dest)); \
+ } else { \
+ __chan->ops->event_write(&__ctx, _src, \
+ sizeof(_type) * __get_dynamic_len(dest)); \
+ }
+
+#if (__BYTE_ORDER == __LITTLE_ENDIAN)
+/*
+ * Bitfield sequence serializer, little-endian host.  Unlike
+ * _ctf_sequence_encoded, the length header is expressed in BITS:
+ * element count * sizeof(_type) * CHAR_BIT.  The payload itself is a raw
+ * byte-for-byte copy, since the bitfield stream is little-endian.
+ */
+#undef _ctf_sequence_bitfield
+#define _ctf_sequence_bitfield(_type, _item, _src, \
+ _length_type, _src_length, \
+ _user, _nowrite) \
+ { \
+ _length_type __tmpl = __stackvar.__dynamic_len[__dynamic_len_idx] * sizeof(_type) * CHAR_BIT; \
+ lib_ring_buffer_align_ctx(&__ctx, lttng_alignof(_length_type));\
+ __chan->ops->event_write(&__ctx, &__tmpl, sizeof(_length_type));\
+ } \
+ lib_ring_buffer_align_ctx(&__ctx, lttng_alignof(_type)); \
+ if (_user) { \
+ __chan->ops->event_write_from_user(&__ctx, _src, \
+ sizeof(_type) * __get_dynamic_len(dest)); \
+ } else { \
+ __chan->ops->event_write(&__ctx, _src, \
+ sizeof(_type) * __get_dynamic_len(dest)); \
+ }
+#else /* #if (__BYTE_ORDER == __LITTLE_ENDIAN) */
+/*
+ * For big endian, we need to byteswap into little endian.
+ * Length header (in bits) as above, then per-element copy: fetch each
+ * element (get_user() for user-space sources; a faulting read stores 0),
+ * byteswap with cpu_to_le{16,32,64}() per sizeof(_type), and write it out.
+ * Unsupported element sizes trip BUG_ON(1).
+ */
+#undef _ctf_sequence_bitfield
+#define _ctf_sequence_bitfield(_type, _item, _src, \
+ _length_type, _src_length, \
+ _user, _nowrite) \
+ { \
+ _length_type __tmpl = __stackvar.__dynamic_len[__dynamic_len_idx] * sizeof(_type) * CHAR_BIT; \
+ lib_ring_buffer_align_ctx(&__ctx, lttng_alignof(_length_type));\
+ __chan->ops->event_write(&__ctx, &__tmpl, sizeof(_length_type));\
+ } \
+ lib_ring_buffer_align_ctx(&__ctx, lttng_alignof(_type)); \
+ { \
+ size_t _i, _length; \
+ \
+ _length = __get_dynamic_len(dest); \
+ for (_i = 0; _i < _length; _i++) { \
+ _type _tmp; \
+ \
+ if (_user) { \
+ if (get_user(_tmp, (_type *) _src + _i)) \
+ _tmp = 0; \
+ } else { \
+ _tmp = ((_type *) _src)[_i]; \
+ } \
+ switch (sizeof(_type)) { \
+ case 1: \
+ break; \
+ case 2: \
+ _tmp = cpu_to_le16(_tmp); \
+ break; \
+ case 4: \
+ _tmp = cpu_to_le32(_tmp); \
+ break; \
+ case 8: \
+ _tmp = cpu_to_le64(_tmp); \
+ break; \
+ default: \
+ BUG_ON(1); \
+ } \
+ __chan->ops->event_write(&__ctx, &_tmp, sizeof(_type)); \
+ } \
+ }
+#endif /* #else #if (__BYTE_ORDER == __LITTLE_ENDIAN) */
+
+/*
+ * String field serializer: align the ring-buffer context to the alignment
+ * of the string's character type (*(_src)), then copy
+ * __get_dynamic_len(dest) bytes via the channel's string-copy operation —
+ * event_strcpy_from_user for user-space sources, event_strcpy otherwise.
+ * NOTE(review): the dynamic length presumably includes the terminating
+ * NUL recorded by the size-computation pass — confirm against the
+ * surrounding probe code.
+ */
+#undef _ctf_string
+#define _ctf_string(_item, _src, _user, _nowrite) \
+ lib_ring_buffer_align_ctx(&__ctx, lttng_alignof(*(_src))); \
+ if (_user) { \
+ __chan->ops->event_strcpy_from_user(&__ctx, _src, \
+ __get_dynamic_len(dest)); \
+ } else { \
+ __chan->ops->event_strcpy(&__ctx, _src, \
+ __get_dynamic_len(dest)); \
+ }