/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2020 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng Counters API, requiring counter/config.h
 */
9 #ifndef _LTTNG_COUNTER_API_H
10 #define _LTTNG_COUNTER_API_H
#include <errno.h>
#include <limits.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#include <urcu/compiler.h>
#include <urcu/uatomic.h>

#include "counter-internal.h"
#include "common/bitmap.h"
#include "common/getcpu.h"
23 * Using unsigned arithmetic because overflow is defined.
25 static inline int __lttng_counter_add(const struct lib_counter_config
*config
,
26 enum lib_counter_config_alloc alloc
,
27 enum lib_counter_config_sync sync
__attribute__((unused
)),
28 struct lib_counter
*counter
,
29 const size_t *dimension_indexes
, int64_t v
,
33 bool overflow
= false, underflow
= false;
34 struct lib_counter_layout
*layout
;
37 if (caa_unlikely(lttng_counter_validate_indexes(config
, counter
, dimension_indexes
)))
39 index
= lttng_counter_get_index(config
, counter
, dimension_indexes
);
42 case COUNTER_ALLOC_PER_CPU
:
43 layout
= &counter
->percpu_counters
[lttng_ust_get_cpu()];
45 case COUNTER_ALLOC_GLOBAL
:
46 layout
= &counter
->global_counters
;
51 if (caa_unlikely(!layout
->counters
))
54 switch (config
->counter_size
) {
55 case COUNTER_SIZE_8_BIT
:
57 int8_t *int_p
= (int8_t *) layout
->counters
+ index
;
59 int8_t global_sum_step
= counter
->global_sum_step
.s8
;
63 case COUNTER_ALLOC_PER_CPU
:
68 n
= (int8_t) ((uint8_t) old
+ (uint8_t) v
);
69 if (caa_unlikely(global_sum_step
)) {
70 if (caa_unlikely(n
> (int8_t) global_sum_step
))
71 move_sum
= (int8_t) global_sum_step
/ 2;
72 else if (caa_unlikely(n
< -(int8_t) global_sum_step
))
73 move_sum
= -((int8_t) global_sum_step
/ 2);
76 res
= uatomic_cmpxchg(int_p
, old
, n
);
80 case COUNTER_ALLOC_GLOBAL
:
84 n
= (int8_t) ((uint8_t) old
+ (uint8_t) v
);
85 res
= uatomic_cmpxchg(int_p
, old
, n
);
92 if (v
> 0 && (v
>= UINT8_MAX
|| n
< old
))
94 else if (v
< 0 && (v
<= -(int64_t) UINT8_MAX
|| n
> old
))
98 case COUNTER_SIZE_16_BIT
:
100 int16_t *int_p
= (int16_t *) layout
->counters
+ index
;
102 int16_t global_sum_step
= counter
->global_sum_step
.s16
;
106 case COUNTER_ALLOC_PER_CPU
:
111 n
= (int16_t) ((uint16_t) old
+ (uint16_t) v
);
112 if (caa_unlikely(global_sum_step
)) {
113 if (caa_unlikely(n
> (int16_t) global_sum_step
))
114 move_sum
= (int16_t) global_sum_step
/ 2;
115 else if (caa_unlikely(n
< -(int16_t) global_sum_step
))
116 move_sum
= -((int16_t) global_sum_step
/ 2);
119 res
= uatomic_cmpxchg(int_p
, old
, n
);
120 } while (old
!= res
);
123 case COUNTER_ALLOC_GLOBAL
:
127 n
= (int16_t) ((uint16_t) old
+ (uint16_t) v
);
128 res
= uatomic_cmpxchg(int_p
, old
, n
);
129 } while (old
!= res
);
135 if (v
> 0 && (v
>= UINT16_MAX
|| n
< old
))
137 else if (v
< 0 && (v
<= -(int64_t) UINT16_MAX
|| n
> old
))
141 case COUNTER_SIZE_32_BIT
:
143 int32_t *int_p
= (int32_t *) layout
->counters
+ index
;
145 int32_t global_sum_step
= counter
->global_sum_step
.s32
;
149 case COUNTER_ALLOC_PER_CPU
:
154 n
= (int32_t) ((uint32_t) old
+ (uint32_t) v
);
155 if (caa_unlikely(global_sum_step
)) {
156 if (caa_unlikely(n
> (int32_t) global_sum_step
))
157 move_sum
= (int32_t) global_sum_step
/ 2;
158 else if (caa_unlikely(n
< -(int32_t) global_sum_step
))
159 move_sum
= -((int32_t) global_sum_step
/ 2);
162 res
= uatomic_cmpxchg(int_p
, old
, n
);
163 } while (old
!= res
);
166 case COUNTER_ALLOC_GLOBAL
:
170 n
= (int32_t) ((uint32_t) old
+ (uint32_t) v
);
171 res
= uatomic_cmpxchg(int_p
, old
, n
);
172 } while (old
!= res
);
178 if (v
> 0 && (v
>= UINT32_MAX
|| n
< old
))
180 else if (v
< 0 && (v
<= -(int64_t) UINT32_MAX
|| n
> old
))
184 #if CAA_BITS_PER_LONG == 64
185 case COUNTER_SIZE_64_BIT
:
187 int64_t *int_p
= (int64_t *) layout
->counters
+ index
;
189 int64_t global_sum_step
= counter
->global_sum_step
.s64
;
193 case COUNTER_ALLOC_PER_CPU
:
198 n
= (int64_t) ((uint64_t) old
+ (uint64_t) v
);
199 if (caa_unlikely(global_sum_step
)) {
200 if (caa_unlikely(n
> (int64_t) global_sum_step
))
201 move_sum
= (int64_t) global_sum_step
/ 2;
202 else if (caa_unlikely(n
< -(int64_t) global_sum_step
))
203 move_sum
= -((int64_t) global_sum_step
/ 2);
206 res
= uatomic_cmpxchg(int_p
, old
, n
);
207 } while (old
!= res
);
210 case COUNTER_ALLOC_GLOBAL
:
214 n
= (int64_t) ((uint64_t) old
+ (uint64_t) v
);
215 res
= uatomic_cmpxchg(int_p
, old
, n
);
216 } while (old
!= res
);
222 if (v
> 0 && n
< old
)
224 else if (v
< 0 && n
> old
)
232 if (caa_unlikely(overflow
&& !lttng_bitmap_test_bit(index
, layout
->overflow_bitmap
)))
233 lttng_bitmap_set_bit(index
, layout
->overflow_bitmap
);
234 else if (caa_unlikely(underflow
&& !lttng_bitmap_test_bit(index
, layout
->underflow_bitmap
)))
235 lttng_bitmap_set_bit(index
, layout
->underflow_bitmap
);
237 *remainder
= move_sum
;
241 static inline int __lttng_counter_add_percpu(const struct lib_counter_config
*config
,
242 struct lib_counter
*counter
,
243 const size_t *dimension_indexes
, int64_t v
)
245 int64_t move_sum
= 0;
248 ret
= __lttng_counter_add(config
, COUNTER_ALLOC_PER_CPU
, config
->sync
,
249 counter
, dimension_indexes
, v
, &move_sum
);
250 if (caa_unlikely(ret
))
252 if (caa_unlikely(move_sum
))
253 return __lttng_counter_add(config
, COUNTER_ALLOC_GLOBAL
, COUNTER_SYNC_GLOBAL
,
254 counter
, dimension_indexes
, move_sum
, NULL
);
258 static inline int __lttng_counter_add_global(const struct lib_counter_config
*config
,
259 struct lib_counter
*counter
,
260 const size_t *dimension_indexes
, int64_t v
)
262 return __lttng_counter_add(config
, COUNTER_ALLOC_GLOBAL
, config
->sync
, counter
,
263 dimension_indexes
, v
, NULL
);
266 static inline int lttng_counter_add(const struct lib_counter_config
*config
,
267 struct lib_counter
*counter
,
268 const size_t *dimension_indexes
, int64_t v
)
270 switch (config
->alloc
) {
271 case COUNTER_ALLOC_PER_CPU
: /* Fallthrough */
272 case COUNTER_ALLOC_PER_CPU
| COUNTER_ALLOC_GLOBAL
:
273 return __lttng_counter_add_percpu(config
, counter
, dimension_indexes
, v
);
274 case COUNTER_ALLOC_GLOBAL
:
275 return __lttng_counter_add_global(config
, counter
, dimension_indexes
, v
);
/*
 * Increment the counter cell by one.
 *
 * Returns 0 on success, negative error code on failure.
 */
static inline int lttng_counter_inc(const struct lib_counter_config *config,
				    struct lib_counter *counter,
				    const size_t *dimension_indexes)
{
	return lttng_counter_add(config, counter, dimension_indexes, 1);
}
/*
 * Decrement the counter cell by one.
 *
 * Returns 0 on success, negative error code on failure.
 */
static inline int lttng_counter_dec(const struct lib_counter_config *config,
				    struct lib_counter *counter,
				    const size_t *dimension_indexes)
{
	return lttng_counter_add(config, counter, dimension_indexes, -1);
}
295 #endif /* _LTTNG_COUNTER_API_H */