/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2020 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng Counters API, requiring counter/config.h
 */

#ifndef _LTTNG_COUNTER_API_H
#define _LTTNG_COUNTER_API_H
#include <stdint.h>
#include <stdbool.h>
#include <stddef.h>
#include <errno.h>
#include "counter-internal.h"
#include <urcu/compiler.h>
#include <urcu/uatomic.h>
#include "common/bitmap.h"
#include "common/getcpu.h"

/*
 * Using unsigned arithmetic because overflow is defined.
 */
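/*
 * Signed integer overflow is undefined behavior in C, so each size case
 * below widens to the matching unsigned type, adds, and casts the result
 * back, e.g. (int8_t) ((uint8_t) old + (uint8_t) v), which wraps modulo
 * 2^8 with a well-defined result.
 */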
static inline int __lttng_counter_add(const struct lib_counter_config *config,
                                      enum lib_counter_config_alloc alloc,
                                      enum lib_counter_config_sync sync,
                                      struct lib_counter *counter,
                                      const size_t *dimension_indexes, int64_t v,
                                      int64_t *remainder)
{
        size_t index;
        bool overflow = false, underflow = false;
        struct lib_counter_layout *layout;
        int64_t move_sum = 0;

        if (caa_unlikely(lttng_counter_validate_indexes(config, counter, dimension_indexes)))
                return -EOVERFLOW;
        index = lttng_counter_get_index(config, counter, dimension_indexes);

        switch (alloc) {
        case COUNTER_ALLOC_PER_CPU:
                layout = &counter->percpu_counters[lttng_ust_get_cpu()];
                break;
        case COUNTER_ALLOC_GLOBAL:
                layout = &counter->global_counters;
                break;
        default:
                return -EINVAL;
        }
        if (caa_unlikely(!layout->counters))
                return -ENODEV;

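        /*
         * Each counter size below follows the same pattern: read the current
         * value, compute the new value using unsigned wrap-around, then retry
         * with uatomic_cmpxchg() until the update wins the race.
         */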
        switch (config->counter_size) {
        case COUNTER_SIZE_8_BIT:
        {
                int8_t *int_p = (int8_t *) layout->counters + index;
                int8_t old, n, res;
                int8_t global_sum_step = counter->global_sum_step.s8;

                res = *int_p;
                switch (sync) {
                case COUNTER_SYNC_PER_CPU:
                {
                        do {
                                move_sum = 0;
                                old = res;
                                n = (int8_t) ((uint8_t) old + (uint8_t) v);
                                if (caa_unlikely(n > (int8_t) global_sum_step))
                                        move_sum = (int8_t) global_sum_step / 2;
                                else if (caa_unlikely(n < -(int8_t) global_sum_step))
                                        move_sum = -((int8_t) global_sum_step / 2);
                                n -= move_sum;
                                res = uatomic_cmpxchg(int_p, old, n);
                        } while (old != res);
                        break;
                }
                case COUNTER_SYNC_GLOBAL:
                {
                        do {
                                old = res;
                                n = (int8_t) ((uint8_t) old + (uint8_t) v);
                                res = uatomic_cmpxchg(int_p, old, n);
                        } while (old != res);
                        break;
                }
                default:
                        return -EINVAL;
                }
                if (v > 0 && (v >= UINT8_MAX || n < old))
                        overflow = true;
                else if (v < 0 && (v <= -(int64_t) UINT8_MAX || n > old))
                        underflow = true;
                break;
        }
        case COUNTER_SIZE_16_BIT:
        {
                int16_t *int_p = (int16_t *) layout->counters + index;
                int16_t old, n, res;
                int16_t global_sum_step = counter->global_sum_step.s16;

                res = *int_p;
                switch (sync) {
                case COUNTER_SYNC_PER_CPU:
                {
                        do {
                                move_sum = 0;
                                old = res;
                                n = (int16_t) ((uint16_t) old + (uint16_t) v);
                                if (caa_unlikely(n > (int16_t) global_sum_step))
                                        move_sum = (int16_t) global_sum_step / 2;
                                else if (caa_unlikely(n < -(int16_t) global_sum_step))
                                        move_sum = -((int16_t) global_sum_step / 2);
                                n -= move_sum;
                                res = uatomic_cmpxchg(int_p, old, n);
                        } while (old != res);
                        break;
                }
                case COUNTER_SYNC_GLOBAL:
                {
                        do {
                                old = res;
                                n = (int16_t) ((uint16_t) old + (uint16_t) v);
                                res = uatomic_cmpxchg(int_p, old, n);
                        } while (old != res);
                        break;
                }
                default:
                        return -EINVAL;
                }
                if (v > 0 && (v >= UINT16_MAX || n < old))
                        overflow = true;
                else if (v < 0 && (v <= -(int64_t) UINT16_MAX || n > old))
                        underflow = true;
                break;
        }
        case COUNTER_SIZE_32_BIT:
        {
                int32_t *int_p = (int32_t *) layout->counters + index;
                int32_t old, n, res;
                int32_t global_sum_step = counter->global_sum_step.s32;

                res = *int_p;
                switch (sync) {
                case COUNTER_SYNC_PER_CPU:
                {
                        do {
                                move_sum = 0;
                                old = res;
                                n = (int32_t) ((uint32_t) old + (uint32_t) v);
                                if (caa_unlikely(n > (int32_t) global_sum_step))
                                        move_sum = (int32_t) global_sum_step / 2;
                                else if (caa_unlikely(n < -(int32_t) global_sum_step))
                                        move_sum = -((int32_t) global_sum_step / 2);
                                n -= move_sum;
                                res = uatomic_cmpxchg(int_p, old, n);
                        } while (old != res);
                        break;
                }
                case COUNTER_SYNC_GLOBAL:
                {
                        do {
                                old = res;
                                n = (int32_t) ((uint32_t) old + (uint32_t) v);
                                res = uatomic_cmpxchg(int_p, old, n);
                        } while (old != res);
                        break;
                }
                default:
                        return -EINVAL;
                }
                if (v > 0 && (v >= UINT32_MAX || n < old))
                        overflow = true;
                else if (v < 0 && (v <= -(int64_t) UINT32_MAX || n > old))
                        underflow = true;
                break;
        }
#if CAA_BITS_PER_LONG == 64
        case COUNTER_SIZE_64_BIT:
        {
                int64_t *int_p = (int64_t *) layout->counters + index;
                int64_t old, n, res;
                int64_t global_sum_step = counter->global_sum_step.s64;

                res = *int_p;
                switch (sync) {
                case COUNTER_SYNC_PER_CPU:
                {
                        do {
                                move_sum = 0;
                                old = res;
                                n = (int64_t) ((uint64_t) old + (uint64_t) v);
                                if (caa_unlikely(n > (int64_t) global_sum_step))
                                        move_sum = (int64_t) global_sum_step / 2;
                                else if (caa_unlikely(n < -(int64_t) global_sum_step))
                                        move_sum = -((int64_t) global_sum_step / 2);
                                n -= move_sum;
                                res = uatomic_cmpxchg(int_p, old, n);
                        } while (old != res);
                        break;
                }
                case COUNTER_SYNC_GLOBAL:
                {
                        do {
                                old = res;
                                n = (int64_t) ((uint64_t) old + (uint64_t) v);
                                res = uatomic_cmpxchg(int_p, old, n);
                        } while (old != res);
                        break;
                }
                default:
                        return -EINVAL;
                }
                if (v > 0 && n < old)
                        overflow = true;
                else if (v < 0 && n > old)
                        underflow = true;
                break;
        }
#endif
        default:
                return -EINVAL;
        }
        if (caa_unlikely(overflow && !lttng_bitmap_test_bit(index, layout->overflow_bitmap)))
                lttng_bitmap_set_bit(index, layout->overflow_bitmap);
        else if (caa_unlikely(underflow && !lttng_bitmap_test_bit(index, layout->underflow_bitmap)))
                lttng_bitmap_set_bit(index, layout->underflow_bitmap);
        if (remainder)
                *remainder = move_sum;
        return 0;
}
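
/*
 * Per-CPU add: the value is accumulated into the current CPU's counter.
 * When that per-CPU value exceeds global_sum_step in either direction,
 * half of the step (move_sum) is transferred to the global counter,
 * keeping the magnitude of each per-CPU counter bounded.
 */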
static inline int __lttng_counter_add_percpu(const struct lib_counter_config *config,
                                             struct lib_counter *counter,
                                             const size_t *dimension_indexes, int64_t v)
{
        int64_t move_sum;
        int ret;

        ret = __lttng_counter_add(config, COUNTER_ALLOC_PER_CPU, config->sync,
                                  counter, dimension_indexes, v, &move_sum);
        if (caa_unlikely(ret))
                return ret;
        if (caa_unlikely(move_sum))
                return __lttng_counter_add(config, COUNTER_ALLOC_GLOBAL, COUNTER_SYNC_GLOBAL,
                                           counter, dimension_indexes, move_sum, NULL);
        return 0;
}

static inline int __lttng_counter_add_global(const struct lib_counter_config *config,
                                             struct lib_counter *counter,
                                             const size_t *dimension_indexes, int64_t v)
{
        return __lttng_counter_add(config, COUNTER_ALLOC_GLOBAL, config->sync, counter,
                                   dimension_indexes, v, NULL);
}

static inline int lttng_counter_add(const struct lib_counter_config *config,
                                    struct lib_counter *counter,
                                    const size_t *dimension_indexes, int64_t v)
{
        switch (config->alloc) {
        case COUNTER_ALLOC_PER_CPU:        /* Fallthrough */
        case COUNTER_ALLOC_PER_CPU | COUNTER_ALLOC_GLOBAL:
                return __lttng_counter_add_percpu(config, counter, dimension_indexes, v);
        case COUNTER_ALLOC_GLOBAL:
                return __lttng_counter_add_global(config, counter, dimension_indexes, v);
        default:
                return -EINVAL;
        }
}
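
/*
 * Usage sketch (hypothetical caller; the counter and config objects come
 * from the counter creation path, which is outside this header):
 *
 *     size_t dimension_indexes[1] = { event_id };
 *     int ret;
 *
 *     ret = lttng_counter_add(&config, counter, dimension_indexes, 1);
 *     if (ret)
 *             ...report the error (-EINVAL, -EOVERFLOW, ...)...
 */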

static inline int lttng_counter_inc(const struct lib_counter_config *config,
                                    struct lib_counter *counter,
                                    const size_t *dimension_indexes)
{
        return lttng_counter_add(config, counter, dimension_indexes, 1);
}

static inline int lttng_counter_dec(const struct lib_counter_config *config,
                                    struct lib_counter *counter,
                                    const size_t *dimension_indexes)
{
        return lttng_counter_add(config, counter, dimension_indexes, -1);
}

#endif /* _LTTNG_COUNTER_API_H */