/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2020 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng Counters API, requiring counter/config.h
 */
#ifndef _LTTNG_COUNTER_API_H
#define _LTTNG_COUNTER_API_H
#include <stddef.h>
#include <stdint.h>
#include <stdbool.h>
#include <errno.h>

#include <urcu/compiler.h>
#include <urcu/uatomic.h>

#include "counter-internal.h"
#include "common/bitmap.h"
#include "libringbuffer/getcpu.h"
/*
 * Using unsigned arithmetic because overflow is defined.
 */
24 static inline int __lttng_counter_add(const struct lib_counter_config
*config
,
25 enum lib_counter_config_alloc alloc
,
26 enum lib_counter_config_sync sync
,
27 struct lib_counter
*counter
,
28 const size_t *dimension_indexes
, int64_t v
,
32 bool overflow
= false, underflow
= false;
33 struct lib_counter_layout
*layout
;
36 if (caa_unlikely(lttng_counter_validate_indexes(config
, counter
, dimension_indexes
)))
38 index
= lttng_counter_get_index(config
, counter
, dimension_indexes
);
41 case COUNTER_ALLOC_PER_CPU
:
42 layout
= &counter
->percpu_counters
[lttng_ust_get_cpu()];
44 case COUNTER_ALLOC_GLOBAL
:
45 layout
= &counter
->global_counters
;
50 if (caa_unlikely(!layout
->counters
))
53 switch (config
->counter_size
) {
54 case COUNTER_SIZE_8_BIT
:
56 int8_t *int_p
= (int8_t *) layout
->counters
+ index
;
58 int8_t global_sum_step
= counter
->global_sum_step
.s8
;
62 case COUNTER_SYNC_PER_CPU
:
67 n
= (int8_t) ((uint8_t) old
+ (uint8_t) v
);
68 if (caa_unlikely(n
> (int8_t) global_sum_step
))
69 move_sum
= (int8_t) global_sum_step
/ 2;
70 else if (caa_unlikely(n
< -(int8_t) global_sum_step
))
71 move_sum
= -((int8_t) global_sum_step
/ 2);
73 res
= uatomic_cmpxchg(int_p
, old
, n
);
77 case COUNTER_SYNC_GLOBAL
:
81 n
= (int8_t) ((uint8_t) old
+ (uint8_t) v
);
82 res
= uatomic_cmpxchg(int_p
, old
, n
);
89 if (v
> 0 && (v
>= UINT8_MAX
|| n
< old
))
91 else if (v
< 0 && (v
<= -(int64_t) UINT8_MAX
|| n
> old
))
95 case COUNTER_SIZE_16_BIT
:
97 int16_t *int_p
= (int16_t *) layout
->counters
+ index
;
99 int16_t global_sum_step
= counter
->global_sum_step
.s16
;
103 case COUNTER_SYNC_PER_CPU
:
108 n
= (int16_t) ((uint16_t) old
+ (uint16_t) v
);
109 if (caa_unlikely(n
> (int16_t) global_sum_step
))
110 move_sum
= (int16_t) global_sum_step
/ 2;
111 else if (caa_unlikely(n
< -(int16_t) global_sum_step
))
112 move_sum
= -((int16_t) global_sum_step
/ 2);
114 res
= uatomic_cmpxchg(int_p
, old
, n
);
115 } while (old
!= res
);
118 case COUNTER_SYNC_GLOBAL
:
122 n
= (int16_t) ((uint16_t) old
+ (uint16_t) v
);
123 res
= uatomic_cmpxchg(int_p
, old
, n
);
124 } while (old
!= res
);
130 if (v
> 0 && (v
>= UINT16_MAX
|| n
< old
))
132 else if (v
< 0 && (v
<= -(int64_t) UINT16_MAX
|| n
> old
))
136 case COUNTER_SIZE_32_BIT
:
138 int32_t *int_p
= (int32_t *) layout
->counters
+ index
;
140 int32_t global_sum_step
= counter
->global_sum_step
.s32
;
144 case COUNTER_SYNC_PER_CPU
:
149 n
= (int32_t) ((uint32_t) old
+ (uint32_t) v
);
150 if (caa_unlikely(n
> (int32_t) global_sum_step
))
151 move_sum
= (int32_t) global_sum_step
/ 2;
152 else if (caa_unlikely(n
< -(int32_t) global_sum_step
))
153 move_sum
= -((int32_t) global_sum_step
/ 2);
155 res
= uatomic_cmpxchg(int_p
, old
, n
);
156 } while (old
!= res
);
159 case COUNTER_SYNC_GLOBAL
:
163 n
= (int32_t) ((uint32_t) old
+ (uint32_t) v
);
164 res
= uatomic_cmpxchg(int_p
, old
, n
);
165 } while (old
!= res
);
171 if (v
> 0 && (v
>= UINT32_MAX
|| n
< old
))
173 else if (v
< 0 && (v
<= -(int64_t) UINT32_MAX
|| n
> old
))
177 #if CAA_BITS_PER_LONG == 64
178 case COUNTER_SIZE_64_BIT
:
180 int64_t *int_p
= (int64_t *) layout
->counters
+ index
;
182 int64_t global_sum_step
= counter
->global_sum_step
.s64
;
186 case COUNTER_SYNC_PER_CPU
:
191 n
= (int64_t) ((uint64_t) old
+ (uint64_t) v
);
192 if (caa_unlikely(n
> (int64_t) global_sum_step
))
193 move_sum
= (int64_t) global_sum_step
/ 2;
194 else if (caa_unlikely(n
< -(int64_t) global_sum_step
))
195 move_sum
= -((int64_t) global_sum_step
/ 2);
197 res
= uatomic_cmpxchg(int_p
, old
, n
);
198 } while (old
!= res
);
201 case COUNTER_SYNC_GLOBAL
:
205 n
= (int64_t) ((uint64_t) old
+ (uint64_t) v
);
206 res
= uatomic_cmpxchg(int_p
, old
, n
);
207 } while (old
!= res
);
213 if (v
> 0 && n
< old
)
215 else if (v
< 0 && n
> old
)
223 if (caa_unlikely(overflow
&& !lttng_bitmap_test_bit(index
, layout
->overflow_bitmap
)))
224 lttng_bitmap_set_bit(index
, layout
->overflow_bitmap
);
225 else if (caa_unlikely(underflow
&& !lttng_bitmap_test_bit(index
, layout
->underflow_bitmap
)))
226 lttng_bitmap_set_bit(index
, layout
->underflow_bitmap
);
228 *remainder
= move_sum
;
232 static inline int __lttng_counter_add_percpu(const struct lib_counter_config
*config
,
233 struct lib_counter
*counter
,
234 const size_t *dimension_indexes
, int64_t v
)
239 ret
= __lttng_counter_add(config
, COUNTER_ALLOC_PER_CPU
, config
->sync
,
240 counter
, dimension_indexes
, v
, &move_sum
);
241 if (caa_unlikely(ret
))
243 if (caa_unlikely(move_sum
))
244 return __lttng_counter_add(config
, COUNTER_ALLOC_GLOBAL
, COUNTER_SYNC_GLOBAL
,
245 counter
, dimension_indexes
, move_sum
, NULL
);
249 static inline int __lttng_counter_add_global(const struct lib_counter_config
*config
,
250 struct lib_counter
*counter
,
251 const size_t *dimension_indexes
, int64_t v
)
253 return __lttng_counter_add(config
, COUNTER_ALLOC_GLOBAL
, config
->sync
, counter
,
254 dimension_indexes
, v
, NULL
);
257 static inline int lttng_counter_add(const struct lib_counter_config
*config
,
258 struct lib_counter
*counter
,
259 const size_t *dimension_indexes
, int64_t v
)
261 switch (config
->alloc
) {
262 case COUNTER_ALLOC_PER_CPU
: /* Fallthrough */
263 case COUNTER_ALLOC_PER_CPU
| COUNTER_ALLOC_GLOBAL
:
264 return __lttng_counter_add_percpu(config
, counter
, dimension_indexes
, v
);
265 case COUNTER_ALLOC_GLOBAL
:
266 return __lttng_counter_add_global(config
, counter
, dimension_indexes
, v
);
/*
 * Increment the selected counter cell by one.
 * Convenience wrapper over lttng_counter_add(..., 1).
 */
static inline int lttng_counter_inc(const struct lib_counter_config *config,
				    struct lib_counter *counter,
				    const size_t *dimension_indexes)
{
	return lttng_counter_add(config, counter, dimension_indexes, 1);
}
/*
 * Decrement the selected counter cell by one.
 * Convenience wrapper over lttng_counter_add(..., -1).
 */
static inline int lttng_counter_dec(const struct lib_counter_config *config,
				    struct lib_counter *counter,
				    const size_t *dimension_indexes)
{
	return lttng_counter_add(config, counter, dimension_indexes, -1);
}
#endif /* _LTTNG_COUNTER_API_H */