/* SPDX-License-Identifier: (GPL-2.0-only or LGPL-2.1-only)
 *
 * counter/counter-api.h
 *
 * LTTng Counters API, requiring counter/config.h
 *
 * Copyright (C) 2020 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 */
#ifndef _LTTNG_COUNTER_API_H
#define _LTTNG_COUNTER_API_H
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/percpu.h>
#include <linux/bitops.h>
#include <counter/counter.h>
#include <counter/counter-internal.h>
#include <wrapper/compiler_attributes.h>
#include <wrapper/limits.h>
/*
 * Using unsigned arithmetic because overflow is defined.
 */
24 static __always_inline
int __lttng_counter_add(const struct lib_counter_config
*config
,
25 enum lib_counter_config_alloc alloc
,
26 enum lib_counter_config_sync sync
,
27 struct lib_counter
*counter
,
28 const size_t *dimension_indexes
, int64_t v
,
32 bool overflow
= false, underflow
= false;
33 struct lib_counter_layout
*layout
;
36 if (unlikely(lttng_counter_validate_indexes(config
, counter
, dimension_indexes
)))
38 index
= lttng_counter_get_index(config
, counter
, dimension_indexes
);
41 case COUNTER_ALLOC_PER_CPU
:
42 layout
= per_cpu_ptr(counter
->percpu_counters
, smp_processor_id());
44 case COUNTER_ALLOC_GLOBAL
:
45 layout
= &counter
->global_counters
;
51 switch (config
->counter_size
) {
52 case COUNTER_SIZE_8_BIT
:
54 int8_t *int_p
= (int8_t *) layout
->counters
+ index
;
56 int8_t global_sum_step
= counter
->global_sum_step
.s8
;
60 case COUNTER_ALLOC_PER_CPU
:
65 n
= (int8_t) ((uint8_t) old
+ (uint8_t) v
);
66 if (unlikely(global_sum_step
)) {
67 if (unlikely(n
> (int8_t) global_sum_step
))
68 move_sum
= (int8_t) global_sum_step
/ 2;
69 else if (unlikely(n
< -(int8_t) global_sum_step
))
70 move_sum
= -((int8_t) global_sum_step
/ 2);
73 res
= cmpxchg_local(int_p
, old
, n
);
77 case COUNTER_ALLOC_GLOBAL
:
81 n
= (int8_t) ((uint8_t) old
+ (uint8_t) v
);
82 res
= cmpxchg(int_p
, old
, n
);
89 if (v
> 0 && (v
>= U8_MAX
|| n
< old
))
91 else if (v
< 0 && (v
<= -(s64
) U8_MAX
|| n
> old
))
95 case COUNTER_SIZE_16_BIT
:
97 int16_t *int_p
= (int16_t *) layout
->counters
+ index
;
99 int16_t global_sum_step
= counter
->global_sum_step
.s16
;
103 case COUNTER_ALLOC_PER_CPU
:
108 n
= (int16_t) ((uint16_t) old
+ (uint16_t) v
);
109 if (unlikely(global_sum_step
)) {
110 if (unlikely(n
> (int16_t) global_sum_step
))
111 move_sum
= (int16_t) global_sum_step
/ 2;
112 else if (unlikely(n
< -(int16_t) global_sum_step
))
113 move_sum
= -((int16_t) global_sum_step
/ 2);
116 res
= cmpxchg_local(int_p
, old
, n
);
117 } while (old
!= res
);
120 case COUNTER_ALLOC_GLOBAL
:
124 n
= (int16_t) ((uint16_t) old
+ (uint16_t) v
);
125 res
= cmpxchg(int_p
, old
, n
);
126 } while (old
!= res
);
132 if (v
> 0 && (v
>= U16_MAX
|| n
< old
))
134 else if (v
< 0 && (v
<= -(s64
) U16_MAX
|| n
> old
))
138 case COUNTER_SIZE_32_BIT
:
140 int32_t *int_p
= (int32_t *) layout
->counters
+ index
;
142 int32_t global_sum_step
= counter
->global_sum_step
.s32
;
146 case COUNTER_ALLOC_PER_CPU
:
151 n
= (int32_t) ((uint32_t) old
+ (uint32_t) v
);
152 if (unlikely(global_sum_step
)) {
153 if (unlikely(n
> (int32_t) global_sum_step
))
154 move_sum
= (int32_t) global_sum_step
/ 2;
155 else if (unlikely(n
< -(int32_t) global_sum_step
))
156 move_sum
= -((int32_t) global_sum_step
/ 2);
159 res
= cmpxchg_local(int_p
, old
, n
);
160 } while (old
!= res
);
163 case COUNTER_ALLOC_GLOBAL
:
167 n
= (int32_t) ((uint32_t) old
+ (uint32_t) v
);
168 res
= cmpxchg(int_p
, old
, n
);
169 } while (old
!= res
);
175 if (v
> 0 && (v
>= U32_MAX
|| n
< old
))
177 else if (v
< 0 && (v
<= -(s64
) U32_MAX
|| n
> old
))
181 #if BITS_PER_LONG == 64
182 case COUNTER_SIZE_64_BIT
:
184 int64_t *int_p
= (int64_t *) layout
->counters
+ index
;
186 int64_t global_sum_step
= counter
->global_sum_step
.s64
;
190 case COUNTER_ALLOC_PER_CPU
:
195 n
= (int64_t) ((uint64_t) old
+ (uint64_t) v
);
196 if (unlikely(global_sum_step
)) {
197 if (unlikely(n
> (int64_t) global_sum_step
))
198 move_sum
= (int64_t) global_sum_step
/ 2;
199 else if (unlikely(n
< -(int64_t) global_sum_step
))
200 move_sum
= -((int64_t) global_sum_step
/ 2);
203 res
= cmpxchg_local(int_p
, old
, n
);
204 } while (old
!= res
);
207 case COUNTER_ALLOC_GLOBAL
:
211 n
= (int64_t) ((uint64_t) old
+ (uint64_t) v
);
212 res
= cmpxchg(int_p
, old
, n
);
213 } while (old
!= res
);
219 if (v
> 0 && n
< old
)
221 else if (v
< 0 && n
> old
)
229 if (unlikely(overflow
&& !test_bit(index
, layout
->overflow_bitmap
)))
230 set_bit(index
, layout
->overflow_bitmap
);
231 else if (unlikely(underflow
&& !test_bit(index
, layout
->underflow_bitmap
)))
232 set_bit(index
, layout
->underflow_bitmap
);
234 *remainder
= move_sum
;
238 static __always_inline
int __lttng_counter_add_percpu(const struct lib_counter_config
*config
,
239 struct lib_counter
*counter
,
240 const size_t *dimension_indexes
, int64_t v
)
245 ret
= __lttng_counter_add(config
, COUNTER_ALLOC_PER_CPU
, config
->sync
,
246 counter
, dimension_indexes
, v
, &move_sum
);
249 if (unlikely(move_sum
))
250 return __lttng_counter_add(config
, COUNTER_ALLOC_GLOBAL
, COUNTER_SYNC_GLOBAL
,
251 counter
, dimension_indexes
, move_sum
, NULL
);
255 static __always_inline
int __lttng_counter_add_global(const struct lib_counter_config
*config
,
256 struct lib_counter
*counter
,
257 const size_t *dimension_indexes
, int64_t v
)
259 return __lttng_counter_add(config
, COUNTER_ALLOC_GLOBAL
, config
->sync
, counter
,
260 dimension_indexes
, v
, NULL
);
263 static __always_inline
int lttng_counter_add(const struct lib_counter_config
*config
,
264 struct lib_counter
*counter
,
265 const size_t *dimension_indexes
, int64_t v
)
267 switch (config
->alloc
) {
268 case COUNTER_ALLOC_PER_CPU
:
270 case COUNTER_ALLOC_PER_CPU
| COUNTER_ALLOC_GLOBAL
:
271 return __lttng_counter_add_percpu(config
, counter
, dimension_indexes
, v
);
272 case COUNTER_ALLOC_GLOBAL
:
273 return __lttng_counter_add_global(config
, counter
, dimension_indexes
, v
);
279 static __always_inline
int lttng_counter_inc(const struct lib_counter_config
*config
,
280 struct lib_counter
*counter
,
281 const size_t *dimension_indexes
)
283 return lttng_counter_add(config
, counter
, dimension_indexes
, 1);
286 static __always_inline
int lttng_counter_dec(const struct lib_counter_config
*config
,
287 struct lib_counter
*counter
,
288 const size_t *dimension_indexes
)
290 return lttng_counter_add(config
, counter
, dimension_indexes
, -1);
#endif /* _LTTNG_COUNTER_API_H */