/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2020 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng Counters API, requiring counter/config.h
 */

#ifndef _LTTNG_COUNTER_API_H
#define _LTTNG_COUNTER_API_H

#include <stdint.h>
#include <limits.h>
#include <errno.h>
#include "counter.h"
#include "counter-internal.h"
#include <urcu/compiler.h>
#include <urcu/uatomic.h>
#include "common/bitmap.h"
#include "common/getcpu.h"

22/*
23 * Using unsigned arithmetic because overflow is defined.
24 */
25static inline int __lttng_counter_add(const struct lib_counter_config *config,
26 enum lib_counter_config_alloc alloc,
09fd84e6 27 enum lib_counter_config_sync sync __attribute__((unused)),
ebabbf58
MD
28 struct lib_counter *counter,
29 const size_t *dimension_indexes, int64_t v,
30 int64_t *remainder)
31{
32 size_t index;
33 bool overflow = false, underflow = false;
34 struct lib_counter_layout *layout;
35 int64_t move_sum = 0;
36
37 if (caa_unlikely(lttng_counter_validate_indexes(config, counter, dimension_indexes)))
38 return -EOVERFLOW;
39 index = lttng_counter_get_index(config, counter, dimension_indexes);
40
41 switch (alloc) {
42 case COUNTER_ALLOC_PER_CPU:
43 layout = &counter->percpu_counters[lttng_ust_get_cpu()];
44 break;
45 case COUNTER_ALLOC_GLOBAL:
46 layout = &counter->global_counters;
47 break;
48 default:
49 return -EINVAL;
50 }
51 if (caa_unlikely(!layout->counters))
52 return -ENODEV;
53
54 switch (config->counter_size) {
55 case COUNTER_SIZE_8_BIT:
56 {
57 int8_t *int_p = (int8_t *) layout->counters + index;
58 int8_t old, n, res;
59 int8_t global_sum_step = counter->global_sum_step.s8;
60
61 res = *int_p;
09fd84e6
MD
62 switch (alloc) {
63 case COUNTER_ALLOC_PER_CPU:
ebabbf58
MD
64 {
65 do {
66 move_sum = 0;
67 old = res;
68 n = (int8_t) ((uint8_t) old + (uint8_t) v);
d7123736
MD
69 if (caa_unlikely(global_sum_step)) {
70 if (caa_unlikely(n > (int8_t) global_sum_step))
71 move_sum = (int8_t) global_sum_step / 2;
72 else if (caa_unlikely(n < -(int8_t) global_sum_step))
73 move_sum = -((int8_t) global_sum_step / 2);
74 n -= move_sum;
75 }
ebabbf58
MD
76 res = uatomic_cmpxchg(int_p, old, n);
77 } while (old != res);
78 break;
79 }
09fd84e6 80 case COUNTER_ALLOC_GLOBAL:
ebabbf58
MD
81 {
82 do {
83 old = res;
84 n = (int8_t) ((uint8_t) old + (uint8_t) v);
85 res = uatomic_cmpxchg(int_p, old, n);
86 } while (old != res);
87 break;
88 }
3336564f
MD
89 default:
90 return -EINVAL;
ebabbf58
MD
91 }
92 if (v > 0 && (v >= UINT8_MAX || n < old))
93 overflow = true;
a4071608 94 else if (v < 0 && (v <= -(int64_t) UINT8_MAX || n > old))
ebabbf58
MD
95 underflow = true;
96 break;
97 }
98 case COUNTER_SIZE_16_BIT:
99 {
100 int16_t *int_p = (int16_t *) layout->counters + index;
101 int16_t old, n, res;
102 int16_t global_sum_step = counter->global_sum_step.s16;
103
104 res = *int_p;
09fd84e6
MD
105 switch (alloc) {
106 case COUNTER_ALLOC_PER_CPU:
ebabbf58
MD
107 {
108 do {
109 move_sum = 0;
110 old = res;
111 n = (int16_t) ((uint16_t) old + (uint16_t) v);
d7123736
MD
112 if (caa_unlikely(global_sum_step)) {
113 if (caa_unlikely(n > (int16_t) global_sum_step))
114 move_sum = (int16_t) global_sum_step / 2;
115 else if (caa_unlikely(n < -(int16_t) global_sum_step))
116 move_sum = -((int16_t) global_sum_step / 2);
117 n -= move_sum;
118 }
ebabbf58
MD
119 res = uatomic_cmpxchg(int_p, old, n);
120 } while (old != res);
121 break;
122 }
09fd84e6 123 case COUNTER_ALLOC_GLOBAL:
ebabbf58
MD
124 {
125 do {
126 old = res;
127 n = (int16_t) ((uint16_t) old + (uint16_t) v);
128 res = uatomic_cmpxchg(int_p, old, n);
129 } while (old != res);
130 break;
131 }
3336564f
MD
132 default:
133 return -EINVAL;
ebabbf58
MD
134 }
135 if (v > 0 && (v >= UINT16_MAX || n < old))
136 overflow = true;
a4071608 137 else if (v < 0 && (v <= -(int64_t) UINT16_MAX || n > old))
ebabbf58
MD
138 underflow = true;
139 break;
140 }
141 case COUNTER_SIZE_32_BIT:
142 {
143 int32_t *int_p = (int32_t *) layout->counters + index;
144 int32_t old, n, res;
145 int32_t global_sum_step = counter->global_sum_step.s32;
146
147 res = *int_p;
09fd84e6
MD
148 switch (alloc) {
149 case COUNTER_ALLOC_PER_CPU:
ebabbf58
MD
150 {
151 do {
152 move_sum = 0;
153 old = res;
154 n = (int32_t) ((uint32_t) old + (uint32_t) v);
d7123736
MD
155 if (caa_unlikely(global_sum_step)) {
156 if (caa_unlikely(n > (int32_t) global_sum_step))
157 move_sum = (int32_t) global_sum_step / 2;
158 else if (caa_unlikely(n < -(int32_t) global_sum_step))
159 move_sum = -((int32_t) global_sum_step / 2);
160 n -= move_sum;
161 }
ebabbf58
MD
162 res = uatomic_cmpxchg(int_p, old, n);
163 } while (old != res);
164 break;
165 }
09fd84e6 166 case COUNTER_ALLOC_GLOBAL:
ebabbf58
MD
167 {
168 do {
169 old = res;
170 n = (int32_t) ((uint32_t) old + (uint32_t) v);
171 res = uatomic_cmpxchg(int_p, old, n);
172 } while (old != res);
173 break;
174 }
3336564f
MD
175 default:
176 return -EINVAL;
ebabbf58
MD
177 }
178 if (v > 0 && (v >= UINT32_MAX || n < old))
179 overflow = true;
a4071608 180 else if (v < 0 && (v <= -(int64_t) UINT32_MAX || n > old))
ebabbf58
MD
181 underflow = true;
182 break;
183 }
184#if CAA_BITS_PER_LONG == 64
185 case COUNTER_SIZE_64_BIT:
186 {
187 int64_t *int_p = (int64_t *) layout->counters + index;
188 int64_t old, n, res;
189 int64_t global_sum_step = counter->global_sum_step.s64;
190
191 res = *int_p;
09fd84e6
MD
192 switch (alloc) {
193 case COUNTER_ALLOC_PER_CPU:
ebabbf58
MD
194 {
195 do {
196 move_sum = 0;
197 old = res;
198 n = (int64_t) ((uint64_t) old + (uint64_t) v);
d7123736
MD
199 if (caa_unlikely(global_sum_step)) {
200 if (caa_unlikely(n > (int64_t) global_sum_step))
201 move_sum = (int64_t) global_sum_step / 2;
202 else if (caa_unlikely(n < -(int64_t) global_sum_step))
203 move_sum = -((int64_t) global_sum_step / 2);
204 n -= move_sum;
205 }
ebabbf58
MD
206 res = uatomic_cmpxchg(int_p, old, n);
207 } while (old != res);
208 break;
209 }
09fd84e6 210 case COUNTER_ALLOC_GLOBAL:
ebabbf58
MD
211 {
212 do {
213 old = res;
214 n = (int64_t) ((uint64_t) old + (uint64_t) v);
215 res = uatomic_cmpxchg(int_p, old, n);
216 } while (old != res);
217 break;
218 }
3336564f
MD
219 default:
220 return -EINVAL;
ebabbf58
MD
221 }
222 if (v > 0 && n < old)
223 overflow = true;
224 else if (v < 0 && n > old)
225 underflow = true;
226 break;
227 }
228#endif
229 default:
230 return -EINVAL;
231 }
232 if (caa_unlikely(overflow && !lttng_bitmap_test_bit(index, layout->overflow_bitmap)))
233 lttng_bitmap_set_bit(index, layout->overflow_bitmap);
234 else if (caa_unlikely(underflow && !lttng_bitmap_test_bit(index, layout->underflow_bitmap)))
235 lttng_bitmap_set_bit(index, layout->underflow_bitmap);
236 if (remainder)
237 *remainder = move_sum;
238 return 0;
239}
240
241static inline int __lttng_counter_add_percpu(const struct lib_counter_config *config,
242 struct lib_counter *counter,
243 const size_t *dimension_indexes, int64_t v)
244{
08c1dfc4 245 int64_t move_sum = 0;
ebabbf58
MD
246 int ret;
247
248 ret = __lttng_counter_add(config, COUNTER_ALLOC_PER_CPU, config->sync,
249 counter, dimension_indexes, v, &move_sum);
250 if (caa_unlikely(ret))
251 return ret;
252 if (caa_unlikely(move_sum))
253 return __lttng_counter_add(config, COUNTER_ALLOC_GLOBAL, COUNTER_SYNC_GLOBAL,
254 counter, dimension_indexes, move_sum, NULL);
255 return 0;
256}
257
258static inline int __lttng_counter_add_global(const struct lib_counter_config *config,
259 struct lib_counter *counter,
260 const size_t *dimension_indexes, int64_t v)
261{
262 return __lttng_counter_add(config, COUNTER_ALLOC_GLOBAL, config->sync, counter,
263 dimension_indexes, v, NULL);
264}
265
266static inline int lttng_counter_add(const struct lib_counter_config *config,
267 struct lib_counter *counter,
268 const size_t *dimension_indexes, int64_t v)
269{
270 switch (config->alloc) {
271 case COUNTER_ALLOC_PER_CPU: /* Fallthrough */
272 case COUNTER_ALLOC_PER_CPU | COUNTER_ALLOC_GLOBAL:
273 return __lttng_counter_add_percpu(config, counter, dimension_indexes, v);
274 case COUNTER_ALLOC_GLOBAL:
275 return __lttng_counter_add_global(config, counter, dimension_indexes, v);
276 default:
277 return -EINVAL;
278 }
279}
280
281static inline int lttng_counter_inc(const struct lib_counter_config *config,
282 struct lib_counter *counter,
283 const size_t *dimension_indexes)
284{
285 return lttng_counter_add(config, counter, dimension_indexes, 1);
286}
287
288static inline int lttng_counter_dec(const struct lib_counter_config *config,
289 struct lib_counter *counter,
290 const size_t *dimension_indexes)
291{
292 return lttng_counter_add(config, counter, dimension_indexes, -1);
293}
294
295#endif /* _LTTNG_COUNTER_API_H */