#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 * Copyright (c) 2010      Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef uatomic_set
#define uatomic_set(addr, v)    ((void) CMM_STORE_SHARED(*(addr), (v)))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)      CMM_LOAD_SHARED(*(addr))
#endif

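/*
 * _uatomic_link_error() is the fallback used when an operation is
 * requested on an operand size the architecture cannot handle
 * atomically.  In optimized builds the extern declaration below has no
 * definition, so any reachable call to it is caught at link time.  In
 * unoptimized builds (or with UATOMIC_NO_LINK_ERROR) the call cannot be
 * proven unreachable, so it traps at run time instead.
 */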
#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
        /*
         * generate an illegal instruction. Cannot catch this with
         * linker tricks when optimizations are disabled.
         */
        __asm__ __volatile__(ILLEGAL_INSTR);
#else
        __builtin_trap();
#endif
}

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */

/* cmpxchg */

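/*
 * The 1-byte and 2-byte cases below are only compiled in when the
 * architecture header defines UATOMIC_HAS_ATOMIC_BYTE and/or
 * UATOMIC_HAS_ATOMIC_SHORT; the 8-byte case requires a 64-bit long
 * (CAA_BITS_PER_LONG == 64).
 */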
#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
                               unsigned long _new, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
                return __sync_val_compare_and_swap_1(addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
                return __sync_val_compare_and_swap_2(addr, old, _new);
#endif
        case 4:
                return __sync_val_compare_and_swap_4(addr, old, _new);
#if (CAA_BITS_PER_LONG == 64)
        case 8:
                return __sync_val_compare_and_swap_8(addr, old, _new);
#endif
        }
        _uatomic_link_error();
        return 0;
}

#define uatomic_cmpxchg(addr, old, _new)                                      \
        ((__typeof__(*(addr))) _uatomic_cmpxchg((addr),                       \
                                        caa_cast_long_keep_sign(old),         \
                                        caa_cast_long_keep_sign(_new),        \
                                        sizeof(*(addr))))

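/*
 * Illustrative usage (not part of this header): claiming a shared flag
 * with uatomic_cmpxchg().  The variable name is made up for the example.
 *
 *      static int flag;
 *
 *      if (uatomic_cmpxchg(&flag, 0, 1) == 0) {
 *              // this thread atomically changed flag from 0 to 1
 *      }
 */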
/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
                  int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
                __sync_and_and_fetch_1(addr, val);
                return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
                __sync_and_and_fetch_2(addr, val);
                return;
#endif
        case 4:
                __sync_and_and_fetch_4(addr, val);
                return;
#if (CAA_BITS_PER_LONG == 64)
        case 8:
                __sync_and_and_fetch_8(addr, val);
                return;
#endif
        }
        _uatomic_link_error();
}

#define uatomic_and(addr, v)                    \
        (_uatomic_and((addr),                   \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()        cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()         cmm_barrier()

#endif
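/*
 * Note: the __sync_*() builtins used by the generic implementations in
 * this branch are documented by GCC as implying a full memory barrier,
 * which is why the cmm_smp_mb__before/after_*() helpers here only need
 * to be compiler barriers (cmm_barrier()).
 */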

/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
                 int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
                __sync_or_and_fetch_1(addr, val);
                return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
                __sync_or_and_fetch_2(addr, val);
                return;
#endif
        case 4:
                __sync_or_and_fetch_4(addr, val);
                return;
#if (CAA_BITS_PER_LONG == 64)
        case 8:
                __sync_or_and_fetch_8(addr, val);
                return;
#endif
        }
        _uatomic_link_error();
}

#define uatomic_or(addr, v)                     \
        (_uatomic_or((addr),                    \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()         cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()          cmm_barrier()

#endif

/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
                                  int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
                return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
                return __sync_add_and_fetch_2(addr, val);
#endif
        case 4:
                return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
        case 8:
                return __sync_add_and_fetch_8(addr, val);
#endif
        }
        _uatomic_link_error();
        return 0;
}

#define uatomic_add_return(addr, v)                                     \
        ((__typeof__(*(addr))) _uatomic_add_return((addr),              \
                                        caa_cast_long_keep_sign(v),     \
                                        sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

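/*
 * The exchange is implemented as a compare-and-swap retry loop rather
 * than with __sync_lock_test_and_set(), since the latter is only
 * documented as an acquire barrier and, on some targets, is not a full
 * exchange (it may only be able to store the value 1).
 */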
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
        {
                unsigned char old;

                do {
                        old = uatomic_read((unsigned char *)addr);
                } while (!__sync_bool_compare_and_swap_1(addr, old, val));

                return old;
        }
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
        {
                unsigned short old;

                do {
                        old = uatomic_read((unsigned short *)addr);
                } while (!__sync_bool_compare_and_swap_2(addr, old, val));

                return old;
        }
#endif
        case 4:
        {
                unsigned int old;

                do {
                        old = uatomic_read((unsigned int *)addr);
                } while (!__sync_bool_compare_and_swap_4(addr, old, val));

                return old;
        }
#if (CAA_BITS_PER_LONG == 64)
        case 8:
        {
                unsigned long old;

                do {
                        old = uatomic_read((unsigned long *)addr);
                } while (!__sync_bool_compare_and_swap_8(addr, old, val));

                return old;
        }
#endif
        }
        _uatomic_link_error();
        return 0;
}

#define uatomic_xchg(addr, v)                                           \
        ((__typeof__(*(addr))) _uatomic_exchange((addr),                \
                                        caa_cast_long_keep_sign(v),     \
                                        sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#else /* #ifndef uatomic_cmpxchg */

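/*
 * The architecture provides its own uatomic_cmpxchg(): implement the
 * remaining operations (and, or, add_return, xchg) as compare-and-swap
 * retry loops built on top of it.
 */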
#ifndef uatomic_and
/* uatomic_and */

static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
        {
                unsigned char old, oldt;

                oldt = uatomic_read((unsigned char *)addr);
                do {
                        old = oldt;
                        oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
                } while (oldt != old);

                return;
        }
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
        {
                unsigned short old, oldt;

                oldt = uatomic_read((unsigned short *)addr);
                do {
                        old = oldt;
                        oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
                } while (oldt != old);

                return;
        }
#endif
        case 4:
        {
                unsigned int old, oldt;

                oldt = uatomic_read((unsigned int *)addr);
                do {
                        old = oldt;
                        oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
                } while (oldt != old);

                return;
        }
#if (CAA_BITS_PER_LONG == 64)
        case 8:
        {
                unsigned long old, oldt;

                oldt = uatomic_read((unsigned long *)addr);
                do {
                        old = oldt;
                        oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
                } while (oldt != old);

                return;
        }
#endif
        }
        _uatomic_link_error();
}

#define uatomic_and(addr, v)                    \
        (_uatomic_and((addr),                   \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()        cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()         cmm_barrier()

#endif /* #ifndef uatomic_and */

#ifndef uatomic_or
/* uatomic_or */

static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
        {
                unsigned char old, oldt;

                oldt = uatomic_read((unsigned char *)addr);
                do {
                        old = oldt;
                        oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
                } while (oldt != old);

                return;
        }
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
        {
                unsigned short old, oldt;

                oldt = uatomic_read((unsigned short *)addr);
                do {
                        old = oldt;
                        oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
                } while (oldt != old);

                return;
        }
#endif
        case 4:
        {
                unsigned int old, oldt;

                oldt = uatomic_read((unsigned int *)addr);
                do {
                        old = oldt;
                        oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
                } while (oldt != old);

                return;
        }
#if (CAA_BITS_PER_LONG == 64)
        case 8:
        {
                unsigned long old, oldt;

                oldt = uatomic_read((unsigned long *)addr);
                do {
                        old = oldt;
                        oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
                } while (oldt != old);

                return;
        }
#endif
        }
        _uatomic_link_error();
}

#define uatomic_or(addr, v)                     \
        (_uatomic_or((addr),                    \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()         cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()          cmm_barrier()

#endif /* #ifndef uatomic_or */

#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
        {
                unsigned char old, oldt;

                oldt = uatomic_read((unsigned char *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned char *)addr,
                                               old, old + val);
                } while (oldt != old);

                return old + val;
        }
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
        {
                unsigned short old, oldt;

                oldt = uatomic_read((unsigned short *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned short *)addr,
                                               old, old + val);
                } while (oldt != old);

                return old + val;
        }
#endif
        case 4:
        {
                unsigned int old, oldt;

                oldt = uatomic_read((unsigned int *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned int *)addr,
                                               old, old + val);
                } while (oldt != old);

                return old + val;
        }
#if (CAA_BITS_PER_LONG == 64)
        case 8:
        {
                unsigned long old, oldt;

                oldt = uatomic_read((unsigned long *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned long *)addr,
                                               old, old + val);
                } while (oldt != old);

                return old + val;
        }
#endif
        }
        _uatomic_link_error();
        return 0;
}

#define uatomic_add_return(addr, v)                                     \
        ((__typeof__(*(addr))) _uatomic_add_return((addr),              \
                                        caa_cast_long_keep_sign(v),     \
                                        sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
        {
                unsigned char old, oldt;

                oldt = uatomic_read((unsigned char *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned char *)addr,
                                               old, val);
                } while (oldt != old);

                return old;
        }
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
        {
                unsigned short old, oldt;

                oldt = uatomic_read((unsigned short *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned short *)addr,
                                               old, val);
                } while (oldt != old);

                return old;
        }
#endif
        case 4:
        {
                unsigned int old, oldt;

                oldt = uatomic_read((unsigned int *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned int *)addr,
                                               old, val);
                } while (oldt != old);

                return old;
        }
#if (CAA_BITS_PER_LONG == 64)
        case 8:
        {
                unsigned long old, oldt;

                oldt = uatomic_read((unsigned long *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned long *)addr,
                                               old, val);
                } while (oldt != old);

                return old;
        }
#endif
        }
        _uatomic_link_error();
        return 0;
}

#define uatomic_xchg(addr, v)                                           \
        ((__typeof__(*(addr))) _uatomic_exchange((addr),                \
                                        caa_cast_long_keep_sign(v),     \
                                        sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */

/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)            (void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add()        cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()         cmm_barrier()
#endif

#define uatomic_sub_return(addr, v)     \
        uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)            \
        uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()        cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()         cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
#define uatomic_inc(addr)               uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()        cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()         cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)               uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()        cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()         cmm_smp_mb__after_uatomic_add()
#endif
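/*
 * Illustrative usage (not part of this header): a shared counter
 * updated concurrently by several threads.  Names are made up for the
 * example.
 *
 *      static unsigned long nr_events;
 *
 *      uatomic_inc(&nr_events);                // in each worker thread
 *      ...
 *      printf("%lu\n", uatomic_read(&nr_events));      // in the reader
 */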

#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */