/* userspace-rcu: include/urcu/uatomic/generic.h */
#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 * Copyright (c) 2010 Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
#include <stdint.h>
#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef uatomic_set
#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif
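
/*
 * Usage sketch for the defaults above (illustrative only; `counter` is a
 * hypothetical variable, not part of this API):
 *
 *	static unsigned long counter;
 *
 *	uatomic_set(&counter, 0);
 *	unsigned long v = uatomic_read(&counter);
 *
 * Unless an architecture-specific header provides its own definitions, these
 * expand to CMM_STORE_SHARED()/CMM_LOAD_SHARED(), i.e. a single shared store
 * or load.
 */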

#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
#ifdef ILLEGAL_INSTR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
}
#else
static inline __attribute__((always_inline, __noreturn__))
void _uatomic_link_error(void)
{
	__builtin_trap();
}
#endif

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
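
/*
 * Note on _uatomic_link_error(): the generic helpers below call it for
 * operand sizes the architecture does not support. When building with
 * optimizations, such calls are expected to be optimized out of valid code;
 * any call that remains has no definition and therefore fails at link time.
 * Without optimizations (or with UATOMIC_NO_LINK_ERROR), the inline
 * definitions above trap at run time instead.
 */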

/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1((uint8_t *) addr, old,
				_new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2((uint16_t *) addr, old,
				_new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4((uint32_t *) addr, old,
				_new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8((uint64_t *) addr, old,
				_new);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_cmpxchg(addr, old, _new)				\
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),			\
					caa_cast_long_keep_sign(old),	\
					caa_cast_long_keep_sign(_new),	\
					sizeof(*(addr))))

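/*
 * Usage sketch for uatomic_cmpxchg() (illustrative only; `counter` is a
 * hypothetical variable, not part of this API):
 *
 *	static unsigned long counter;
 *	unsigned long old;
 *
 *	old = uatomic_cmpxchg(&counter, 0, 1);
 *	if (old == 0) {
 *		// counter was 0 and has been atomically replaced by 1
 *	} else {
 *		// counter held `old` and was left unchanged
 *	}
 *
 * The macro returns the value read from *addr before the operation; the
 * swap took place only if that value equals the expected `old` argument.
 */
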
/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif
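
/*
 * Usage sketch for uatomic_and() (illustrative only; `flags` and
 * MY_FLAG_MASK are hypothetical, not part of this API):
 *
 *	static unsigned long flags;
 *
 *	cmm_smp_mb__before_uatomic_and();
 *	uatomic_and(&flags, ~MY_FLAG_MASK);	// atomically clear the bits
 *	cmm_smp_mb__after_uatomic_and();
 *
 * The cmm_smp_mb__before/after helpers are only needed when the caller
 * requires memory ordering around the operation; uatomic_and() itself
 * returns no value.
 */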

/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
	return;
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif
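
/*
 * Usage sketch for uatomic_or() (illustrative only; `flags` and MY_FLAG_MASK
 * are hypothetical, not part of this API):
 *
 *	cmm_smp_mb__before_uatomic_or();
 *	uatomic_or(&flags, MY_FLAG_MASK);	// atomically set the bits
 *	cmm_smp_mb__after_uatomic_or();
 */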


/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1((uint8_t *) addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2((uint16_t *) addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4((uint32_t *) addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8((uint64_t *) addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
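
/*
 * Usage sketch for uatomic_add_return() (illustrative only; `refcount` is a
 * hypothetical variable, not part of this API):
 *
 *	static long refcount = 1;
 *
 *	(void) uatomic_add_return(&refcount, 1);	// take a reference
 *	if (uatomic_add_return(&refcount, -1) == 0) {
 *		// last reference dropped
 *	}
 *
 * The returned value is the result of the addition, converted back to the
 * type of *addr.
 */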

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old;

		do {
			old = uatomic_read((uint8_t *) addr);
		} while (!__sync_bool_compare_and_swap_1((uint8_t *) addr,
				old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old;

		do {
			old = uatomic_read((uint16_t *) addr);
		} while (!__sync_bool_compare_and_swap_2((uint16_t *) addr,
				old, val));

		return old;
	}
#endif
	case 4:
	{
		uint32_t old;

		do {
			old = uatomic_read((uint32_t *) addr);
		} while (!__sync_bool_compare_and_swap_4((uint32_t *) addr,
				old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old;

		do {
			old = uatomic_read((uint64_t *) addr);
		} while (!__sync_bool_compare_and_swap_8((uint64_t *) addr,
				old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						\
	((__typeof__(*(addr))) _uatomic_exchange((addr),		\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
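
/*
 * Usage sketch for uatomic_xchg() (illustrative only; `counter` is a
 * hypothetical variable, not part of this API):
 *
 *	static unsigned long counter;
 *	unsigned long prev;
 *
 *	prev = uatomic_xchg(&counter, 0);	// read and reset in one step
 *
 * The previous value of *addr is returned; *addr now holds the new value.
 */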

#else /* #ifndef uatomic_cmpxchg */

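/*
 * When the architecture provides uatomic_cmpxchg() itself, the remaining
 * primitives below are built on top of it as compare-and-swap retry loops:
 * read the current value, compute the updated value, and retry until the
 * cmpxchg observes the same value that was read.
 */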
#ifndef uatomic_and
/* uatomic_and */

static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */

#ifndef uatomic_or
/* uatomic_or */

static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */

#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						\
	((__typeof__(*(addr))) _uatomic_exchange((addr),		\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */

/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
#endif

#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
#endif
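
/*
 * Usage sketch for the derived helpers (illustrative only; `nr_events` is a
 * hypothetical variable, not part of this API):
 *
 *	static unsigned long nr_events;
 *
 *	uatomic_inc(&nr_events);
 *	uatomic_add(&nr_events, 16);
 *	uatomic_sub(&nr_events, 4);
 *	if (uatomic_sub_return(&nr_events, 1) == 0) {
 *		// counter just dropped to zero
 *	}
 */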

#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */