1 // SPDX-FileCopyrightText: 1991-1994 by Xerox Corporation. All rights reserved.
2 // SPDX-FileCopyrightText: 1996-1999 by Silicon Graphics. All rights reserved.
3 // SPDX-FileCopyrightText: 1999-2004 Hewlett-Packard Development Company, L.P.
4 // SPDX-FileCopyrightText: 2009 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
5 // SPDX-FileCopyrightText: 2010 Paolo Bonzini
7 // SPDX-License-Identifier: LicenseRef-Boehm-GC
9 #ifndef _URCU_UATOMIC_GENERIC_H
10 #define _URCU_UATOMIC_GENERIC_H
/*
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
18 #include <urcu/compiler.h>
19 #include <urcu/system.h>
/* Relaxed atomic store: write `v' into *addr via CMM_STORE_SHARED; the
 * (void) cast discards the stored value so the macro is statement-like. */
#define uatomic_set(addr, v) ((void) CMM_STORE_SHARED(*(addr), (v)))
/*
 * Can be defined for the architecture.
 *
 * What needs to be emitted _before_ the `operation' with memory ordering `mo'.
 */
34 #ifndef _cmm_compat_c11_smp_mb__before_mo
35 # define _cmm_compat_c11_smp_mb__before_mo(operation, mo) cmm_smp_mb()
/*
 * Can be defined for the architecture.
 *
 * What needs to be emitted _after_ the `operation' with memory ordering `mo'.
 */
43 #ifndef _cmm_compat_c11_smp_mb__after_mo
44 # define _cmm_compat_c11_smp_mb__after_mo(operation, mo) cmm_smp_mb()
47 #define uatomic_load_store_return_op(op, addr, v, mo) \
50 _cmm_compat_c11_smp_mb__before_mo(op, mo); \
51 __typeof__((*addr)) _value = op(addr, v); \
52 _cmm_compat_c11_smp_mb__after_mo(op, mo); \
57 #define uatomic_load_store_op(op, addr, v, mo) \
59 _cmm_compat_c11_smp_mb__before_mo(op, mo); \
61 _cmm_compat_c11_smp_mb__after_mo(op, mo); \
64 #define uatomic_store(addr, v, mo) \
66 _cmm_compat_c11_smp_mb__before_mo(uatomic_set, mo); \
67 uatomic_set(addr, v); \
68 _cmm_compat_c11_smp_mb__after_mo(uatomic_set, mo); \
/*
 * Memory-ordered read-modify-write wrappers.  Each forwards to
 * uatomic_load_store_op(), which brackets the underlying operation with
 * the architecture's before/after compat barriers for memory order `mo'.
 * inc/dec are expressed as atomic add of +1/-1.
 */
#define uatomic_and_mo(addr, v, mo) \
	uatomic_load_store_op(uatomic_and, addr, v, mo)
#define uatomic_or_mo(addr, v, mo) \
	uatomic_load_store_op(uatomic_or, addr, v, mo)
#define uatomic_add_mo(addr, v, mo) \
	uatomic_load_store_op(uatomic_add, addr, v, mo)
#define uatomic_sub_mo(addr, v, mo) \
	uatomic_load_store_op(uatomic_sub, addr, v, mo)
#define uatomic_inc_mo(addr, mo) \
	uatomic_load_store_op(uatomic_add, addr, 1, mo)
#define uatomic_dec_mo(addr, mo) \
	uatomic_load_store_op(uatomic_add, addr, -1, mo)
/*
 * NOTE: We cannot just do switch (_value == (old) ? mos : mof), otherwise
 * the compiler emits a -Wduplicated-cond warning.
 */
92 #define uatomic_cmpxchg_mo(addr, old, new, mos, mof) \
95 _cmm_compat_c11_smp_mb__before_mo(uatomic_cmpxchg, mos); \
96 __typeof__(*(addr)) _value = uatomic_cmpxchg(addr, old, \
99 if (_value == (old)) { \
100 _cmm_compat_c11_smp_mb__after_mo(uatomic_cmpxchg, mos); \
102 _cmm_compat_c11_smp_mb__after_mo(uatomic_cmpxchg, mof); \
/* Atomic exchange with memory order `mo'; returns the previous value of
 * *addr.  Barriers are emitted by uatomic_load_store_return_op(). */
#define uatomic_xchg_mo(addr, v, mo) \
	uatomic_load_store_return_op(uatomic_xchg, addr, v, mo)
/*
 * Atomically add `v' to *addr and return the resulting value, honouring
 * memory order `mo'.
 *
 * Fix: the previous expansion dropped the `mo' argument, but
 * uatomic_load_store_return_op() takes four parameters — every use of
 * this macro was a compile error and the requested ordering barriers
 * were never applied.
 */
#define uatomic_add_return_mo(addr, v, mo) \
	uatomic_load_store_return_op(uatomic_add_return, addr, v, mo)
/*
 * Atomically subtract `v' from *addr and return the resulting value,
 * honouring memory order `mo'.
 *
 * Fix: the previous expansion dropped the `mo' argument, but
 * uatomic_load_store_return_op() takes four parameters — every use of
 * this macro was a compile error and the requested ordering barriers
 * were never applied.
 */
#define uatomic_sub_return_mo(addr, v, mo) \
	uatomic_load_store_return_op(uatomic_sub_return, addr, v, mo)
/* Relaxed atomic load of *addr through CMM_LOAD_SHARED (see
 * urcu/system.h for the exact access semantics). */
#define uatomic_read(addr) CMM_LOAD_SHARED(*(addr))
120 #define uatomic_load(addr, mo) \
123 _cmm_compat_c11_smp_mb__before_mo(uatomic_read, mo); \
124 __typeof__(*(addr)) _rcu_value = uatomic_read(addr); \
125 _cmm_compat_c11_smp_mb__after_mo(uatomic_read, mo); \
130 #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
132 static inline __attribute__((always_inline
))
133 void _uatomic_link_error(void)
136 * generate an illegal instruction. Cannot catch this with
137 * linker tricks when optimizations are disabled.
139 __asm__
__volatile__(ILLEGAL_INSTR
);
142 static inline __attribute__((always_inline
, __noreturn__
))
143 void _uatomic_link_error(void)
149 #else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
150 extern void _uatomic_link_error(void);
151 #endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
155 #ifndef uatomic_cmpxchg
156 static inline __attribute__((always_inline
))
157 unsigned long _uatomic_cmpxchg(void *addr
, unsigned long old
,
158 unsigned long _new
, int len
)
161 #ifdef UATOMIC_HAS_ATOMIC_BYTE
163 return __sync_val_compare_and_swap_1((uint8_t *) addr
, old
,
166 #ifdef UATOMIC_HAS_ATOMIC_SHORT
168 return __sync_val_compare_and_swap_2((uint16_t *) addr
, old
,
172 return __sync_val_compare_and_swap_4((uint32_t *) addr
, old
,
174 #if (CAA_BITS_PER_LONG == 64)
176 return __sync_val_compare_and_swap_8((uint64_t *) addr
, old
,
180 _uatomic_link_error();
185 #define uatomic_cmpxchg(addr, old, _new) \
186 ((__typeof__(*(addr))) _uatomic_cmpxchg((addr), \
187 caa_cast_long_keep_sign(old), \
188 caa_cast_long_keep_sign(_new),\
195 static inline __attribute__((always_inline
))
196 void _uatomic_and(void *addr
, unsigned long val
,
200 #ifdef UATOMIC_HAS_ATOMIC_BYTE
202 __sync_and_and_fetch_1((uint8_t *) addr
, val
);
205 #ifdef UATOMIC_HAS_ATOMIC_SHORT
207 __sync_and_and_fetch_2((uint16_t *) addr
, val
);
211 __sync_and_and_fetch_4((uint32_t *) addr
, val
);
213 #if (CAA_BITS_PER_LONG == 64)
215 __sync_and_and_fetch_8((uint64_t *) addr
, val
);
219 _uatomic_link_error();
222 #define uatomic_and(addr, v) \
223 (_uatomic_and((addr), \
224 caa_cast_long_keep_sign(v), \
226 #define cmm_smp_mb__before_uatomic_and() cmm_barrier()
227 #define cmm_smp_mb__after_uatomic_and() cmm_barrier()
234 static inline __attribute__((always_inline
))
235 void _uatomic_or(void *addr
, unsigned long val
,
239 #ifdef UATOMIC_HAS_ATOMIC_BYTE
241 __sync_or_and_fetch_1((uint8_t *) addr
, val
);
244 #ifdef UATOMIC_HAS_ATOMIC_SHORT
246 __sync_or_and_fetch_2((uint16_t *) addr
, val
);
250 __sync_or_and_fetch_4((uint32_t *) addr
, val
);
252 #if (CAA_BITS_PER_LONG == 64)
254 __sync_or_and_fetch_8((uint64_t *) addr
, val
);
258 _uatomic_link_error();
262 #define uatomic_or(addr, v) \
263 (_uatomic_or((addr), \
264 caa_cast_long_keep_sign(v), \
266 #define cmm_smp_mb__before_uatomic_or() cmm_barrier()
267 #define cmm_smp_mb__after_uatomic_or() cmm_barrier()
272 /* uatomic_add_return */
274 #ifndef uatomic_add_return
275 static inline __attribute__((always_inline
))
276 unsigned long _uatomic_add_return(void *addr
, unsigned long val
,
280 #ifdef UATOMIC_HAS_ATOMIC_BYTE
282 return __sync_add_and_fetch_1((uint8_t *) addr
, val
);
284 #ifdef UATOMIC_HAS_ATOMIC_SHORT
286 return __sync_add_and_fetch_2((uint16_t *) addr
, val
);
289 return __sync_add_and_fetch_4((uint32_t *) addr
, val
);
290 #if (CAA_BITS_PER_LONG == 64)
292 return __sync_add_and_fetch_8((uint64_t *) addr
, val
);
295 _uatomic_link_error();
300 #define uatomic_add_return(addr, v) \
301 ((__typeof__(*(addr))) _uatomic_add_return((addr), \
302 caa_cast_long_keep_sign(v), \
304 #endif /* #ifndef uatomic_add_return */
309 static inline __attribute__((always_inline
))
310 unsigned long _uatomic_exchange(void *addr
, unsigned long val
, int len
)
313 #ifdef UATOMIC_HAS_ATOMIC_BYTE
319 old
= uatomic_read((uint8_t *) addr
);
320 } while (!__sync_bool_compare_and_swap_1((uint8_t *) addr
,
326 #ifdef UATOMIC_HAS_ATOMIC_SHORT
332 old
= uatomic_read((uint16_t *) addr
);
333 } while (!__sync_bool_compare_and_swap_2((uint16_t *) addr
,
344 old
= uatomic_read((uint32_t *) addr
);
345 } while (!__sync_bool_compare_and_swap_4((uint32_t *) addr
,
350 #if (CAA_BITS_PER_LONG == 64)
356 old
= uatomic_read((uint64_t *) addr
);
357 } while (!__sync_bool_compare_and_swap_8((uint64_t *) addr
,
364 _uatomic_link_error();
368 #define uatomic_xchg(addr, v) \
369 ((__typeof__(*(addr))) _uatomic_exchange((addr), \
370 caa_cast_long_keep_sign(v), \
372 #endif /* #ifndef uatomic_xchg */
374 #else /* #ifndef uatomic_cmpxchg */
379 static inline __attribute__((always_inline
))
380 void _uatomic_and(void *addr
, unsigned long val
, int len
)
383 #ifdef UATOMIC_HAS_ATOMIC_BYTE
388 oldt
= uatomic_read((uint8_t *) addr
);
391 oldt
= _uatomic_cmpxchg(addr
, old
, old
& val
, 1);
392 } while (oldt
!= old
);
397 #ifdef UATOMIC_HAS_ATOMIC_SHORT
402 oldt
= uatomic_read((uint16_t *) addr
);
405 oldt
= _uatomic_cmpxchg(addr
, old
, old
& val
, 2);
406 } while (oldt
!= old
);
413 oldt
= uatomic_read((uint32_t *) addr
);
416 oldt
= _uatomic_cmpxchg(addr
, old
, old
& val
, 4);
417 } while (oldt
!= old
);
421 #if (CAA_BITS_PER_LONG == 64)
426 oldt
= uatomic_read((uint64_t *) addr
);
429 oldt
= _uatomic_cmpxchg(addr
, old
, old
& val
, 8);
430 } while (oldt
!= old
);
436 _uatomic_link_error();
439 #define uatomic_and(addr, v) \
440 (_uatomic_and((addr), \
441 caa_cast_long_keep_sign(v), \
443 #define cmm_smp_mb__before_uatomic_and() cmm_barrier()
444 #define cmm_smp_mb__after_uatomic_and() cmm_barrier()
446 #endif /* #ifndef uatomic_and */
451 static inline __attribute__((always_inline
))
452 void _uatomic_or(void *addr
, unsigned long val
, int len
)
455 #ifdef UATOMIC_HAS_ATOMIC_BYTE
460 oldt
= uatomic_read((uint8_t *) addr
);
463 oldt
= _uatomic_cmpxchg(addr
, old
, old
| val
, 1);
464 } while (oldt
!= old
);
469 #ifdef UATOMIC_HAS_ATOMIC_SHORT
474 oldt
= uatomic_read((uint16_t *) addr
);
477 oldt
= _uatomic_cmpxchg(addr
, old
, old
| val
, 2);
478 } while (oldt
!= old
);
487 oldt
= uatomic_read((uint32_t *) addr
);
490 oldt
= _uatomic_cmpxchg(addr
, old
, old
| val
, 4);
491 } while (oldt
!= old
);
495 #if (CAA_BITS_PER_LONG == 64)
500 oldt
= uatomic_read((uint64_t *) addr
);
503 oldt
= _uatomic_cmpxchg(addr
, old
, old
| val
, 8);
504 } while (oldt
!= old
);
510 _uatomic_link_error();
513 #define uatomic_or(addr, v) \
514 (_uatomic_or((addr), \
515 caa_cast_long_keep_sign(v), \
517 #define cmm_smp_mb__before_uatomic_or() cmm_barrier()
518 #define cmm_smp_mb__after_uatomic_or() cmm_barrier()
520 #endif /* #ifndef uatomic_or */
522 #ifndef uatomic_add_return
523 /* uatomic_add_return */
525 static inline __attribute__((always_inline
))
526 unsigned long _uatomic_add_return(void *addr
, unsigned long val
, int len
)
529 #ifdef UATOMIC_HAS_ATOMIC_BYTE
534 oldt
= uatomic_read((uint8_t *) addr
);
537 oldt
= uatomic_cmpxchg((uint8_t *) addr
,
539 } while (oldt
!= old
);
544 #ifdef UATOMIC_HAS_ATOMIC_SHORT
549 oldt
= uatomic_read((uint16_t *) addr
);
552 oldt
= uatomic_cmpxchg((uint16_t *) addr
,
554 } while (oldt
!= old
);
563 oldt
= uatomic_read((uint32_t *) addr
);
566 oldt
= uatomic_cmpxchg((uint32_t *) addr
,
568 } while (oldt
!= old
);
572 #if (CAA_BITS_PER_LONG == 64)
577 oldt
= uatomic_read((uint64_t *) addr
);
580 oldt
= uatomic_cmpxchg((uint64_t *) addr
,
582 } while (oldt
!= old
);
588 _uatomic_link_error();
592 #define uatomic_add_return(addr, v) \
593 ((__typeof__(*(addr))) _uatomic_add_return((addr), \
594 caa_cast_long_keep_sign(v), \
596 #endif /* #ifndef uatomic_add_return */
601 static inline __attribute__((always_inline
))
602 unsigned long _uatomic_exchange(void *addr
, unsigned long val
, int len
)
605 #ifdef UATOMIC_HAS_ATOMIC_BYTE
610 oldt
= uatomic_read((uint8_t *) addr
);
613 oldt
= uatomic_cmpxchg((uint8_t *) addr
,
615 } while (oldt
!= old
);
620 #ifdef UATOMIC_HAS_ATOMIC_SHORT
625 oldt
= uatomic_read((uint16_t *) addr
);
628 oldt
= uatomic_cmpxchg((uint16_t *) addr
,
630 } while (oldt
!= old
);
639 oldt
= uatomic_read((uint32_t *) addr
);
642 oldt
= uatomic_cmpxchg((uint32_t *) addr
,
644 } while (oldt
!= old
);
648 #if (CAA_BITS_PER_LONG == 64)
653 oldt
= uatomic_read((uint64_t *) addr
);
656 oldt
= uatomic_cmpxchg((uint64_t *) addr
,
658 } while (oldt
!= old
);
664 _uatomic_link_error();
668 #define uatomic_xchg(addr, v) \
669 ((__typeof__(*(addr))) _uatomic_exchange((addr), \
670 caa_cast_long_keep_sign(v), \
672 #endif /* #ifndef uatomic_xchg */
674 #endif /* #else #ifndef uatomic_cmpxchg */
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */
/* NOTE(review): upstream wraps each of these groups in #ifndef guards so
 * architectures can override them; the guards appear to have been dropped
 * by extraction here — confirm against the original header. */

/* Add without using the return value; barriers are plain compiler
 * barriers since the __sync-based primitives are full barriers. */
#define uatomic_add(addr, v) (void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add() cmm_barrier()
#define cmm_smp_mb__after_uatomic_add() cmm_barrier()

/* Subtraction expressed as addition of the negated (sign-preserving
 * long-cast) operand. */
#define uatomic_sub_return(addr, v) \
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v) \
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub() cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub() cmm_smp_mb__after_uatomic_add()

/* Increment / decrement as add of +1 / -1. */
#define uatomic_inc(addr) uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc() cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc() cmm_smp_mb__after_uatomic_add()
#define uatomic_dec(addr) uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec() cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec() cmm_smp_mb__after_uatomic_add()
707 #endif /* _URCU_UATOMIC_GENERIC_H */
/* (gitweb footer removed: extraction artifact, not part of the header) */