// SPDX-FileCopyrightText: 1991-1994 by Xerox Corporation. All rights reserved.
// SPDX-FileCopyrightText: 1996-1999 by Silicon Graphics. All rights reserved.
// SPDX-FileCopyrightText: 1999-2004 Hewlett-Packard Development Company, L.P.
// SPDX-FileCopyrightText: 2009 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
// SPDX-FileCopyrightText: 2010 Paolo Bonzini
//
// SPDX-License-Identifier: LicenseRef-Boehm-GC
#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H
/*
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
#include <stdint.h>
#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif
#ifndef uatomic_set
#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif
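
/*
 * Illustrative usage sketch (not part of the original header): these
 * two macros are plain shared-memory accesses with no implied memory
 * barrier; "ready" and do_work() below are hypothetical.
 *
 *	static int ready;
 *
 *	uatomic_set(&ready, 1);		// store to shared memory
 *	if (uatomic_read(&ready))	// load from shared memory
 *		do_work();
 */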
#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
#ifdef ILLEGAL_INSTR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
}
#else
static inline __attribute__((always_inline, __noreturn__))
void _uatomic_link_error(void)
{
	__builtin_trap();
}
#endif
#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
		unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1((uint8_t *) addr, old,
				_new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2((uint16_t *) addr, old,
				_new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4((uint32_t *) addr, old,
				_new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8((uint64_t *) addr, old,
				_new);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_cmpxchg(addr, old, _new)				\
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),			\
					caa_cast_long_keep_sign(old),	\
					caa_cast_long_keep_sign(_new),	\
					sizeof(*(addr))))
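
/*
 * Illustrative usage sketch (not part of the original header):
 * uatomic_cmpxchg() returns the value *addr held before the attempt,
 * so equality with the expected old value signals success; "lock" is
 * a hypothetical caller-defined word.
 *
 *	static unsigned long lock;
 *
 *	if (uatomic_cmpxchg(&lock, 0UL, 1UL) == 0UL) {
 *		// we atomically changed 0 -> 1: the slot was ours
 *	}
 */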
/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif
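
/*
 * Illustrative usage sketch (not part of the original header):
 * atomically clear bits in a shared mask; "flags" is hypothetical.
 *
 *	static unsigned int flags;
 *
 *	uatomic_and(&flags, ~0x1U);	// atomically clear bit 0
 */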
/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif
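
/*
 * Illustrative usage sketch (not part of the original header): the
 * counterpart of the uatomic_and example above.
 *
 *	uatomic_or(&flags, 0x1U);	// atomically set bit 0
 */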
/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1((uint8_t *) addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2((uint16_t *) addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4((uint32_t *) addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8((uint64_t *) addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)				\
	((__typeof__(*(addr))) _uatomic_add_return((addr),	\
					caa_cast_long_keep_sign(v), \
					sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
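
/*
 * Illustrative usage sketch (not part of the original header):
 * uatomic_add_return() yields the new value, which suits reference
 * counting; "refcount" and release_resource() are hypothetical.
 *
 *	static long refcount;
 *
 *	if (uatomic_add_return(&refcount, -1) == 0)
 *		release_resource();
 */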
#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old;

		do {
			old = uatomic_read((uint8_t *) addr);
		} while (!__sync_bool_compare_and_swap_1((uint8_t *) addr,
				old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old;

		do {
			old = uatomic_read((uint16_t *) addr);
		} while (!__sync_bool_compare_and_swap_2((uint16_t *) addr,
				old, val));

		return old;
	}
#endif
	case 4:
	{
		uint32_t old;

		do {
			old = uatomic_read((uint32_t *) addr);
		} while (!__sync_bool_compare_and_swap_4((uint32_t *) addr,
				old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old;

		do {
			old = uatomic_read((uint64_t *) addr);
		} while (!__sync_bool_compare_and_swap_8((uint64_t *) addr,
				old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)					\
	((__typeof__(*(addr))) _uatomic_exchange((addr),	\
					caa_cast_long_keep_sign(v), \
					sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
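
/*
 * Illustrative usage sketch (not part of the original header):
 * uatomic_xchg() installs a new value and hands back the previous
 * one, e.g. to atomically detach a list; "head" is hypothetical.
 *
 *	static struct node *head;
 *
 *	struct node *batch = uatomic_xchg(&head, NULL);
 */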
#else /* #ifndef uatomic_cmpxchg */
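
/*
 * The architecture-specific header supplied uatomic_cmpxchg, so the
 * remaining operations are emulated with compare-and-swap retry
 * loops: read the current value, compute the desired result, and
 * retry until cmpxchg confirms no concurrent update slipped in.
 */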
#ifndef uatomic_and
/* uatomic_and */

static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */
#ifndef uatomic_or
/* uatomic_or */

static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */
#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)				\
	((__typeof__(*(addr))) _uatomic_add_return((addr),	\
					caa_cast_long_keep_sign(v), \
					sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)					\
	((__typeof__(*(addr))) _uatomic_exchange((addr),	\
					caa_cast_long_keep_sign(v), \
					sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
#endif /* #else #ifndef uatomic_cmpxchg */
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)	(void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
#endif

#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
#define uatomic_inc(addr)	uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)	uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
#endif
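
/*
 * Illustrative usage sketch (not part of the original header): the
 * derived helpers are shorthand over uatomic_add_return/uatomic_add;
 * "hits" is hypothetical.
 *
 *	static unsigned long hits;
 *
 *	uatomic_inc(&hits);		// hits += 1, atomically
 *	uatomic_sub(&hits, 2);		// hits -= 2, atomically
 */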
#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */