#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 * Copyright (c) 2010      Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
#include <stdint.h>	/* uint8_t/uint16_t/uint32_t/uint64_t used below */
#include <urcu/compiler.h>
#include <urcu/system.h>

#ifndef uatomic_set
#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif
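/*
 * uatomic_set() and uatomic_read() are volatile store/load wrappers:
 * they keep the compiler from optimizing away or reordering the access
 * at compile time, but imply no memory barrier. Illustrative usage
 * (the variable is hypothetical, not part of this header):
 *
 *	static unsigned long counter;
 *
 *	uatomic_set(&counter, 0UL);
 *	unsigned long snapshot = uatomic_read(&counter);
 */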
#if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR
#ifdef ILLEGAL_INSTR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
}
#else
static inline __attribute__((always_inline, __noreturn__))
void _uatomic_link_error(void)
{
	__builtin_trap();
}
#endif

#else /* #if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR */
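/*
 * How the link-error trick works: for access sizes the architecture
 * cannot handle atomically, the switch statements below fall through
 * to a call to _uatomic_link_error(). When building with optimizations,
 * unreachable calls are eliminated and any reachable one leaves an
 * undefined symbol behind, turning the mistake into a link-time error;
 * without optimizations the call cannot be pruned, so the inline
 * fallback above traps at run time instead.
 */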
/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
		unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1((uint8_t *) addr, old,
				_new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2((uint16_t *) addr, old,
				_new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4((uint32_t *) addr, old,
				_new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8((uint64_t *) addr, old,
				_new);
#endif
	}
	/* Unsupported size: fail at link time (or trap, see above). */
	_uatomic_link_error();
	return 0;
}

#define uatomic_cmpxchg(addr, old, _new)				\
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),			\
					caa_cast_long_keep_sign(old),	\
					caa_cast_long_keep_sign(_new),	\
					sizeof(*(addr))))
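/*
 * Illustrative use of uatomic_cmpxchg() (the lock word is hypothetical,
 * not part of this header): the macro returns the value that was in
 * memory before the operation, so a return value equal to the expected
 * old value means the swap took place.
 *
 *	static unsigned long lock_word;
 *
 *	if (uatomic_cmpxchg(&lock_word, 0UL, 1UL) == 0UL) {
 *		critical section, then uatomic_set(&lock_word, 0UL);
 *	}
 */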
/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif
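/*
 * The __sync_*_and_fetch() builtins used above already imply a full
 * memory barrier, which is why the before/after helpers can be plain
 * compiler barriers here. The same reasoning applies to the
 * uatomic_or() and uatomic_add_return() variants below.
 */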
/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif
/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1((uint8_t *) addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2((uint16_t *) addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4((uint32_t *) addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8((uint64_t *) addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
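/*
 * uatomic_add_return() yields the updated value, which makes it a
 * natural fit for reference counting. Sketch (the refcount variable is
 * hypothetical, not part of this header):
 *
 *	static long refcount;
 *
 *	uatomic_inc(&refcount);
 *	if (uatomic_add_return(&refcount, -1L) == 0)
 *		last reference dropped: safe to reclaim
 */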
/* xchg */

#ifndef uatomic_xchg
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old;

		do {
			old = uatomic_read((uint8_t *) addr);
		} while (!__sync_bool_compare_and_swap_1((uint8_t *) addr,
				old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old;

		do {
			old = uatomic_read((uint16_t *) addr);
		} while (!__sync_bool_compare_and_swap_2((uint16_t *) addr,
				old, val));

		return old;
	}
#endif
	case 4:
	{
		uint32_t old;

		do {
			old = uatomic_read((uint32_t *) addr);
		} while (!__sync_bool_compare_and_swap_4((uint32_t *) addr,
				old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old;

		do {
			old = uatomic_read((uint64_t *) addr);
		} while (!__sync_bool_compare_and_swap_8((uint64_t *) addr,
				old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						\
	((__typeof__(*(addr))) _uatomic_exchange((addr),		\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
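/*
 * Note that exchange is emulated with a compare-and-swap retry loop
 * rather than __sync_lock_test_and_set(): the latter is only
 * documented as an acquire barrier, while uatomic_xchg() is expected
 * to imply a full memory barrier.
 */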
#else /* #ifndef uatomic_cmpxchg */
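/*
 * The architecture provides its own uatomic_cmpxchg(): build all the
 * remaining operations on top of it with compare-and-swap retry loops.
 * Each loop re-reads the current value, computes the new value from it,
 * and retries whenever a concurrent update slipped in between the read
 * and the cmpxchg (detected by cmpxchg returning something other than
 * the expected old value).
 */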
/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */
/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */
#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						\
	((__typeof__(*(addr))) _uatomic_exchange((addr),		\
					caa_cast_long_keep_sign(v),	\
					sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)		(void) uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
#endif

#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
#endif
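/*
 * uatomic_sub()/uatomic_sub_return() are implemented as an addition of
 * the negated, sign-preserved operand, and inc/dec as +1/-1. Sketch of
 * typical use (the variable is hypothetical, not part of this header):
 *
 *	static unsigned long nr_events;
 *
 *	uatomic_inc(&nr_events);
 *	uatomic_inc(&nr_events);
 *	uatomic_sub(&nr_events, 1);
 *	if (uatomic_sub_return(&nr_events, 1) == 0)
 *		counter is back to zero
 */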
#endif /* _URCU_UATOMIC_GENERIC_H */