1 #ifndef _URCU_UATOMIC_GENERIC_H
2 #define _URCU_UATOMIC_GENERIC_H
/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 * Copyright (c) 2010      Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
24 #include <urcu/compiler.h>
25 #include <urcu/system.h>
/*
 * Generic load/store accessors.  An architecture-specific header included
 * before this one may provide its own definitions, so only define the
 * generic fallbacks when not already present (same #ifndef-override
 * pattern used for uatomic_cmpxchg & co. below).
 */
#ifndef uatomic_set
#define uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif
#if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR
/*
 * Called when an atomic helper is invoked with an unsupported operand
 * size.  Without optimization the dead call below cannot be eliminated,
 * so the link-error trick does not work; trap at run time instead.
 */
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}
#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
/*
 * With optimization enabled, unreachable calls for unsupported sizes are
 * removed; any call that survives fails at link time, flagging the
 * misuse at build time rather than at run time.
 */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
58 #ifndef uatomic_cmpxchg
/*
 * Generic compare-and-exchange, dispatched on operand size @len.
 * Returns the value found at @addr before the operation; the store of
 * @_new happens only if that value equals @old.  Sizes 1 and 2 are only
 * supported when the architecture advertises them; unsupported sizes
 * reach _uatomic_link_error() (link error, or trap when unoptimized).
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1(addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2(addr, old, _new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4(addr, old, _new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8(addr, old, _new);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old),\
						(unsigned long)(_new),	      \
						sizeof(*(addr))))
/* uatomic_and */

#ifndef uatomic_and
/*
 * Atomically AND @val into the object at @addr, dispatched on operand
 * size @len.  No return value.
 *
 * Fix: each case now returns after its operation.  The previous code
 * fell through from one size to the next and then into
 * _uatomic_link_error(), so every call misfired.
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8(addr, val);
		return;
#endif
	}
	/* Unsupported size: link-time error (or trap when unoptimized). */
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		(unsigned long)(v),		\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_and */
126 static inline __attribute__((always_inline
))
127 void _uatomic_or(void *addr
, unsigned long val
,
131 #ifdef UATOMIC_HAS_ATOMIC_BYTE
133 __sync_or_and_fetch_1(addr
, val
);
135 #ifdef UATOMIC_HAS_ATOMIC_SHORT
137 __sync_or_and_fetch_2(addr
, val
);
140 __sync_or_and_fetch_4(addr
, val
);
141 #if (CAA_BITS_PER_LONG == 64)
143 __sync_or_and_fetch_8(addr
, val
);
146 _uatomic_link_error();
150 #define uatomic_or(addr, v) \
151 (_uatomic_or((addr), \
152 (unsigned long)(v), \
156 /* uatomic_add_return */
#ifndef uatomic_add_return
/*
 * Atomically add @val to the object at @addr and return the resulting
 * (post-addition) value, dispatched on operand size @len.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	/* Unsupported size: link-time error (or trap when unoptimized). */
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						(unsigned long)(v),	    \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
/* xchg */

#ifndef uatomic_xchg
/*
 * Atomically replace the object at @addr with @val and return the
 * previous value.  The __sync builtins provide no plain exchange, so
 * each size is implemented as a compare-and-swap retry loop: re-read
 * and retry until no concurrent update slipped in between the read and
 * the swap.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	/* Unsupported size: link-time error (or trap when unoptimized). */
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
253 #else /* #ifndef uatomic_cmpxchg */
#ifndef uatomic_and
/* uatomic_and */

/*
 * Fallback AND built from uatomic_cmpxchg, for architectures providing
 * only compare-and-exchange.  Classic CAS retry loop: the cmpxchg
 * return value doubles as the re-read, and we retry until no concurrent
 * update intervened between the read and the swap.
 *
 * Fix: each case now returns once the CAS succeeds.  The previous code
 * fell through from one size to the next and then into
 * _uatomic_link_error(), so every call misfired.
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	/* Unsupported size: link-time error (or trap when unoptimized). */
	_uatomic_link_error();
}

/*
 * Fix: expand to the helper _uatomic_and().  The previous expansion
 * "uatomic_and(" is a self-reference, which the preprocessor does not
 * re-expand, so it produced a call to an undeclared function.
 */
#define uatomic_and(addr, v)		\
	(_uatomic_and((addr),		\
		(unsigned long)(v),	\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_and */
#ifndef uatomic_or
/* uatomic_or */

/*
 * Fallback OR built from uatomic_cmpxchg via a CAS retry loop (see
 * _uatomic_and above for the pattern).
 *
 * Fix: each case now returns once the CAS succeeds.  The previous code
 * fell through from one size to the next and then into
 * _uatomic_link_error(), so every call misfired.
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	/* Unsupported size: link-time error (or trap when unoptimized). */
	_uatomic_link_error();
}

/*
 * Fix: expand to the helper _uatomic_or().  The previous expansion
 * "uatomic_or(" is a self-reference, which the preprocessor does not
 * re-expand, so it produced a call to an undeclared function.
 */
#define uatomic_or(addr, v)		\
	(_uatomic_or((addr),		\
		(unsigned long)(v),	\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_or */
#ifndef uatomic_add_return
/* uatomic_add_return */

/*
 * Fallback add-and-return built from uatomic_cmpxchg via a CAS retry
 * loop.  Returns the post-addition value.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 1);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 2);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 4);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 8);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	/* Unsupported size: link-time error (or trap when unoptimized). */
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						(unsigned long)(v),	    \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
/* xchg */

#ifndef uatomic_xchg
/*
 * Fallback exchange built from uatomic_cmpxchg via a CAS retry loop.
 * Returns the value found at @addr before the store of @val.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 1);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 2);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 4);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 8);
		} while (oldt != old);

		return old;
	}
#endif
	}
	/* Unsupported size: link-time error (or trap when unoptimized). */
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
526 #endif /* #else #ifndef uatomic_cmpxchg */
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

/*
 * Derived operations.  uatomic_add/inc/dec keep the #ifndef override
 * pattern so an architecture header may provide faster versions;
 * sub/sub_return are always expressed via their add counterparts.
 */
#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#endif

#define uatomic_sub_return(addr, v)	uatomic_add_return((addr), -(v))
#define uatomic_sub(addr, v)		uatomic_add((addr), -(v))

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#endif
549 #endif /* _URCU_UATOMIC_GENERIC_H */