#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 * Copyright (c) 2010      Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#define uatomic_set(addr, v)    CMM_STORE_SHARED(*(addr), (v))

#define uatomic_read(addr)      CMM_LOAD_SHARED(*(addr))
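
/*
 * Illustrative sketch (not part of this header): uatomic_set() and
 * uatomic_read() expand to CMM_STORE_SHARED()/CMM_LOAD_SHARED(), i.e. plain
 * shared accesses with no memory barrier implied.  The `counter' variable
 * below is hypothetical.
 *
 *      static unsigned long counter;
 *
 *      uatomic_set(&counter, 42UL);
 *      unsigned long snapshot = uatomic_read(&counter);
 */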

#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
        /*
         * generate an illegal instruction. Cannot catch this with linker
         * tricks when optimizations are disabled.
         */
        __asm__ __volatile__(ILLEGAL_INSTR);
#else
        __builtin_trap();
#endif
}
#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
                unsigned long _new, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
                return __sync_val_compare_and_swap_1(addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
                return __sync_val_compare_and_swap_2(addr, old, _new);
#endif
        case 4:
                return __sync_val_compare_and_swap_4(addr, old, _new);
#if (CAA_BITS_PER_LONG == 64)
        case 8:
                return __sync_val_compare_and_swap_8(addr, old, _new);
#endif
        }
        _uatomic_link_error();
        return 0;
}

#define uatomic_cmpxchg(addr, old, _new)                                      \
        ((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old), \
                                                (unsigned long)(_new),        \
                                                sizeof(*(addr))))
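
/*
 * Illustrative sketch (not part of this header): uatomic_cmpxchg() returns
 * the previous content of *addr, so success is detected by comparing the
 * return value with the expected old value.  The `lock' variable is
 * hypothetical.
 *
 *      static int lock;
 *
 *      if (uatomic_cmpxchg(&lock, 0, 1) == 0) {
 *              // lock acquired; critical section here
 *              uatomic_set(&lock, 0);
 *      }
 */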

/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
                int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
                __sync_and_and_fetch_1(addr, val);
                return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
                __sync_and_and_fetch_2(addr, val);
                return;
#endif
        case 4:
                __sync_and_and_fetch_4(addr, val);
                return;
#if (CAA_BITS_PER_LONG == 64)
        case 8:
                __sync_and_and_fetch_8(addr, val);
                return;
#endif
        }
        _uatomic_link_error();
}

#define uatomic_and(addr, v)                    \
        (_uatomic_and((addr),                   \
                (unsigned long)(v),             \
                sizeof(*(addr))))
#endif /* #ifndef uatomic_and */

/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
                int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
                __sync_or_and_fetch_1(addr, val);
                return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
                __sync_or_and_fetch_2(addr, val);
                return;
#endif
        case 4:
                __sync_or_and_fetch_4(addr, val);
                return;
#if (CAA_BITS_PER_LONG == 64)
        case 8:
                __sync_or_and_fetch_8(addr, val);
                return;
#endif
        }
        _uatomic_link_error();
}

#define uatomic_or(addr, v)                     \
        (_uatomic_or((addr),                    \
                (unsigned long)(v),             \
                sizeof(*(addr))))
#endif /* #ifndef uatomic_or */

/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
                int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
                return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
                return __sync_add_and_fetch_2(addr, val);
#endif
        case 4:
                return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
        case 8:
                return __sync_add_and_fetch_8(addr, val);
#endif
        }
        _uatomic_link_error();
        return 0;
}

#define uatomic_add_return(addr, v)                                     \
        ((__typeof__(*(addr))) _uatomic_add_return((addr),              \
                                                (unsigned long)(v),     \
                                                sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
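
/*
 * Illustrative sketch (not part of this header): uatomic_add_return() yields
 * the value of *addr after the addition, cast back to the type of *addr.
 * The `refcount' variable and put_resource() helper are hypothetical.
 *
 *      static long refcount;
 *
 *      if (uatomic_add_return(&refcount, -1) == 0)
 *              put_resource();
 */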

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
        {
                unsigned char old;

                do {
                        old = uatomic_read((unsigned char *)addr);
                } while (!__sync_bool_compare_and_swap_1(addr, old, val));

                return old;
        }
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
        {
                unsigned short old;

                do {
                        old = uatomic_read((unsigned short *)addr);
                } while (!__sync_bool_compare_and_swap_2(addr, old, val));

                return old;
        }
#endif
        case 4:
        {
                unsigned int old;

                do {
                        old = uatomic_read((unsigned int *)addr);
                } while (!__sync_bool_compare_and_swap_4(addr, old, val));

                return old;
        }
#if (CAA_BITS_PER_LONG == 64)
        case 8:
        {
                unsigned long old;

                do {
                        old = uatomic_read((unsigned long *)addr);
                } while (!__sync_bool_compare_and_swap_8(addr, old, val));

                return old;
        }
#endif
        }
        _uatomic_link_error();
        return 0;
}

#define uatomic_xchg(addr, v)                                                 \
        ((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v),  \
                                                sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
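
/*
 * Illustrative sketch (not part of this header): uatomic_xchg() stores the
 * new value and returns the previous content of *addr, which is handy for
 * atomically stealing a pointer.  `struct node' and `head' are hypothetical.
 *
 *      static struct node *head;
 *
 *      struct node *stolen = uatomic_xchg(&head, NULL);
 */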

#else /* #ifndef uatomic_cmpxchg */
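
/*
 * uatomic_cmpxchg() is provided by the architecture; emulate the remaining
 * primitives with compare-and-swap retry loops.  Each helper below reads the
 * current value, computes the new value, and retries the cmpxchg until no
 * concurrent update slipped in between the read and the swap.
 */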

#ifndef uatomic_and
/* uatomic_and */

static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
        {
                unsigned char old, oldt;

                oldt = uatomic_read((unsigned char *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned char *)addr,
                                        old, old & val);
                } while (oldt != old);

                return;
        }
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
        {
                unsigned short old, oldt;

                oldt = uatomic_read((unsigned short *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned short *)addr,
                                        old, old & val);
                } while (oldt != old);

                return;
        }
#endif
        case 4:
        {
                unsigned int old, oldt;

                oldt = uatomic_read((unsigned int *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned int *)addr,
                                        old, old & val);
                } while (oldt != old);

                return;
        }
#if (CAA_BITS_PER_LONG == 64)
        case 8:
        {
                unsigned long old, oldt;

                oldt = uatomic_read((unsigned long *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned long *)addr,
                                        old, old & val);
                } while (oldt != old);

                return;
        }
#endif
        }
        _uatomic_link_error();
}

#define uatomic_and(addr, v)            \
        (_uatomic_and((addr),           \
                (unsigned long)(v),     \
                sizeof(*(addr))))
#endif /* #ifndef uatomic_and */

#ifndef uatomic_or
/* uatomic_or */

static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
        {
                unsigned char old, oldt;

                oldt = uatomic_read((unsigned char *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned char *)addr,
                                        old, old | val);
                } while (oldt != old);

                return;
        }
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
        {
                unsigned short old, oldt;

                oldt = uatomic_read((unsigned short *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned short *)addr,
                                        old, old | val);
                } while (oldt != old);

                return;
        }
#endif
        case 4:
        {
                unsigned int old, oldt;

                oldt = uatomic_read((unsigned int *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned int *)addr,
                                        old, old | val);
                } while (oldt != old);

                return;
        }
#if (CAA_BITS_PER_LONG == 64)
        case 8:
        {
                unsigned long old, oldt;

                oldt = uatomic_read((unsigned long *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned long *)addr,
                                        old, old | val);
                } while (oldt != old);

                return;
        }
#endif
        }
        _uatomic_link_error();
}

#define uatomic_or(addr, v)             \
        (_uatomic_or((addr),            \
                (unsigned long)(v),     \
                sizeof(*(addr))))
#endif /* #ifndef uatomic_or */

#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
        {
                unsigned char old, oldt;

                oldt = uatomic_read((unsigned char *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned char *)addr,
                                        old, old + val);
                } while (oldt != old);

                return old + val;
        }
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
        {
                unsigned short old, oldt;

                oldt = uatomic_read((unsigned short *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned short *)addr,
                                        old, old + val);
                } while (oldt != old);

                return old + val;
        }
#endif
        case 4:
        {
                unsigned int old, oldt;

                oldt = uatomic_read((unsigned int *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned int *)addr,
                                        old, old + val);
                } while (oldt != old);

                return old + val;
        }
#if (CAA_BITS_PER_LONG == 64)
        case 8:
        {
                unsigned long old, oldt;

                oldt = uatomic_read((unsigned long *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned long *)addr,
                                        old, old + val);
                } while (oldt != old);

                return old + val;
        }
#endif
        }
        _uatomic_link_error();
        return 0;
}

#define uatomic_add_return(addr, v)                                     \
        ((__typeof__(*(addr))) _uatomic_add_return((addr),              \
                                                (unsigned long)(v),     \
                                                sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
        switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
        case 1:
        {
                unsigned char old, oldt;

                oldt = uatomic_read((unsigned char *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned char *)addr,
                                        old, val);
                } while (oldt != old);

                return old;
        }
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
        case 2:
        {
                unsigned short old, oldt;

                oldt = uatomic_read((unsigned short *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned short *)addr,
                                        old, val);
                } while (oldt != old);

                return old;
        }
#endif
        case 4:
        {
                unsigned int old, oldt;

                oldt = uatomic_read((unsigned int *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned int *)addr,
                                        old, val);
                } while (oldt != old);

                return old;
        }
#if (CAA_BITS_PER_LONG == 64)
        case 8:
        {
                unsigned long old, oldt;

                oldt = uatomic_read((unsigned long *)addr);
                do {
                        old = oldt;
                        oldt = uatomic_cmpxchg((unsigned long *)addr,
                                        old, val);
                } while (oldt != old);

                return old;
        }
#endif
        }
        _uatomic_link_error();
        return 0;
}

#define uatomic_xchg(addr, v)                                                 \
        ((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v),  \
                                                sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */

/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#define uatomic_add(addr, v)            (void)uatomic_add_return((addr), (v))

#define uatomic_sub_return(addr, v)     uatomic_add_return((addr), -(v))
#define uatomic_sub(addr, v)            uatomic_add((addr), -(v))

#define uatomic_inc(addr)               uatomic_add((addr), 1)

#define uatomic_dec(addr)               uatomic_add((addr), -1)
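
/*
 * Illustrative sketch (not part of this header): the helpers above all
 * funnel through uatomic_add_return().  The `nr_items' counter is
 * hypothetical.
 *
 *      static unsigned long nr_items;
 *
 *      uatomic_inc(&nr_items);         // nr_items += 1
 *      uatomic_sub(&nr_items, 2);      // nr_items -= 2
 *      uatomic_dec(&nr_items);         // nr_items -= 1
 */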

#endif /* _URCU_UATOMIC_GENERIC_H */