1 #ifndef _URCU_UATOMIC_GENERIC_H
2 #define _URCU_UATOMIC_GENERIC_H
/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 * Copyright (c) 2010      Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
24 #include <urcu/compiler.h>
25 #include <urcu/system.h>
/*
 * Default atomic store/load: go through the CMM_STORE_SHARED /
 * CMM_LOAD_SHARED accessors from <urcu/system.h>. Architecture headers
 * may pre-define these macros to override the generic versions, hence
 * the #ifndef guards.
 */
#ifndef uatomic_set
#define uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif
#if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR
/*
 * Reached when an atomic primitive is invoked with an unsupported operand
 * size. Without optimization the link-time-error trick below cannot work
 * (the dead call is not eliminated), so trap at run time instead.
 */
static inline __attribute__((always_inline))
void _uatomic_link_error()
{
#ifdef ILLEGAL_INSTR
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}

#else /* #if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR */
/*
 * Deliberately declared but never defined: any call that survives dead-code
 * elimination (i.e. an unsupported operand size) fails at link time.
 */
extern void _uatomic_link_error ();
#endif /* #else #if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR */
58 #ifndef uatomic_cmpxchg
59 static inline __attribute__((always_inline
))
60 unsigned long _uatomic_cmpxchg(void *addr
, unsigned long old
,
61 unsigned long _new
, int len
)
64 #ifdef UATOMIC_HAS_ATOMIC_BYTE
66 return __sync_val_compare_and_swap_1(addr
, old
, _new
);
68 #ifdef UATOMIC_HAS_ATOMIC_SHORT
70 return __sync_val_compare_and_swap_2(addr
, old
, _new
);
73 return __sync_val_compare_and_swap_4(addr
, old
, _new
);
74 #if (CAA_BITS_PER_LONG == 64)
76 return __sync_val_compare_and_swap_8(addr
, old
, _new
);
79 _uatomic_link_error();
84 #define uatomic_cmpxchg(addr, old, _new) \
85 ((__typeof__(*(addr))) _uatomic_cmpxchg((addr), \
86 caa_cast_long_keep_sign(old), \
87 caa_cast_long_keep_sign(_new),\
/* uatomic_and */

#ifndef uatomic_and
/*
 * Generic atomic AND: *addr &= val, dispatching on operand size to the
 * __sync_and_and_fetch_N builtins. Unsupported sizes fall through to
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_and */
/* uatomic_or */

#ifndef uatomic_or
/*
 * Generic atomic OR: *addr |= val, dispatching on operand size to the
 * __sync_or_and_fetch_N builtins. Unsupported sizes fall through to
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_or */
/* uatomic_add_return */

#ifndef uatomic_add_return
/*
 * Generic atomic add-and-return: *addr += val, returning the NEW value,
 * dispatching on operand size to the __sync_add_and_fetch_N builtins.
 * Unsupported sizes fall through to _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable: silences missing-return warnings */
}


#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
/* xchg */

#ifndef uatomic_xchg
/*
 * Generic atomic exchange: store val into *addr, returning the previous
 * value. Built from a read + bool-compare-and-swap retry loop, since the
 * __sync_lock_test_and_set builtin is only an acquire barrier on some
 * architectures. Unsupported sizes fall through to _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable: silences missing-return warnings */
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
262 #else /* #ifndef uatomic_cmpxchg */
/* uatomic_and */

#ifndef uatomic_and
/*
 * Generic atomic AND built on the architecture-provided uatomic_cmpxchg():
 * read the current value, then retry a compare-and-swap of (old & val)
 * until no concurrent update intervenes.
 *
 * Fix: use the uatomic_cmpxchg() macro, as _uatomic_add_return() and
 * _uatomic_exchange() do in this branch — the _uatomic_cmpxchg() helper is
 * only defined in the #ifndef uatomic_cmpxchg branch and does not exist
 * here, so calling it could never compile/link.
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, old & val);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, old & val);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, old & val);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, old & val);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_and */
/* uatomic_or */

#ifndef uatomic_or
/*
 * Generic atomic OR built on the architecture-provided uatomic_cmpxchg():
 * read the current value, then retry a compare-and-swap of (old | val)
 * until no concurrent update intervenes.
 *
 * Fix: use the uatomic_cmpxchg() macro, as _uatomic_add_return() and
 * _uatomic_exchange() do in this branch — the _uatomic_cmpxchg() helper is
 * only defined in the #ifndef uatomic_cmpxchg branch and does not exist
 * here, so calling it could never compile/link.
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, old | val);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, old | val);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, old | val);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, old | val);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_or */
#ifndef uatomic_add_return
/* uatomic_add_return */

/*
 * Generic atomic add-and-return built on the architecture-provided
 * uatomic_cmpxchg(): retry a compare-and-swap of (old + val) until no
 * concurrent update intervenes. Returns the NEW value (old + val).
 * Unsupported sizes fall through to _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable: silences missing-return warnings */
}


#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
/* xchg */

#ifndef uatomic_xchg
/*
 * Generic atomic exchange built on the architecture-provided
 * uatomic_cmpxchg(): retry a compare-and-swap of val until no concurrent
 * update intervenes. Returns the previous value of *addr.
 * Unsupported sizes fall through to _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable: silences missing-return warnings */
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
556 #endif /* #else #ifndef uatomic_cmpxchg */
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

/*
 * Derived operations, all expressed in terms of uatomic_add_return /
 * uatomic_add. The #ifndef guards let architecture headers provide
 * optimized replacements.
 */
#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#endif

#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#endif
581 #endif /* _URCU_UATOMIC_GENERIC_H */