#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 * Copyright (c) 2010 Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>
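/*
 * Overview (added note): this header provides generic fallback
 * implementations of the uatomic_*() primitives on top of the GCC
 * __sync_*() builtins. Architecture-specific headers typically define
 * faster versions of some macros (uatomic_cmpxchg, uatomic_xchg,
 * uatomic_add_return, ...) before including this file; the #ifndef
 * guards below then skip the corresponding generic definitions.
 */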
#define uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))

#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
	/*
	 * Generate an illegal instruction. Cannot catch this with linker
	 * tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
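/*
 * Added note on the error strategy above: when building with optimizations
 * enabled, calls to _uatomic_link_error() for unsupported operand sizes
 * should be eliminated as dead code; any call that survives leaves an
 * undefined symbol and therefore fails at link time. Without optimizations,
 * the inline version traps at run time instead.
 */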
/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1(addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2(addr, old, _new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4(addr, old, _new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8(addr, old, _new);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),			      \
						caa_cast_long_keep_sign(old), \
						caa_cast_long_keep_sign(_new),\
						sizeof(*(addr))))
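/*
 * Illustrative use (added example, not part of the original header):
 * uatomic_cmpxchg() returns the value that was in *addr before the call;
 * the store only happened if that value compared equal to "old".
 *
 *	unsigned long flag = 0;
 *
 *	if (uatomic_cmpxchg(&flag, 0, 1) == 0) {
 *		// we observed 0 and atomically replaced it with 1
 *	}
 */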
/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_and()	cmm_barrier()
#define cmm_smp_mb__after_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */
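/*
 * Added note: the __sync_*() builtins used above already imply a full
 * memory barrier, so the cmm_smp_mb__before_*()/cmm_smp_mb__after_*()
 * hooks only need to be compiler barriers in this generic implementation.
 */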
/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_or()		cmm_barrier()
#define cmm_smp_mb__after_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */
/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
/* xchg */

#ifndef uatomic_xchg
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
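/*
 * Added note: the exchange above is implemented as a compare-and-swap
 * retry loop: re-read the current value and attempt the CAS until no
 * concurrent update sneaks in between the read and the swap, then return
 * the value that was replaced.
 */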
#else /* #ifndef uatomic_cmpxchg */
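/*
 * Added note: from this point on, the architecture supplied its own
 * uatomic_cmpxchg(). The remaining generic operations (and, or,
 * add_return, xchg) are therefore built as compare-and-swap retry loops
 * on top of that primitive.
 */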
#ifndef uatomic_and
/* uatomic_and */

static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_and()	cmm_barrier()
#define cmm_smp_mb__after_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */
#ifndef uatomic_or
/* uatomic_or */

static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_or()		cmm_barrier()
#define cmm_smp_mb__after_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */
#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_add()	cmm_barrier()
#define cmm_smp_mb__after_add()		cmm_barrier()
#endif

#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_sub()	cmm_smp_mb__before_add()
#define cmm_smp_mb__after_sub()		cmm_smp_mb__after_add()

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define cmm_smp_mb__before_inc()	cmm_smp_mb__before_add()
#define cmm_smp_mb__after_inc()		cmm_smp_mb__after_add()
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#define cmm_smp_mb__before_dec()	cmm_smp_mb__before_add()
#define cmm_smp_mb__after_dec()		cmm_smp_mb__after_add()
#endif
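/*
 * Illustrative use (added example, not part of the original header):
 * all helpers operate on the object a pointer refers to.
 *
 *	static unsigned long refcount;
 *
 *	uatomic_inc(&refcount);
 *	...
 *	if (uatomic_sub_return(&refcount, 1) == 0) {
 *		// last reference was just dropped
 *	}
 */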
#endif /* _URCU_UATOMIC_GENERIC_H */