#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H
/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 * Copyright (c) 2010      Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
#include <urcu/compiler.h>
#include <urcu/system.h>
/*
 * uatomic_set()/uatomic_read(): plain store/load of a shared variable,
 * implemented on top of CMM_STORE_SHARED/CMM_LOAD_SHARED from
 * <urcu/system.h>. These are simple accesses, not read-modify-write
 * operations.
 */
#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))

#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
/*
 * _uatomic_link_error(): called from unreachable size branches of the
 * dispatch functions below. When optimizing, the call is left undefined
 * so an unsupported operand size fails at link time. Without
 * optimizations the dead branches are not eliminated, so we must
 * provide a body that traps at run time instead.
 */
#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}
#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
60 #ifndef uatomic_cmpxchg
61 static inline __attribute__((always_inline
))
62 unsigned long _uatomic_cmpxchg(void *addr
, unsigned long old
,
63 unsigned long _new
, int len
)
66 #ifdef UATOMIC_HAS_ATOMIC_BYTE
68 return __sync_val_compare_and_swap_1(addr
, old
, _new
);
70 #ifdef UATOMIC_HAS_ATOMIC_SHORT
72 return __sync_val_compare_and_swap_2(addr
, old
, _new
);
75 return __sync_val_compare_and_swap_4(addr
, old
, _new
);
76 #if (CAA_BITS_PER_LONG == 64)
78 return __sync_val_compare_and_swap_8(addr
, old
, _new
);
81 _uatomic_link_error();
86 #define uatomic_cmpxchg(addr, old, _new) \
87 ((__typeof__(*(addr))) _uatomic_cmpxchg((addr), \
88 caa_cast_long_keep_sign(old), \
89 caa_cast_long_keep_sign(_new),\
/* uatomic_and */

#ifndef uatomic_and
/*
 * Atomic AND of @val into *addr, dispatching on operand size via the
 * GCC __sync builtins. No value is returned.
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8(addr, val);
		return;
#endif
	}
	/* Unsupported operand size: link error (or trap, see above). */
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */
/* uatomic_or */

#ifndef uatomic_or
/*
 * Atomic OR of @val into *addr, dispatching on operand size via the
 * GCC __sync builtins. No value is returned.
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8(addr, val);
		return;
#endif
	}
	/* Unsupported operand size: link error (or trap, see above). */
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */
/* uatomic_add_return */

#ifndef uatomic_add_return
/*
 * Atomically add @val to *addr and return the resulting (new) value,
 * dispatching on operand size via the GCC __sync builtins.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	/* Unsupported operand size: link error (or trap, see above). */
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
/* xchg */

#ifndef uatomic_xchg
/*
 * Atomically exchange *addr with @val and return the previous value.
 * Implemented as a compare-and-swap retry loop on top of the GCC
 * __sync_bool_compare_and_swap builtins, dispatching on operand size.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	/* Unsupported operand size: link error (or trap, see above). */
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
271 #else /* #ifndef uatomic_cmpxchg */
276 static inline __attribute__((always_inline
))
277 void _uatomic_and(void *addr
, unsigned long val
, int len
)
280 #ifdef UATOMIC_HAS_ATOMIC_BYTE
283 unsigned char old
, oldt
;
285 oldt
= uatomic_read((unsigned char *)addr
);
288 oldt
= _uatomic_cmpxchg(addr
, old
, old
& val
, 1);
289 } while (oldt
!= old
);
294 #ifdef UATOMIC_HAS_ATOMIC_SHORT
297 unsigned short old
, oldt
;
299 oldt
= uatomic_read((unsigned short *)addr
);
302 oldt
= _uatomic_cmpxchg(addr
, old
, old
& val
, 2);
303 } while (oldt
!= old
);
308 unsigned int old
, oldt
;
310 oldt
= uatomic_read((unsigned int *)addr
);
313 oldt
= _uatomic_cmpxchg(addr
, old
, old
& val
, 4);
314 } while (oldt
!= old
);
318 #if (CAA_BITS_PER_LONG == 64)
321 unsigned long old
, oldt
;
323 oldt
= uatomic_read((unsigned long *)addr
);
326 oldt
= _uatomic_cmpxchg(addr
, old
, old
& val
, 8);
327 } while (oldt
!= old
);
333 _uatomic_link_error();
336 #define uatomic_and(addr, v) \
337 (_uatomic_and((addr), \
338 caa_cast_long_keep_sign(v), \
340 #define cmm_smp_mb__before_uatomic_and() cmm_barrier()
341 #define cmm_smp_mb__after_uatomic_and() cmm_barrier()
343 #endif /* #ifndef uatomic_and */
/* uatomic_or */

#ifndef uatomic_or
/*
 * Atomic OR of @val into *addr, built as a cmpxchg retry loop for
 * architectures that provide uatomic_cmpxchg but no native OR
 * primitive. Dispatches on operand size.
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	/* Unsupported operand size: link error (or trap, see above). */
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */
#ifndef uatomic_add_return
/* uatomic_add_return */

/*
 * Atomically add @val to *addr and return the resulting (new) value,
 * built as a cmpxchg retry loop for architectures that provide
 * uatomic_cmpxchg but no native add-return primitive.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	/* Unsupported operand size: link error (or trap, see above). */
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
498 static inline __attribute__((always_inline
))
499 unsigned long _uatomic_exchange(void *addr
, unsigned long val
, int len
)
502 #ifdef UATOMIC_HAS_ATOMIC_BYTE
505 unsigned char old
, oldt
;
507 oldt
= uatomic_read((unsigned char *)addr
);
510 oldt
= uatomic_cmpxchg((unsigned char *)addr
,
512 } while (oldt
!= old
);
517 #ifdef UATOMIC_HAS_ATOMIC_SHORT
520 unsigned short old
, oldt
;
522 oldt
= uatomic_read((unsigned short *)addr
);
525 oldt
= uatomic_cmpxchg((unsigned short *)addr
,
527 } while (oldt
!= old
);
534 unsigned int old
, oldt
;
536 oldt
= uatomic_read((unsigned int *)addr
);
539 oldt
= uatomic_cmpxchg((unsigned int *)addr
,
541 } while (oldt
!= old
);
545 #if (CAA_BITS_PER_LONG == 64)
548 unsigned long old
, oldt
;
550 oldt
= uatomic_read((unsigned long *)addr
);
553 oldt
= uatomic_cmpxchg((unsigned long *)addr
,
555 } while (oldt
!= old
);
561 _uatomic_link_error();
565 #define uatomic_xchg(addr, v) \
566 ((__typeof__(*(addr))) _uatomic_exchange((addr), \
567 caa_cast_long_keep_sign(v), \
569 #endif /* #ifndef uatomic_xchg */
571 #endif /* #else #ifndef uatomic_cmpxchg */
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

/*
 * Derived operations, expressed in terms of uatomic_add_return() and
 * uatomic_add(). Guarded by #ifndef so an architecture-specific header
 * may provide its own optimized definitions beforehand.
 */
#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
#endif

#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)	uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
#endif
#endif /* _URCU_UATOMIC_GENERIC_H */