9ee7b7bdc22a6bedeef45ef6447d930162375c99
1 #ifndef _URCU_UATOMIC_GENERIC_H
2 #define _URCU_UATOMIC_GENERIC_H
5 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
6 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
7 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
8 * Copyright (c) 2009 Mathieu Desnoyers
9 * Copyright (c) 2010 Paolo Bonzini
11 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
12 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
14 * Permission is hereby granted to use or copy this program
15 * for any purpose, provided the above notices are retained on all copies.
16 * Permission to modify the code and to distribute modified code is granted,
17 * provided the above notices are retained, and a notice that the code was
18 * modified is included with the above copyright notice.
20 * Code inspired from libuatomic_ops-1.2, inherited in part from the
21 * Boehm-Demers-Weiser conservative garbage collector.
24 #include <urcu/compiler.h>
25 #include <urcu/system.h>
/*
 * Number of value bits in a long; used below to decide whether the
 * 8-byte atomic cases are available on this target.
 * __SIZEOF_LONG__ is a GCC/Clang predefined macro.
 */
#ifndef BITS_PER_LONG
#define BITS_PER_LONG	(__SIZEOF_LONG__ * 8)
#endif

/*
 * Plain (non-RMW) atomic store/load wrappers.
 * STORE_SHARED/LOAD_SHARED presumably come from <urcu/system.h>
 * (volatile access plus any required memory barriers) — confirm there.
 * Guarded so an architecture-specific header can override them.
 */
#ifndef uatomic_set
#define uatomic_set(addr, v)	STORE_SHARED(*(addr), (v))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	LOAD_SHARED(*(addr))
#endif
43 #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
44 static inline __attribute__((always_inline
))
45 void _uatomic_link_error()
48 /* generate an illegal instruction. Cannot catch this with linker tricks
49 * when optimizations are disabled. */
50 __asm__
__volatile__(ILLEGAL_INSTR
);
56 #else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
57 extern void _uatomic_link_error ();
58 #endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
62 #ifndef uatomic_cmpxchg
63 static inline __attribute__((always_inline
))
64 unsigned long _uatomic_cmpxchg(void *addr
, unsigned long old
,
65 unsigned long _new
, int len
)
68 #ifdef UATOMIC_HAS_ATOMIC_BYTE
70 return __sync_val_compare_and_swap_1(addr
, old
, _new
);
72 #ifdef UATOMIC_HAS_ATOMIC_SHORT
74 return __sync_val_compare_and_swap_2(addr
, old
, _new
);
77 return __sync_val_compare_and_swap_4(addr
, old
, _new
);
78 #if (BITS_PER_LONG == 64)
80 return __sync_val_compare_and_swap_8(addr
, old
, _new
);
83 _uatomic_link_error();
88 #define uatomic_cmpxchg(addr, old, _new) \
89 ((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old),\
90 (unsigned long)(_new), \
/* uatomic_add_return */

#ifndef uatomic_add_return
/*
 * Generic atomic add-and-return, dispatched on operand size.
 * Atomically adds val to *addr and returns the NEW value, using the
 * GCC __sync_add_and_fetch_N builtins (full memory barrier semantics).
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	/* Unsupported size: link error (or trap when not optimizing). */
	_uatomic_link_error();
	return 0;
}

/* Type-preserving wrapper: derives the operand size from *(addr). */
#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						  (unsigned long)(v),	    \
						  sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
/* uatomic_xchg */

#ifndef uatomic_xchg
/*
 * Generic atomic exchange, dispatched on operand size.
 * Atomically stores val into *addr and returns the previous value.
 * Implemented as a CAS retry loop over the __sync_bool_compare_and_swap_N
 * builtins: re-read the current value until the swap from that value
 * succeeds.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	/* Unsupported size: link error (or trap when not optimizing). */
	_uatomic_link_error();
	return 0;
}

/* Type-preserving wrapper: derives the operand size from *(addr). */
#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
191 #else /* #ifndef uatomic_cmpxchg */
193 #ifndef uatomic_add_return
194 /* uatomic_add_return */
196 static inline __attribute__((always_inline
))
197 unsigned long _uatomic_add_return(void *addr
, unsigned long val
, int len
)
200 #ifdef UATOMIC_HAS_ATOMIC_BYTE
203 unsigned char old
, oldt
;
205 oldt
= uatomic_read((unsigned char *)addr
);
208 oldt
= _uatomic_cmpxchg(addr
, old
, old
+ val
, 1);
209 } while (oldt
!= old
);
214 #ifdef UATOMIC_HAS_ATOMIC_SHORT
217 unsigned short old
, oldt
;
219 oldt
= uatomic_read((unsigned short *)addr
);
222 oldt
= _uatomic_cmpxchg(addr
, old
, old
+ val
, 2);
223 } while (oldt
!= old
);
230 unsigned int old
, oldt
;
232 oldt
= uatomic_read((unsigned int *)addr
);
235 oldt
= _uatomic_cmpxchg(addr
, old
, old
+ val
, 4);
236 } while (oldt
!= old
);
240 #if (BITS_PER_LONG == 64)
243 unsigned long old
, oldt
;
245 oldt
= uatomic_read((unsigned long *)addr
);
248 oldt
= _uatomic_cmpxchg(addr
, old
, old
+ val
, 8);
249 } while (oldt
!= old
);
255 _uatomic_link_error();
259 #define uatomic_add_return(addr, v) \
260 ((__typeof__(*(addr))) _uatomic_add_return((addr), \
261 (unsigned long)(v), \
263 #endif /* #ifndef uatomic_add_return */
268 static inline __attribute__((always_inline
))
269 unsigned long _uatomic_exchange(void *addr
, unsigned long val
, int len
)
272 #ifdef UATOMIC_HAS_ATOMIC_BYTE
275 unsigned char old
, oldt
;
277 oldt
= uatomic_read((unsigned char *)addr
);
280 oldt
= _uatomic_cmpxchg(addr
, old
, val
, 1);
281 } while (oldt
!= old
);
286 #ifdef UATOMIC_HAS_ATOMIC_SHORT
289 unsigned short old
, oldt
;
291 oldt
= uatomic_read((unsigned short *)addr
);
294 oldt
= _uatomic_cmpxchg(addr
, old
, val
, 2);
295 } while (oldt
!= old
);
302 unsigned int old
, oldt
;
304 oldt
= uatomic_read((unsigned int *)addr
);
307 oldt
= _uatomic_cmpxchg(addr
, old
, val
, 4);
308 } while (oldt
!= old
);
312 #if (BITS_PER_LONG == 64)
315 unsigned long old
, oldt
;
317 oldt
= uatomic_read((unsigned long *)addr
);
320 oldt
= _uatomic_cmpxchg(addr
, old
, val
, 8);
321 } while (oldt
!= old
);
327 _uatomic_link_error();
331 #define uatomic_xchg(addr, v) \
332 ((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
334 #endif /* #ifndef uatomic_xchg */
336 #endif /* #else #ifndef uatomic_cmpxchg */
338 /* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */
341 #define uatomic_add(addr, v) (void)uatomic_add_return((addr), (v))
344 #define uatomic_sub_return(addr, v) uatomic_add_return((addr), -(v))
345 #define uatomic_sub(addr, v) uatomic_add((addr), -(v))
348 #define uatomic_inc(addr) uatomic_add((addr), 1)
352 #define uatomic_dec(addr) uatomic_add((addr), -1)
359 #endif /* _URCU_UATOMIC_GENERIC_H */
This page took 0.035739 seconds and 4 git commands to generate.