/* gitweb scrape artifact — source commit: 666e498f326c4c3b8ce025352b3f385416dbc32b */
1 #ifndef _URCU_ARCH_UATOMIC_X86_H
2 #define _URCU_ARCH_UATOMIC_X86_H
5 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
6 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
7 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
8 * Copyright (c) 2009 Mathieu Desnoyers
10 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
11 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
13 * Permission is hereby granted to use or copy this program
14 * for any purpose, provided the above notices are retained on all copies.
15 * Permission to modify the code and to distribute modified code is granted,
16 * provided the above notices are retained, and a notice that the code was
17 * modified is included with the above copyright notice.
19 * Code inspired from libuatomic_ops-1.2, inherited in part from the
20 * Boehm-Demers-Weiser conservative garbage collector.
23 #include <urcu/compiler.h>
24 #include <urcu/system.h>
26 #define UATOMIC_HAS_ATOMIC_BYTE
27 #define UATOMIC_HAS_ATOMIC_SHORT
33 #ifndef __SIZEOF_LONG__
34 #if defined(__x86_64__) || defined(__amd64__)
35 #define __SIZEOF_LONG__ 8
37 #define __SIZEOF_LONG__ 4
42 #define BITS_PER_LONG (__SIZEOF_LONG__ * 8)
/*
 * Derived from AO_compare_and_swap() and AO_test_and_set_full().
 */

/*
 * Dummy type used only through the __hp() cast so the "m" asm operands
 * refer to a memory object large enough for any 1/2/4/8-byte access.
 */
struct __uatomic_dummy {
	unsigned long v[10];
};
#define __hp(x)	((struct __uatomic_dummy *)(x))

/* Store through the shared-memory primitive; no lock prefix needed. */
#define _uatomic_set(addr, v)	STORE_SHARED(*(addr), (v))
58 static inline __attribute__((always_inline
))
59 unsigned long __uatomic_cmpxchg(void *addr
, unsigned long old
,
60 unsigned long _new
, int len
)
65 unsigned char result
= old
;
68 "lock; cmpxchgb %2, %1"
69 : "+a"(result
), "+m"(*__hp(addr
))
70 : "q"((unsigned char)_new
)
76 unsigned short result
= old
;
79 "lock; cmpxchgw %2, %1"
80 : "+a"(result
), "+m"(*__hp(addr
))
81 : "r"((unsigned short)_new
)
87 unsigned int result
= old
;
90 "lock; cmpxchgl %2, %1"
91 : "+a"(result
), "+m"(*__hp(addr
))
92 : "r"((unsigned int)_new
)
96 #if (BITS_PER_LONG == 64)
99 unsigned long result
= old
;
101 __asm__
__volatile__(
102 "lock; cmpxchgq %2, %1"
103 : "+a"(result
), "+m"(*__hp(addr
))
104 : "r"((unsigned long)_new
)
110 /* generate an illegal instruction. Cannot catch this with linker tricks
111 * when optimizations are disabled. */
112 __asm__
__volatile__("ud2");
116 #define _uatomic_cmpxchg(addr, old, _new) \
117 ((__typeof__(*(addr))) __uatomic_cmpxchg((addr), (unsigned long)(old),\
118 (unsigned long)(_new), \
123 static inline __attribute__((always_inline
))
124 unsigned long __uatomic_exchange(void *addr
, unsigned long val
, int len
)
126 /* Note: the "xchg" instruction does not need a "lock" prefix. */
130 unsigned char result
;
131 __asm__
__volatile__(
133 : "=q"(result
), "+m"(*__hp(addr
))
134 : "0" ((unsigned char)val
)
140 unsigned short result
;
141 __asm__
__volatile__(
143 : "=r"(result
), "+m"(*__hp(addr
))
144 : "0" ((unsigned short)val
)
151 __asm__
__volatile__(
153 : "=r"(result
), "+m"(*__hp(addr
))
154 : "0" ((unsigned int)val
)
158 #if (BITS_PER_LONG == 64)
161 unsigned long result
;
162 __asm__
__volatile__(
164 : "=r"(result
), "+m"(*__hp(addr
))
165 : "0" ((unsigned long)val
)
171 /* generate an illegal instruction. Cannot catch this with linker tricks
172 * when optimizations are disabled. */
173 __asm__
__volatile__("ud2");
177 #define _uatomic_xchg(addr, v) \
178 ((__typeof__(*(addr))) __uatomic_exchange((addr), (unsigned long)(v), \
181 /* uatomic_add_return */
183 static inline __attribute__((always_inline
))
184 unsigned long __uatomic_add_return(void *addr
, unsigned long val
,
190 unsigned char result
= val
;
192 __asm__
__volatile__(
194 : "+m"(*__hp(addr
)), "+q" (result
)
197 return result
+ (unsigned char)val
;
201 unsigned short result
= val
;
203 __asm__
__volatile__(
205 : "+m"(*__hp(addr
)), "+r" (result
)
208 return result
+ (unsigned short)val
;
212 unsigned int result
= val
;
214 __asm__
__volatile__(
216 : "+m"(*__hp(addr
)), "+r" (result
)
219 return result
+ (unsigned int)val
;
221 #if (BITS_PER_LONG == 64)
224 unsigned long result
= val
;
226 __asm__
__volatile__(
228 : "+m"(*__hp(addr
)), "+r" (result
)
231 return result
+ (unsigned long)val
;
235 /* generate an illegal instruction. Cannot catch this with linker tricks
236 * when optimizations are disabled. */
237 __asm__
__volatile__("ud2");
241 #define _uatomic_add_return(addr, v) \
242 ((__typeof__(*(addr))) __uatomic_add_return((addr), \
243 (unsigned long)(v), \
248 static inline __attribute__((always_inline
))
249 void __uatomic_add(void *addr
, unsigned long val
, int len
)
254 __asm__
__volatile__(
257 : "iq" ((unsigned char)val
)
263 __asm__
__volatile__(
266 : "ir" ((unsigned short)val
)
272 __asm__
__volatile__(
275 : "ir" ((unsigned int)val
)
279 #if (BITS_PER_LONG == 64)
282 __asm__
__volatile__(
285 : "er" ((unsigned long)val
)
291 /* generate an illegal instruction. Cannot catch this with linker tricks
292 * when optimizations are disabled. */
293 __asm__
__volatile__("ud2");
297 #define _uatomic_add(addr, v) \
298 (__uatomic_add((addr), (unsigned long)(v), sizeof(*(addr))))
303 static inline __attribute__((always_inline
))
304 void __uatomic_inc(void *addr
, int len
)
309 __asm__
__volatile__(
318 __asm__
__volatile__(
327 __asm__
__volatile__(
334 #if (BITS_PER_LONG == 64)
337 __asm__
__volatile__(
346 /* generate an illegal instruction. Cannot catch this with linker tricks
347 * when optimizations are disabled. */
348 __asm__
__volatile__("ud2");
352 #define _uatomic_inc(addr) (__uatomic_inc((addr), sizeof(*(addr))))
356 static inline __attribute__((always_inline
))
357 void __uatomic_dec(void *addr
, int len
)
362 __asm__
__volatile__(
371 __asm__
__volatile__(
380 __asm__
__volatile__(
387 #if (BITS_PER_LONG == 64)
390 __asm__
__volatile__(
399 /* generate an illegal instruction. Cannot catch this with linker tricks
400 * when optimizations are disabled. */
401 __asm__
__volatile__("ud2");
405 #define _uatomic_dec(addr) (__uatomic_dec((addr), sizeof(*(addr))))
407 #if ((BITS_PER_LONG != 64) && defined(CONFIG_RCU_COMPAT_ARCH))
408 extern int __rcu_cas_avail
;
409 extern int __rcu_cas_init(void);
411 #define UATOMIC_COMPAT(insn) \
412 ((likely(__rcu_cas_avail > 0)) \
413 ? (_uatomic_##insn) \
414 : ((unlikely(__rcu_cas_avail < 0) \
415 ? ((__rcu_cas_init() > 0) \
416 ? (_uatomic_##insn) \
417 : (compat_uatomic_##insn)) \
418 : (compat_uatomic_##insn))))
420 extern unsigned long _compat_uatomic_set(void *addr
,
421 unsigned long _new
, int len
);
422 #define compat_uatomic_set(addr, _new) \
423 ((__typeof__(*(addr))) _compat_uatomic_set((addr), \
424 (unsigned long)(_new), \
428 extern unsigned long _compat_uatomic_xchg(void *addr
,
429 unsigned long _new
, int len
);
430 #define compat_uatomic_xchg(addr, _new) \
431 ((__typeof__(*(addr))) _compat_uatomic_xchg((addr), \
432 (unsigned long)(_new), \
435 extern unsigned long _compat_uatomic_cmpxchg(void *addr
, unsigned long old
,
436 unsigned long _new
, int len
);
437 #define compat_uatomic_cmpxchg(addr, old, _new) \
438 ((__typeof__(*(addr))) _compat_uatomic_cmpxchg((addr), \
439 (unsigned long)(old), \
440 (unsigned long)(_new), \
443 extern unsigned long _compat_uatomic_xchg(void *addr
,
444 unsigned long _new
, int len
);
445 #define compat_uatomic_add_return(addr, v) \
446 ((__typeof__(*(addr))) _compat_uatomic_add_return((addr), \
447 (unsigned long)(v), \
450 #define compat_uatomic_add(addr, v) \
451 ((void)compat_uatomic_add_return((addr), (v)))
452 #define compat_uatomic_inc(addr) \
453 (compat_uatomic_add((addr), 1))
454 #define compat_uatomic_dec(addr) \
455 (compat_uatomic_add((addr), -1))
458 #define UATOMIC_COMPAT(insn) (_uatomic_##insn)
461 /* Read is atomic even in compat mode */
462 #define uatomic_set(addr, v) \
463 UATOMIC_COMPAT(set(addr, v))
465 #define uatomic_cmpxchg(addr, old, _new) \
466 UATOMIC_COMPAT(cmpxchg(addr, old, _new))
467 #define uatomic_xchg(addr, v) \
468 UATOMIC_COMPAT(xchg(addr, v))
469 #define uatomic_add_return(addr, v) \
470 UATOMIC_COMPAT(add_return(addr, v))
472 #define uatomic_add(addr, v) UATOMIC_COMPAT(add(addr, v))
473 #define uatomic_inc(addr) UATOMIC_COMPAT(inc(addr))
474 #define uatomic_dec(addr) UATOMIC_COMPAT(dec(addr))
480 #include <urcu/uatomic_generic.h>
482 #endif /* _URCU_ARCH_UATOMIC_X86_H */
/* gitweb scrape artifact: "This page took 0.041848 seconds and 4 git commands to generate." */