1 #ifndef _URCU_ARCH_UATOMIC_X86_H
2 #define _URCU_ARCH_UATOMIC_X86_H
/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
23 #include <urcu/compiler.h>
24 #include <urcu/system.h>
/*
 * Fallback definition of __SIZEOF_LONG__ for compilers that do not
 * predefine it: 8 bytes on x86-64/amd64, 4 bytes on 32-bit x86.
 */
#ifndef __SIZEOF_LONG__
#if defined(__x86_64__) || defined(__amd64__)
#define __SIZEOF_LONG__ 8
#else
#define __SIZEOF_LONG__ 4
#endif
#endif

/* Width of "unsigned long" in bits; selects the 8-byte atomic paths below. */
#ifndef BITS_PER_LONG
#define BITS_PER_LONG	(__SIZEOF_LONG__ * 8)
#endif
/*
 * Derived from AO_compare_and_swap() and AO_test_and_set_full().
 */

/*
 * Dummy aggregate used only inside asm memory constraints: casting the
 * target pointer to "struct __uatomic_dummy *" and dereferencing it makes
 * the compiler treat the location as a genuine memory operand without
 * committing to a particular scalar type.  The array size just has to be
 * at least as large as any operand width we ever access.
 */
struct __uatomic_dummy {
	unsigned long v[10];
};
#define __hp(x)	((struct __uatomic_dummy *)(x))

/* Plain (non-locked) store/load; atomicity is guaranteed by x86 for
 * naturally aligned accesses, the *_SHARED macros add the required
 * compiler barriers (see urcu/system.h). */
#define _uatomic_set(addr, v)	STORE_SHARED(*(addr), (v))
#define _uatomic_read(addr)	LOAD_SHARED(*(addr))
56 static inline __attribute__((always_inline
))
57 unsigned long __uatomic_cmpxchg(void *addr
, unsigned long old
,
58 unsigned long _new
, int len
)
63 unsigned char result
= old
;
66 "lock; cmpxchgb %2, %1"
67 : "+a"(result
), "+m"(*__hp(addr
))
68 : "q"((unsigned char)_new
)
74 unsigned short result
= old
;
77 "lock; cmpxchgw %2, %1"
78 : "+a"(result
), "+m"(*__hp(addr
))
79 : "r"((unsigned short)_new
)
85 unsigned int result
= old
;
88 "lock; cmpxchgl %2, %1"
89 : "+a"(result
), "+m"(*__hp(addr
))
90 : "r"((unsigned int)_new
)
94 #if (BITS_PER_LONG == 64)
97 unsigned long result
= old
;
100 "lock; cmpxchgq %2, %1"
101 : "+a"(result
), "+m"(*__hp(addr
))
102 : "r"((unsigned long)_new
)
108 /* generate an illegal instruction. Cannot catch this with linker tricks
109 * when optimizations are disabled. */
110 __asm__
__volatile__("ud2");
114 #define _uatomic_cmpxchg(addr, old, _new) \
115 ((__typeof__(*(addr))) __uatomic_cmpxchg((addr), (unsigned long)(old),\
116 (unsigned long)(_new), \
121 static inline __attribute__((always_inline
))
122 unsigned long __uatomic_exchange(void *addr
, unsigned long val
, int len
)
124 /* Note: the "xchg" instruction does not need a "lock" prefix. */
128 unsigned char result
;
129 __asm__
__volatile__(
131 : "=q"(result
), "+m"(*__hp(addr
))
132 : "0" ((unsigned char)val
)
138 unsigned short result
;
139 __asm__
__volatile__(
141 : "=r"(result
), "+m"(*__hp(addr
))
142 : "0" ((unsigned short)val
)
149 __asm__
__volatile__(
151 : "=r"(result
), "+m"(*__hp(addr
))
152 : "0" ((unsigned int)val
)
156 #if (BITS_PER_LONG == 64)
159 unsigned long result
;
160 __asm__
__volatile__(
162 : "=r"(result
), "+m"(*__hp(addr
))
163 : "0" ((unsigned long)val
)
169 /* generate an illegal instruction. Cannot catch this with linker tricks
170 * when optimizations are disabled. */
171 __asm__
__volatile__("ud2");
175 #define _uatomic_xchg(addr, v) \
176 ((__typeof__(*(addr))) __uatomic_exchange((addr), (unsigned long)(v), \
179 /* uatomic_add_return, uatomic_sub_return */
181 static inline __attribute__((always_inline
))
182 unsigned long __uatomic_add_return(void *addr
, unsigned long val
,
188 unsigned char result
= val
;
190 __asm__
__volatile__(
192 : "+m"(*__hp(addr
)), "+q" (result
)
195 return result
+ (unsigned char)val
;
199 unsigned short result
= val
;
201 __asm__
__volatile__(
203 : "+m"(*__hp(addr
)), "+r" (result
)
206 return result
+ (unsigned short)val
;
210 unsigned int result
= val
;
212 __asm__
__volatile__(
214 : "+m"(*__hp(addr
)), "+r" (result
)
217 return result
+ (unsigned int)val
;
219 #if (BITS_PER_LONG == 64)
222 unsigned long result
= val
;
224 __asm__
__volatile__(
226 : "+m"(*__hp(addr
)), "+r" (result
)
229 return result
+ (unsigned long)val
;
233 /* generate an illegal instruction. Cannot catch this with linker tricks
234 * when optimizations are disabled. */
235 __asm__
__volatile__("ud2");
239 #define _uatomic_add_return(addr, v) \
240 ((__typeof__(*(addr))) __uatomic_add_return((addr), \
241 (unsigned long)(v), \
244 #define _uatomic_sub_return(addr, v) _uatomic_add_return((addr), -(v))
246 /* uatomic_add, uatomic_sub */
248 static inline __attribute__((always_inline
))
249 void __uatomic_add(void *addr
, unsigned long val
, int len
)
254 __asm__
__volatile__(
257 : "iq" ((unsigned char)val
)
263 __asm__
__volatile__(
266 : "ir" ((unsigned short)val
)
272 __asm__
__volatile__(
275 : "ir" ((unsigned int)val
)
279 #if (BITS_PER_LONG == 64)
282 __asm__
__volatile__(
285 : "er" ((unsigned long)val
)
291 /* generate an illegal instruction. Cannot catch this with linker tricks
292 * when optimizations are disabled. */
293 __asm__
__volatile__("ud2");
297 #define _uatomic_add(addr, v) \
298 (__uatomic_add((addr), (unsigned long)(v), sizeof(*(addr))))
300 #define _uatomic_sub(addr, v) _uatomic_add((addr), -(v))
305 static inline __attribute__((always_inline
))
306 void __uatomic_inc(void *addr
, int len
)
311 __asm__
__volatile__(
320 __asm__
__volatile__(
329 __asm__
__volatile__(
336 #if (BITS_PER_LONG == 64)
339 __asm__
__volatile__(
348 /* generate an illegal instruction. Cannot catch this with linker tricks
349 * when optimizations are disabled. */
350 __asm__
__volatile__("ud2");
354 #define _uatomic_inc(addr) (__uatomic_inc((addr), sizeof(*(addr))))
358 static inline __attribute__((always_inline
))
359 void __uatomic_dec(void *addr
, int len
)
364 __asm__
__volatile__(
373 __asm__
__volatile__(
382 __asm__
__volatile__(
389 #if (BITS_PER_LONG == 64)
392 __asm__
__volatile__(
401 /* generate an illegal instruction. Cannot catch this with linker tricks
402 * when optimizations are disabled. */
403 __asm__
__volatile__("ud2");
407 #define _uatomic_dec(addr) (__uatomic_dec((addr), sizeof(*(addr))))
#if ((BITS_PER_LONG != 64) && defined(CONFIG_URCU_COMPAT_ARCH))
/* >0: native cmpxchg usable, <0: not probed yet, 0: must use compat path. */
extern int __urcu_cas_avail;
/* Probe the CPU once; returns >0 when the native path can be used. */
extern int __urcu_cas_init(void);

/*
 * Dispatch each uatomic operation to the native lock-prefixed
 * implementation when available, probing lazily on first use, and fall
 * back to the out-of-line compat implementations otherwise.
 */
#define UATOMIC_COMPAT(insn)						      \
	((likely(__urcu_cas_avail > 0))					      \
	? (_uatomic_##insn)						      \
	: ((unlikely(__urcu_cas_avail < 0)				      \
		? ((__urcu_cas_init() > 0)				      \
			? (_uatomic_##insn)				      \
			: (compat_uatomic_##insn))			      \
		: (compat_uatomic_##insn))))

extern unsigned long _compat_uatomic_set(void *addr,
					 unsigned long _new, int len);
#define compat_uatomic_set(addr, _new)					      \
	((__typeof__(*(addr))) _compat_uatomic_set((addr),		      \
						(unsigned long)(_new),	      \
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_xchg(void *addr,
					  unsigned long _new, int len);
#define compat_uatomic_xchg(addr, _new)					      \
	((__typeof__(*(addr))) _compat_uatomic_xchg((addr),		      \
						(unsigned long)(_new),	      \
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_cmpxchg(void *addr, unsigned long old,
					     unsigned long _new, int len);
#define compat_uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _compat_uatomic_cmpxchg((addr),		      \
						(unsigned long)(old),	      \
						(unsigned long)(_new),	      \
						sizeof(*(addr))))

/*
 * NOTE(review): the original text declared _compat_uatomic_xchg a second
 * time here and never declared _compat_uatomic_add_return, which the
 * macro below actually calls.  Declare the function that is used.
 */
extern unsigned long _compat_uatomic_add_return(void *addr,
						unsigned long v, int len);
#define compat_uatomic_add_return(addr, v)				      \
	((__typeof__(*(addr))) _compat_uatomic_add_return((addr),	      \
						(unsigned long)(v),	      \
						sizeof(*(addr))))

#define compat_uatomic_sub_return(addr, v)				      \
		compat_uatomic_add_return((addr), -(v))
#define compat_uatomic_add(addr, v)					      \
		((void)compat_uatomic_add_return((addr), (v)))
#define compat_uatomic_sub(addr, v)					      \
		((void)compat_uatomic_sub_return((addr), (v)))
#define compat_uatomic_inc(addr)					      \
		(compat_uatomic_add((addr), 1))
#define compat_uatomic_dec(addr)					      \
		(compat_uatomic_sub((addr), 1))

#else
/* No compat layer needed: always use the native implementation. */
#define UATOMIC_COMPAT(insn)	(_uatomic_##insn)
#endif
/* Read is atomic even in compat mode */
#define uatomic_read(addr)	_uatomic_read(addr)

/* Public API: route every mutating operation through UATOMIC_COMPAT. */
#define uatomic_set(addr, v)			\
		UATOMIC_COMPAT(set(addr, v))
#define uatomic_cmpxchg(addr, old, _new)	\
		UATOMIC_COMPAT(cmpxchg(addr, old, _new))
#define uatomic_xchg(addr, v)			\
		UATOMIC_COMPAT(xchg(addr, v))
#define uatomic_add_return(addr, v)		\
		UATOMIC_COMPAT(add_return(addr, v))
#define uatomic_sub_return(addr, v)		\
		UATOMIC_COMPAT(sub_return(addr, v))
#define uatomic_add(addr, v)	UATOMIC_COMPAT(add(addr, v))
#define uatomic_sub(addr, v)	UATOMIC_COMPAT(sub(addr, v))
#define uatomic_inc(addr)	UATOMIC_COMPAT(inc(addr))
#define uatomic_dec(addr)	UATOMIC_COMPAT(dec(addr))
489 #endif /* _URCU_ARCH_UATOMIC_X86_H */