#ifndef _URCU_ARCH_UATOMIC_X86_H
#define _URCU_ARCH_UATOMIC_X86_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
#include <urcu/config.h>
#include <urcu/compiler.h>
#include <urcu/system.h>

#define UATOMIC_HAS_ATOMIC_BYTE
#define UATOMIC_HAS_ATOMIC_SHORT

#ifdef __cplusplus
extern "C" {
#endif
/*
 * Derived from AO_compare_and_swap() and AO_test_and_set_full().
 */

struct __uatomic_dummy {
	unsigned long v[10];
};
#define __hp(x)	((struct __uatomic_dummy *)(x))
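/*
 * Descriptive note: casting the target address through this oversized
 * dummy struct is a trick inherited from libatomic_ops. It makes the
 * "m" operands in the inline asm below refer to the whole addressed
 * object, so the compiler treats each asm as a real access to *addr of
 * the appropriate width rather than to a single byte.
 */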
#define _uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
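/*
 * Usage sketch (illustrative, not part of the original header): stores
 * through uatomic_set() pair with CMM_LOAD_SHARED() reads on the other
 * side. The variable name below is an example only.
 *
 *	static unsigned long flag;
 *
 *	uatomic_set(&flag, 1UL);		// atomic store
 *	if (CMM_LOAD_SHARED(flag))		// atomic read
 *		...
 */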
/* cmpxchg */

static inline __attribute__((always_inline))
unsigned long __uatomic_cmpxchg(void *addr, unsigned long old,
			      unsigned long _new, int len)
{
	switch (len) {
	case 1:
	{
		unsigned char result = old;

		__asm__ __volatile__(
		"lock; cmpxchgb %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "q"((unsigned char)_new)
			: "memory");
		return result;
	}
	case 2:
	{
		unsigned short result = old;

		__asm__ __volatile__(
		"lock; cmpxchgw %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned short)_new)
			: "memory");
		return result;
	}
	case 4:
	{
		unsigned int result = old;

		__asm__ __volatile__(
		"lock; cmpxchgl %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned int)_new)
			: "memory");
		return result;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result = old;

		__asm__ __volatile__(
		"lock; cmpxchgq %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned long)_new)
			: "memory");
		return result;
	}
#endif
	}
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return 0;
}
#define _uatomic_cmpxchg(addr, old, _new)				       \
	((__typeof__(*(addr))) __uatomic_cmpxchg((addr),		       \
						caa_cast_long_keep_sign(old),  \
						caa_cast_long_keep_sign(_new), \
						sizeof(*(addr))))
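/*
 * Usage sketch (illustrative): a once-only initialization flag built on
 * uatomic_cmpxchg(). cmpxchg returns the value that was in *addr before
 * the operation, so it equals `old` exactly when the swap succeeded.
 *
 *	static int init_done;
 *
 *	void init_once(void)			// hypothetical helper
 *	{
 *		if (uatomic_cmpxchg(&init_done, 0, 1) == 0)
 *			do_init();		// hypothetical; runs once
 *	}
 */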
/* xchg */

static inline __attribute__((always_inline))
unsigned long __uatomic_exchange(void *addr, unsigned long val, int len)
{
	/* Note: the "xchg" instruction does not need a "lock" prefix. */
	switch (len) {
	case 1:
	{
		unsigned char result;

		__asm__ __volatile__(
		"xchgb %0, %1"
			: "=q"(result), "+m"(*__hp(addr))
			: "0" ((unsigned char)val)
			: "memory");
		return result;
	}
	case 2:
	{
		unsigned short result;

		__asm__ __volatile__(
		"xchgw %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned short)val)
			: "memory");
		return result;
	}
	case 4:
	{
		unsigned int result;

		__asm__ __volatile__(
		"xchgl %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned int)val)
			: "memory");
		return result;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result;

		__asm__ __volatile__(
		"xchgq %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned long)val)
			: "memory");
		return result;
	}
#endif
	}
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return 0;
}
#define _uatomic_xchg(addr, v)						       \
	((__typeof__(*(addr))) __uatomic_exchange((addr),		       \
						caa_cast_long_keep_sign(v),    \
						sizeof(*(addr))))
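/*
 * Usage sketch (illustrative): atomically stealing the head of a list
 * with uatomic_xchg(), which returns the previous value. On x86 the
 * implicit lock of "xchg" makes this a full memory barrier.
 *
 *	struct node *steal_all(struct node **head)	// hypothetical
 *	{
 *		return uatomic_xchg(head, NULL);
 *	}
 */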
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long __uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
	case 1:
	{
		unsigned char result = val;

		__asm__ __volatile__(
		"lock; xaddb %1, %0"
			: "+m"(*__hp(addr)), "+q" (result)
			:
			: "memory");
		return result + (unsigned char)val;
	}
	case 2:
	{
		unsigned short result = val;

		__asm__ __volatile__(
		"lock; xaddw %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned short)val;
	}
	case 4:
	{
		unsigned int result = val;

		__asm__ __volatile__(
		"lock; xaddl %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned int)val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result = val;

		__asm__ __volatile__(
		"lock; xaddq %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned long)val;
	}
#endif
	}
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return 0;
}
#define _uatomic_add_return(addr, v)					       \
	((__typeof__(*(addr))) __uatomic_add_return((addr),		       \
						caa_cast_long_keep_sign(v),    \
						sizeof(*(addr))))
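/*
 * Usage sketch (illustrative): uatomic_add_return() yields the value
 * *after* the addition ("xadd" returns the old value; the code above
 * adds `val` back). Handy for handing out unique tickets:
 *
 *	static unsigned long next_ticket;
 *
 *	unsigned long take_ticket(void)		// hypothetical helper
 *	{
 *		return uatomic_add_return(&next_ticket, 1);
 *	}
 */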
/* uatomic_and */

static inline __attribute__((always_inline))
void __uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; andb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; andw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; andl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; andq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return;
}
#define _uatomic_and(addr, v)						       \
	(__uatomic_and((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))
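/*
 * Usage sketch (illustrative): clearing flag bits atomically with
 * uatomic_and(). The flag name is an example only.
 *
 *	#define FLAG_BUSY	0x1UL
 *
 *	static unsigned long flags;
 *
 *	uatomic_and(&flags, ~FLAG_BUSY);	// clear FLAG_BUSY
 */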
/* uatomic_or */

static inline __attribute__((always_inline))
void __uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; orb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; orw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; orl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; orq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return;
}
#define _uatomic_or(addr, v)						       \
	(__uatomic_or((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))
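/*
 * Usage sketch (illustrative): setting flag bits atomically with
 * uatomic_or(), the counterpart of the uatomic_and() example above.
 *
 *	uatomic_or(&flags, FLAG_BUSY);		// set FLAG_BUSY
 */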
/* uatomic_add */

static inline __attribute__((always_inline))
void __uatomic_add(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; addb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; addw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; addl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; addq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return;
}
#define _uatomic_add(addr, v)						       \
	(__uatomic_add((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))
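/*
 * Usage sketch (illustrative): uatomic_add() is the fire-and-forget
 * variant of uatomic_add_return(), for when the new value is not
 * needed, e.g. accumulating into a statistics counter:
 *
 *	static unsigned long bytes_sent;
 *
 *	uatomic_add(&bytes_sent, len);		// `len` is an example
 */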
/* uatomic_inc */

static inline __attribute__((always_inline))
void __uatomic_inc(void *addr, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; incb %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; incw %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; incl %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; incq %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return;
}
#define _uatomic_inc(addr)	(__uatomic_inc((addr), sizeof(*(addr))))
/* uatomic_dec */

static inline __attribute__((always_inline))
void __uatomic_dec(void *addr, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; decb %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; decw %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; decl %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; decq %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#endif
	}
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return;
}
#define _uatomic_dec(addr)	(__uatomic_dec((addr), sizeof(*(addr))))
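/*
 * Usage sketch (illustrative): uatomic_inc()/uatomic_dec() map to
 * "lock; inc"/"lock; dec" and suit reference counting where the
 * resulting value is not needed; use uatomic_add_return(addr, -1) when
 * the release decision depends on the new count.
 *
 *	static unsigned long refcount;
 *
 *	uatomic_inc(&refcount);			// take a reference
 *	if (uatomic_add_return(&refcount, -1) == 0)
 *		release();			// hypothetical helper
 */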
#if ((CAA_BITS_PER_LONG != 64) && defined(CONFIG_RCU_COMPAT_ARCH))
extern int __rcu_cas_avail;
extern int __rcu_cas_init(void);

#define UATOMIC_COMPAT(insn)						       \
	((caa_likely(__rcu_cas_avail > 0))				       \
	? (_uatomic_##insn)						       \
		: ((caa_unlikely(__rcu_cas_avail < 0)			       \
			? ((__rcu_cas_init() > 0)			       \
				? (_uatomic_##insn)			       \
				: (compat_uatomic_##insn))		       \
			: (compat_uatomic_##insn))))
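/*
 * Descriptive note on the dispatch above: __rcu_cas_avail is > 0 once
 * cmpxchg is known to be available, < 0 before detection has run, and
 * 0 when it is known absent (pre-i486 CPUs). The first use triggers
 * __rcu_cas_init(); afterwards each call goes either to the native
 * _uatomic_* implementation or to the compat_uatomic_* fallbacks
 * declared below.
 */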
/*
 * We leave the return value so we don't break the ABI, but remove the
 * return value from the API.
 */
extern unsigned long _compat_uatomic_set(void *addr,
					 unsigned long _new, int len);
#define compat_uatomic_set(addr, _new)					       \
	((void) _compat_uatomic_set((addr),				       \
				caa_cast_long_keep_sign(_new),		       \
				sizeof(*(addr))))
extern unsigned long _compat_uatomic_xchg(void *addr,
					  unsigned long _new, int len);
#define compat_uatomic_xchg(addr, _new)					       \
	((__typeof__(*(addr))) _compat_uatomic_xchg((addr),		       \
						caa_cast_long_keep_sign(_new), \
						sizeof(*(addr))))
extern unsigned long _compat_uatomic_cmpxchg(void *addr, unsigned long old,
					     unsigned long _new, int len);
#define compat_uatomic_cmpxchg(addr, old, _new)				       \
	((__typeof__(*(addr))) _compat_uatomic_cmpxchg((addr),		       \
						caa_cast_long_keep_sign(old),  \
						caa_cast_long_keep_sign(_new), \
						sizeof(*(addr))))
extern void _compat_uatomic_and(void *addr, unsigned long _new, int len);
#define compat_uatomic_and(addr, v)					       \
	(_compat_uatomic_and((addr),					       \
			caa_cast_long_keep_sign(v),			       \
			sizeof(*(addr))))
extern void _compat_uatomic_or(void *addr, unsigned long _new, int len);
#define compat_uatomic_or(addr, v)					       \
	(_compat_uatomic_or((addr),					       \
			caa_cast_long_keep_sign(v),			       \
			sizeof(*(addr))))
extern unsigned long _compat_uatomic_add_return(void *addr,
						unsigned long _new, int len);
#define compat_uatomic_add_return(addr, v)				       \
	((__typeof__(*(addr))) _compat_uatomic_add_return((addr),	       \
						caa_cast_long_keep_sign(v),    \
						sizeof(*(addr))))
#define compat_uatomic_add(addr, v)					       \
		((void)compat_uatomic_add_return((addr), (v)))
#define compat_uatomic_inc(addr)					       \
		(compat_uatomic_add((addr), 1))
#define compat_uatomic_dec(addr)					       \
		(compat_uatomic_add((addr), -1))

#else
#define UATOMIC_COMPAT(insn)	(_uatomic_##insn)
#endif
/* Read is atomic even in compat mode */
#define uatomic_set(addr, v)			\
		UATOMIC_COMPAT(set(addr, v))

#define uatomic_cmpxchg(addr, old, _new)	\
		UATOMIC_COMPAT(cmpxchg(addr, old, _new))
#define uatomic_xchg(addr, v)			\
		UATOMIC_COMPAT(xchg(addr, v))

#define uatomic_and(addr, v)			\
		UATOMIC_COMPAT(and(addr, v))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#define uatomic_or(addr, v)			\
		UATOMIC_COMPAT(or(addr, v))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#define uatomic_add_return(addr, v)		\
		UATOMIC_COMPAT(add_return(addr, v))

#define uatomic_add(addr, v)	UATOMIC_COMPAT(add(addr, v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()

#define uatomic_inc(addr)	UATOMIC_COMPAT(inc(addr))
#define cmm_smp_mb__before_uatomic_inc()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_inc()		cmm_barrier()

#define uatomic_dec(addr)	UATOMIC_COMPAT(dec(addr))
#define cmm_smp_mb__before_uatomic_dec()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_dec()		cmm_barrier()
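/*
 * Usage sketch (illustrative): on x86 every locked uatomic_* operation
 * above already implies a full memory barrier, so the before/after
 * macros reduce to cmm_barrier(), a compiler-only barrier. Portable
 * code should still write the pair:
 *
 *	cmm_smp_mb__before_uatomic_inc();
 *	uatomic_inc(&ready_count);		// example variable
 *	cmm_smp_mb__after_uatomic_inc();
 */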
#ifdef __cplusplus
}
#endif

#include <urcu/uatomic/generic.h>
#endif /* _URCU_ARCH_UATOMIC_X86_H */