mutex_lock_signal_restore(&compat_mutex, &mask);
}
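+/*
+ * Compat fallback: perform the AND under compat_mutex with signals
+ * blocked, since no native atomic AND instruction is used on this path.
+ */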
+void _compat_uatomic_and(void *addr, unsigned long v, int len)
+{
+ sigset_t mask;
+
+ mutex_lock_signal_save(&compat_mutex, &mask);
+ switch (len) {
+ case 1:
+ *(unsigned char *)addr &= (unsigned char)v;
+ break;
+ case 2:
+ *(unsigned short *)addr &= (unsigned short)v;
+ break;
+ case 4:
+ *(unsigned int *)addr &= (unsigned int)v;
+ break;
+ default:
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
+ __asm__ __volatile__("ud2");
+ }
+ mutex_lock_signal_restore(&compat_mutex, &mask);
+}
+
unsigned long _compat_uatomic_add_return(void *addr, unsigned long v, int len)
{
sigset_t mask;
v = uatomic_sub_return(ptr, 1); \
assert(v == 121); \
assert(uatomic_read(ptr) == 121); \
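+	/* 121 (0x79) & 129 (0x81) == 1 */		\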
+ uatomic_and(ptr, 129); \
+ assert(uatomic_read(ptr) == 1); \
} while (0)
int main(int argc, char **argv)
(unsigned long)(v), \
sizeof(*(addr))))
+/* uatomic_and */
+
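+/* Atomic AND using the lock-prefixed and{b,w,l,q} instruction for the operand size. */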
+static inline __attribute__((always_inline))
+void __uatomic_and(void *addr, unsigned long val, int len)
+{
+ switch (len) {
+ case 1:
+ {
+ __asm__ __volatile__(
+ "lock; andb %1, %0"
+ : "=m"(*__hp(addr))
+ : "iq" ((unsigned char)val)
+ : "memory");
+ return;
+ }
+ case 2:
+ {
+ __asm__ __volatile__(
+ "lock; andw %1, %0"
+ : "=m"(*__hp(addr))
+ : "ir" ((unsigned short)val)
+ : "memory");
+ return;
+ }
+ case 4:
+ {
+ __asm__ __volatile__(
+ "lock; andl %1, %0"
+ : "=m"(*__hp(addr))
+ : "ir" ((unsigned int)val)
+ : "memory");
+ return;
+ }
+#if (CAA_BITS_PER_LONG == 64)
+ case 8:
+ {
+ __asm__ __volatile__(
+ "lock; andq %1, %0"
+ : "=m"(*__hp(addr))
+ : "er" ((unsigned long)val)
+ : "memory");
+ return;
+ }
+#endif
+ }
+ /* generate an illegal instruction. Cannot catch this with linker tricks
+ * when optimizations are disabled. */
+ __asm__ __volatile__("ud2");
+ return;
+}
+
+#define _uatomic_and(addr, v) \
+ (__uatomic_and((addr), (unsigned long)(v), sizeof(*(addr))))
+
/* uatomic_or */
static inline __attribute__((always_inline))
(unsigned long)(_new), \
sizeof(*(addr))))
+extern void _compat_uatomic_and(void *addr,
+			unsigned long v, int len);
+#define compat_uatomic_and(addr, v) \
+	(_compat_uatomic_and((addr), \
+			(unsigned long)(v), \
+			sizeof(*(addr))))
+
extern unsigned long _compat_uatomic_or(void *addr,
unsigned long _new, int len);
#define compat_uatomic_or(addr, v) \
UATOMIC_COMPAT(cmpxchg(addr, old, _new))
#define uatomic_xchg(addr, v) \
UATOMIC_COMPAT(xchg(addr, v))
+#define uatomic_and(addr, v) \
+ UATOMIC_COMPAT(and(addr, v))
#define uatomic_or(addr, v) \
UATOMIC_COMPAT(or(addr, v))
#define uatomic_add_return(addr, v) \
sizeof(*(addr))))
+/* uatomic_and */
+
+#ifndef uatomic_and
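+/* Atomic AND via the gcc __sync_and_and_fetch_N builtins, selected by operand size. */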
+static inline __attribute__((always_inline))
+void _uatomic_and(void *addr, unsigned long val,
+ int len)
+{
+	switch (len) {
+#ifdef UATOMIC_HAS_ATOMIC_BYTE
+	case 1:
+		__sync_and_and_fetch_1(addr, val);
+		return;
+#endif
+#ifdef UATOMIC_HAS_ATOMIC_SHORT
+	case 2:
+		__sync_and_and_fetch_2(addr, val);
+		return;
+#endif
+	case 4:
+		__sync_and_and_fetch_4(addr, val);
+		return;
+#if (CAA_BITS_PER_LONG == 64)
+	case 8:
+		__sync_and_and_fetch_8(addr, val);
+		return;
+#endif
+	}
+	_uatomic_link_error();
+}
+
+#define uatomic_and(addr, v) \
+ (_uatomic_and((addr), \
+ (unsigned long)(v), \
+ sizeof(*(addr))))
+#endif
+
/* uatomic_or */
#ifndef uatomic_or
#else /* #ifndef uatomic_cmpxchg */
+#ifndef uatomic_and
+/* uatomic_and */
+
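+/* Atomic AND built as a compare-and-swap retry loop on top of _uatomic_cmpxchg. */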
+static inline __attribute__((always_inline))
+void _uatomic_and(void *addr, unsigned long val, int len)
+{
+ switch (len) {
+#ifdef UATOMIC_HAS_ATOMIC_BYTE
+ case 1:
+ {
+ unsigned char old, oldt;
+
+ oldt = uatomic_read((unsigned char *)addr);
+ do {
+ old = oldt;
+ oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
+ } while (oldt != old);
+
+		return;
+	}
+#endif
+#ifdef UATOMIC_HAS_ATOMIC_SHORT
+ case 2:
+ {
+ unsigned short old, oldt;
+
+ oldt = uatomic_read((unsigned short *)addr);
+ do {
+ old = oldt;
+ oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
+ } while (oldt != old);
+
+		return;
+	}
+#endif
+ case 4:
+ {
+ unsigned int old, oldt;
+
+ oldt = uatomic_read((unsigned int *)addr);
+ do {
+ old = oldt;
+ oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
+ } while (oldt != old);
+
+		return;
+	}
+#if (CAA_BITS_PER_LONG == 64)
+ case 8:
+ {
+ unsigned long old, oldt;
+
+ oldt = uatomic_read((unsigned long *)addr);
+ do {
+ old = oldt;
+ oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
+ } while (oldt != old);
+
+		return;
+	}
+#endif
+ }
+	_uatomic_link_error();
+}
+
+#define uatomic_and(addr, v) \
+	(_uatomic_and((addr), \
+		(unsigned long)(v), \
+		sizeof(*(addr))))
+#endif /* #ifndef uatomic_and */
+
#ifndef uatomic_or
/* uatomic_or */