#endif /* !COMPILER_HAVE_SHORT_MEM_OPERAND */
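+/*
+ * __hp() casts an address to a dummy structure pointer wide enough to
+ * cover the location accessed by the cs/csg instructions, giving the
+ * MEMOP_OUT()/MEMOP_IN() memory operands a complete object type to
+ * refer to.
+ */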
+struct __uatomic_dummy {
+	unsigned long v[10];
+};
+#define __hp(x) ((struct __uatomic_dummy *)(x))
+
#define uatomic_set(addr, v) STORE_SHARED(*(addr), (v))
#define uatomic_read(addr) LOAD_SHARED(*(addr))
__asm__ __volatile__(
"0: cs %0,%2," MEMOP_REF(%3) "\n"
" brc 4,0b\n"
- : "=&r" (old_val), MEMOP_OUT (addr)
- : "r" (val), MEMOP_IN (addr)
+ : "=&r" (old_val), MEMOP_OUT (__hp(addr))
+ : "r" (val), MEMOP_IN (__hp(addr))
: "memory", "cc");
return old_val;
}
__asm__ __volatile__(
"0: csg %0,%2," MEMOP_REF(%3) "\n"
" brc 4,0b\n"
- : "=&r" (old_val), MEMOP_OUT (addr)
- : "r" (val), MEMOP_IN (addr)
+ : "=&r" (old_val), MEMOP_OUT (__hp(addr))
+ : "r" (val), MEMOP_IN (__hp(addr))
: "memory", "cc");
return old_val;
}
__asm__ __volatile__(
" cs %0,%2," MEMOP_REF(%3) "\n"
- : "+r" (old_val), MEMOP_OUT (addr)
- : "r" (_new), MEMOP_IN (addr)
+ : "+r" (old_val), MEMOP_OUT (__hp(addr))
+ : "r" (_new), MEMOP_IN (__hp(addr))
: "memory", "cc");
return old_val;
}
{
__asm__ __volatile__(
" csg %0,%2," MEMOP_REF(%3) "\n"
- : "+r" (old), MEMOP_OUT (addr)
- : "r" (_new), MEMOP_IN (addr)
+ : "+r" (old), MEMOP_OUT (__hp(addr))
+ : "r" (_new), MEMOP_IN (__hp(addr))
: "memory", "cc");
return old;
}