#define BITS_PER_LONG (__SIZEOF_LONG__ * 8)
#endif
/*
 * Plain store to an atomically-accessed location.  ACCESS_ONCE (defined
 * elsewhere in this project; presumably a volatile cast — confirm) forces a
 * single, compiler-untorn access.  No memory barrier is emitted here.
 */
#define uatomic_set(addr, v) \
do { \
	ACCESS_ONCE(*(addr)) = (v); \
} while (0)

/* Plain load counterpart of uatomic_set(); single compiler-level access. */
#define uatomic_read(addr) ACCESS_ONCE(*(addr))
+
/*
 * Atomically replace *addr (32-bit) with val and return the previous value.
 *
 * Implemented with a COMPARE AND SWAP (cs) retry loop, in the same style as
 * uatomic_add_32() below: load the current value, then cs the new value in;
 * on failure cs reloads the current value into %0 and we retry.
 *
 * NOTE(review): the original hunk was garbled here — the function header had
 * no body, followed by a stray macro-expansion fragment referencing an
 * undefined `v`.  Reconstructed to match this file's cs-loop idiom.
 */
static inline __attribute__((always_inline))
unsigned int uatomic_exchange_32(volatile unsigned int *addr, unsigned int val)
{
	unsigned int result;

	__asm__ __volatile__(
		"	l %0, %1\n"	/* result = *addr */
		"0:	cs %0,%2,%1\n"	/* if (*addr == result) *addr = val */
		"	brc 4,0b\n"	/* cc == 1: cs failed, retry */
		: "=&r"(result), "+m" (*addr)
		: "r"(val)
		: "memory", "cc");

	return result;
}
+
/*
 * Atomically add val to *addr (32-bit).
 *
 * Compare-and-swap retry loop: load the old value, compute old + val in a
 * scratch register, then cs it back.  If another CPU modified *addr in the
 * meantime, cs fails (condition code 1), reloads the current value into
 * `old`, and we retry.
 */
static inline __attribute__((always_inline))
void uatomic_add_32(volatile unsigned int *addr, unsigned int val)
{
	unsigned int result, old;

	__asm__ __volatile__(
		/* old = *addr */
		"	l %0, %1\n"
		/* result = old */
		"0:	lr %2, %0\n"
		/* result += val */
		"	ar %2, %3\n"
		/* if (*addr == old) *addr = result; else old = *addr */
		"	cs %0,%2,%1\n"
		/* cc == 1: cs failed, retry */
		"	brc 4,0b\n"
		: "=&r"(old), "+m" (*addr),
		"=&r"(result)
		: "r"(val)
		: "memory", "cc");
}
+
+#if (BITS_PER_LONG == 64)
+
/*
 * Atomically add val to *addr (64-bit).
 *
 * Same compare-and-swap retry loop as uatomic_add_32(), using the 64-bit
 * instruction forms (lg/lgr/agr/csg).  Only compiled on 64-bit builds
 * (guarded by BITS_PER_LONG == 64 in the surrounding file).
 */
static inline __attribute__((always_inline))
void uatomic_add_64(volatile unsigned long *addr, unsigned long val)
{
	unsigned long result, old;

	__asm__ __volatile__(
		/* old = *addr */
		"	lg %0, %1\n"
		/* result = old */
		"0:	lgr %2, %0\n"
		/* result += val */
		"	agr %2, %3\n"
		/* if (*addr == old) *addr = result; else old = *addr */
		"	csg %0,%2,%1\n"
		/* cc == 1: csg failed, retry */
		"	brc 4,0b\n"
		: "=&r"(old), "+m" (*addr),
		"=&r"(result)
		: "r"(val)
		: "memory", "cc");
}
+
+#endif
+
/*
 * Size dispatcher for uatomic_add(): routes to the 32-bit or (on 64-bit
 * builds) 64-bit implementation based on the operand width in bytes.
 * Any other width is a caller bug: we trap with an invalid opcode word.
 */
static inline __attribute__((always_inline))
void _uatomic_add(void *addr, unsigned long val, int len)
{
	if (len == 4) {
		uatomic_add_32(addr, val);
		return;
	}
#if (BITS_PER_LONG == 64)
	if (len == 8) {
		uatomic_add_64(addr, val);
		return;
	}
#endif
	/* Unsupported operand size: force an illegal-instruction trap. */
	__asm__ __volatile__(".long 0xd00d00");
}
+
/*
 * Type-generic atomic add: dispatches on sizeof(*addr) (4 or 8 bytes).
 */
#define uatomic_add(addr, val) \
	_uatomic_add((addr), (unsigned long)(val), sizeof(*(addr)))
+
/*
 * 32-bit compare-and-swap: if *addr == old, atomically store new into *addr.
 *
 * Returns the value *addr held before the operation — equal to `old` on
 * success; on failure cs writes the current value of *addr into `old`,
 * which is what we return.
 */
static inline __attribute__((always_inline))
unsigned int uatomic_cmpxchg_32(volatile unsigned int *addr, unsigned int old,
				unsigned int new)
{
	__asm__ __volatile__(
		/* single cs: no retry needed, failure is reported via return */
		"	cs %0,%2,%1\n"
		: "+r"(old), "+m"(*addr)
		: "r"(new)
		: "memory", "cc");

	return old;
}
+
+#if (BITS_PER_LONG == 64)
+
/*
 * 64-bit compare-and-swap: if *addr == old, atomically store new into *addr.
 *
 * Same contract as uatomic_cmpxchg_32(), using the 64-bit csg instruction.
 * Only compiled on 64-bit builds (guarded by BITS_PER_LONG == 64 in the
 * surrounding file).
 */
static inline __attribute__((always_inline))
unsigned long uatomic_cmpxchg_64(volatile unsigned long *addr,
				 unsigned long old, unsigned long new)
{
	__asm__ __volatile__(
		/* single csg: failure is reported via the returned value */
		"	csg %0,%2,%1\n"
		: "+r"(old), "+m"(*addr)
		: "r"(new)
		: "memory", "cc");

	return old;
}
+
+#endif
+
/*
 * Size dispatcher for uatomic_cmpxchg(): routes to the 32-bit or (on 64-bit
 * builds) 64-bit implementation based on the operand width in bytes; any
 * other width traps with an invalid opcode.  Returns the pre-operation value
 * of *addr (widened to unsigned long).
 *
 * Fix: declared static inline __attribute__((always_inline)), matching
 * _uatomic_add() above.  The original was a plain external definition, which
 * causes multiple-definition link errors as soon as this header is included
 * from more than one translation unit.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long new, int len)
{
	switch (len) {
	case 4:
		return uatomic_cmpxchg_32(addr, old, new);
#if (BITS_PER_LONG == 64)
	case 8:
		return uatomic_cmpxchg_64(addr, old, new);
#endif
	default:
		/* Unsupported operand size: force an illegal-instruction trap. */
		__asm__ __volatile__(".long 0xd00d00");
	}

	return 0;
}
+
/*
 * Type-generic compare-and-swap: dispatches on sizeof(*addr) (4 or 8 bytes)
 * and casts the returned old value back to the operand's type.
 */
#define uatomic_cmpxchg(addr, old, new) \
	(__typeof__(*(addr))) _uatomic_cmpxchg((addr), \
					       (unsigned long)(old), \
					       (unsigned long)(new), \
					       sizeof(*(addr)))
+
#endif /* _URCU_ARCH_ATOMIC_S390_H */