summary |
shortlog |
log |
commit | commitdiff |
tree
raw |
patch |
inline | side by side (from parent 1: 6d56bfa)
Implement:
cmm_smp_mb__before_and, cmm_smp_mb__after_and
cmm_smp_mb__before_or, cmm_smp_mb__after_or
cmm_smp_mb__before_add, cmm_smp_mb__after_add
cmm_smp_mb__before_sub, cmm_smp_mb__after_sub
cmm_smp_mb__before_inc, cmm_smp_mb__after_inc
cmm_smp_mb__before_dec, cmm_smp_mb__after_dec
For generic and x86.
These currently translate into simple compiler barriers on all
architectures, because the and/or/add/sub/inc/dec uatomics do not provide
memory ordering guarantees (only uatomic_add_return, uatomic_sub_return,
uatomic_xchg, and uatomic_cmpxchg provide full memory barrier
guarantees before and after the atomic operations).
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
(_uatomic_and((addr), \
caa_cast_long_keep_sign(v), \
sizeof(*(addr))))
(_uatomic_and((addr), \
caa_cast_long_keep_sign(v), \
sizeof(*(addr))))
+#define cmm_smp_mb__before_and() cmm_barrier()
+#define cmm_smp_mb__after_and() cmm_barrier()
+
(_uatomic_or((addr), \
caa_cast_long_keep_sign(v), \
sizeof(*(addr))))
(_uatomic_or((addr), \
caa_cast_long_keep_sign(v), \
sizeof(*(addr))))
+#define cmm_smp_mb__before_or() cmm_barrier()
+#define cmm_smp_mb__after_or() cmm_barrier()
+
/* uatomic_add_return */
#ifndef uatomic_add_return
/* uatomic_add_return */
#ifndef uatomic_add_return
(_uatomic_and((addr), \
caa_cast_long_keep_sign(v), \
sizeof(*(addr))))
(_uatomic_and((addr), \
caa_cast_long_keep_sign(v), \
sizeof(*(addr))))
+#define cmm_smp_mb__before_and() cmm_barrier()
+#define cmm_smp_mb__after_and() cmm_barrier()
+
#endif /* #ifndef uatomic_and */
#ifndef uatomic_or
#endif /* #ifndef uatomic_and */
#ifndef uatomic_or
(_uatomic_or((addr), \
caa_cast_long_keep_sign(v), \
sizeof(*(addr))))
(_uatomic_or((addr), \
caa_cast_long_keep_sign(v), \
sizeof(*(addr))))
+#define cmm_smp_mb__before_or() cmm_barrier()
+#define cmm_smp_mb__after_or() cmm_barrier()
+
#endif /* #ifndef uatomic_or */
#ifndef uatomic_add_return
#endif /* #ifndef uatomic_or */
#ifndef uatomic_add_return
#ifndef uatomic_add
#define uatomic_add(addr, v) (void)uatomic_add_return((addr), (v))
#ifndef uatomic_add
#define uatomic_add(addr, v) (void)uatomic_add_return((addr), (v))
+#define cmm_smp_mb__before_add() cmm_barrier()
+#define cmm_smp_mb__after_add() cmm_barrier()
#endif
#define uatomic_sub_return(addr, v) \
uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v) \
uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#endif
#define uatomic_sub_return(addr, v) \
uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v) \
uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
+#define cmm_smp_mb__before_sub() cmm_smp_mb__before_add()
+#define cmm_smp_mb__after_sub() cmm_smp_mb__after_add()
#ifndef uatomic_inc
#define uatomic_inc(addr) uatomic_add((addr), 1)
#ifndef uatomic_inc
#define uatomic_inc(addr) uatomic_add((addr), 1)
+#define cmm_smp_mb__before_inc() cmm_smp_mb__before_add()
+#define cmm_smp_mb__after_inc() cmm_smp_mb__after_add()
#endif
#ifndef uatomic_dec
#define uatomic_dec(addr) uatomic_add((addr), -1)
#endif
#ifndef uatomic_dec
#define uatomic_dec(addr) uatomic_add((addr), -1)
+#define cmm_smp_mb__before_dec() cmm_smp_mb__before_add()
+#define cmm_smp_mb__after_dec() cmm_smp_mb__after_add()
#endif
#ifdef __cplusplus
#endif
#ifdef __cplusplus
UATOMIC_COMPAT(cmpxchg(addr, old, _new))
#define uatomic_xchg(addr, v) \
UATOMIC_COMPAT(xchg(addr, v))
UATOMIC_COMPAT(cmpxchg(addr, old, _new))
#define uatomic_xchg(addr, v) \
UATOMIC_COMPAT(xchg(addr, v))
#define uatomic_and(addr, v) \
UATOMIC_COMPAT(and(addr, v))
#define uatomic_and(addr, v) \
UATOMIC_COMPAT(and(addr, v))
+#define cmm_smp_mb__before_and() cmm_barrier()
+#define cmm_smp_mb__after_and() cmm_barrier()
+
#define uatomic_or(addr, v) \
UATOMIC_COMPAT(or(addr, v))
#define uatomic_or(addr, v) \
UATOMIC_COMPAT(or(addr, v))
+#define cmm_smp_mb__before_or() cmm_barrier()
+#define cmm_smp_mb__after_or() cmm_barrier()
+
#define uatomic_add_return(addr, v) \
UATOMIC_COMPAT(add_return(addr, v))
#define uatomic_add(addr, v) UATOMIC_COMPAT(add(addr, v))
#define uatomic_add_return(addr, v) \
UATOMIC_COMPAT(add_return(addr, v))
#define uatomic_add(addr, v) UATOMIC_COMPAT(add(addr, v))
+#define cmm_smp_mb__before_add() cmm_barrier()
+#define cmm_smp_mb__after_add() cmm_barrier()
+
#define uatomic_inc(addr) UATOMIC_COMPAT(inc(addr))
#define uatomic_inc(addr) UATOMIC_COMPAT(inc(addr))
+#define cmm_smp_mb__before_inc() cmm_barrier()
+#define cmm_smp_mb__after_inc() cmm_barrier()
+
#define uatomic_dec(addr) UATOMIC_COMPAT(dec(addr))
#define uatomic_dec(addr) UATOMIC_COMPAT(dec(addr))
+#define cmm_smp_mb__before_dec() cmm_barrier()
+#define cmm_smp_mb__after_dec() cmm_barrier()