API cleanup: use "uatomic_*" in cmm_smp_mb__ API
author     Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
           Tue, 8 May 2012 21:07:03 +0000 (17:07 -0400)
committer  Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
           Wed, 9 Jan 2013 18:25:37 +0000 (13:25 -0500)
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
urcu/uatomic/generic.h
urcu/uatomic/x86.h
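
The rename is purely mechanical: each cmm_smp_mb__before_*()/cmm_smp_mb__after_*() helper gains a "uatomic_" prefix so the macro name spells out the uatomic operation it pairs with. A minimal caller-side sketch of the migration follows; the refcount variable and release_ref() function are hypothetical and not part of this patch:

    #include <urcu/uatomic.h>

    static unsigned long refcount;

    static void release_ref(void)
    {
            /* Old name: cmm_smp_mb__before_dec(); */
            cmm_smp_mb__before_uatomic_dec();
            uatomic_dec(&refcount);
            /* Old name: cmm_smp_mb__after_dec(); */
            cmm_smp_mb__after_uatomic_dec();
    }

In the hunks below, both generic.h and the x86 compat path map these helpers to cmm_barrier(), a compiler-only barrier, because the underlying atomic read-modify-write operations on those paths already act as full memory barriers.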

index 4ef71c77604eb4b96ba7913b4268aef6a3618250..bfd9b68f3b14bac2617ede3e51f8285090b05d77 100644 (file)
@@ -122,8 +122,8 @@ void _uatomic_and(void *addr, unsigned long val,
        (_uatomic_and((addr),                   \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
-#define cmm_smp_mb__before_and()       cmm_barrier()
-#define cmm_smp_mb__after_and()                cmm_barrier()
+#define cmm_smp_mb__before_uatomic_and()       cmm_barrier()
+#define cmm_smp_mb__after_uatomic_and()                cmm_barrier()
 
 #endif
 
@@ -162,8 +162,8 @@ void _uatomic_or(void *addr, unsigned long val,
        (_uatomic_or((addr),                    \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
-#define cmm_smp_mb__before_or()                cmm_barrier()
-#define cmm_smp_mb__after_or()         cmm_barrier()
+#define cmm_smp_mb__before_uatomic_or()                cmm_barrier()
+#define cmm_smp_mb__after_uatomic_or()         cmm_barrier()
 
 #endif
 
@@ -335,8 +335,8 @@ void _uatomic_and(void *addr, unsigned long val, int len)
        (_uatomic_and((addr),                   \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
-#define cmm_smp_mb__before_and()       cmm_barrier()
-#define cmm_smp_mb__after_and()                cmm_barrier()
+#define cmm_smp_mb__before_uatomic_and()       cmm_barrier()
+#define cmm_smp_mb__after_uatomic_and()                cmm_barrier()
 
 #endif /* #ifndef uatomic_and */
 
@@ -409,8 +409,8 @@ void _uatomic_or(void *addr, unsigned long val, int len)
        (_uatomic_or((addr),                    \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
-#define cmm_smp_mb__before_or()                cmm_barrier()
-#define cmm_smp_mb__after_or()         cmm_barrier()
+#define cmm_smp_mb__before_uatomic_or()                cmm_barrier()
+#define cmm_smp_mb__after_uatomic_or()         cmm_barrier()
 
 #endif /* #ifndef uatomic_or */
 
@@ -572,27 +572,27 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 
 #ifndef uatomic_add
 #define uatomic_add(addr, v)           (void)uatomic_add_return((addr), (v))
-#define cmm_smp_mb__before_add()       cmm_barrier()
-#define cmm_smp_mb__after_add()                cmm_barrier()
+#define cmm_smp_mb__before_uatomic_add()       cmm_barrier()
+#define cmm_smp_mb__after_uatomic_add()                cmm_barrier()
 #endif
 
 #define uatomic_sub_return(addr, v)    \
        uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
 #define uatomic_sub(addr, v)           \
        uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
-#define cmm_smp_mb__before_sub()       cmm_smp_mb__before_add()
-#define cmm_smp_mb__after_sub()                cmm_smp_mb__after_add()
+#define cmm_smp_mb__before_uatomic_sub()       cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_sub()                cmm_smp_mb__after_uatomic_add()
 
 #ifndef uatomic_inc
 #define uatomic_inc(addr)              uatomic_add((addr), 1)
-#define cmm_smp_mb__before_inc()       cmm_smp_mb__before_add()
-#define cmm_smp_mb__after_inc()                cmm_smp_mb__after_add()
+#define cmm_smp_mb__before_uatomic_inc()       cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_inc()                cmm_smp_mb__after_uatomic_add()
 #endif
 
 #ifndef uatomic_dec
 #define uatomic_dec(addr)              uatomic_add((addr), -1)
-#define cmm_smp_mb__before_dec()       cmm_smp_mb__before_add()
-#define cmm_smp_mb__after_dec()                cmm_smp_mb__after_add()
+#define cmm_smp_mb__before_uatomic_dec()       cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_dec()                cmm_smp_mb__after_uatomic_add()
 #endif
 
 #ifdef __cplusplus
index 99b0e6c6b405191538ff6ab50de35320c47652ea..c55ac4e49c667a8d45df3654acb047d48bd1c5e2 100644 (file)
@@ -579,28 +579,28 @@ extern unsigned long _compat_uatomic_add_return(void *addr,
 
 #define uatomic_and(addr, v)           \
                UATOMIC_COMPAT(and(addr, v))
-#define cmm_smp_mb__before_and()       cmm_barrier()
-#define cmm_smp_mb__after_and()                cmm_barrier()
+#define cmm_smp_mb__before_uatomic_and()       cmm_barrier()
+#define cmm_smp_mb__after_uatomic_and()                cmm_barrier()
 
 #define uatomic_or(addr, v)            \
                UATOMIC_COMPAT(or(addr, v))
-#define cmm_smp_mb__before_or()                cmm_barrier()
-#define cmm_smp_mb__after_or()         cmm_barrier()
+#define cmm_smp_mb__before_uatomic_or()                cmm_barrier()
+#define cmm_smp_mb__after_uatomic_or()         cmm_barrier()
 
 #define uatomic_add_return(addr, v)            \
                UATOMIC_COMPAT(add_return(addr, v))
 
 #define uatomic_add(addr, v)   UATOMIC_COMPAT(add(addr, v))
-#define cmm_smp_mb__before_add()       cmm_barrier()
-#define cmm_smp_mb__after_add()                cmm_barrier()
+#define cmm_smp_mb__before_uatomic_add()       cmm_barrier()
+#define cmm_smp_mb__after_uatomic_add()                cmm_barrier()
 
 #define uatomic_inc(addr)      UATOMIC_COMPAT(inc(addr))
-#define cmm_smp_mb__before_inc()       cmm_barrier()
-#define cmm_smp_mb__after_inc()                cmm_barrier()
+#define cmm_smp_mb__before_uatomic_inc()       cmm_barrier()
+#define cmm_smp_mb__after_uatomic_inc()                cmm_barrier()
 
 #define uatomic_dec(addr)      UATOMIC_COMPAT(dec(addr))
-#define cmm_smp_mb__before_dec()       cmm_barrier()
-#define cmm_smp_mb__after_dec()                cmm_barrier()
+#define cmm_smp_mb__before_uatomic_dec()       cmm_barrier()
+#define cmm_smp_mb__after_uatomic_dec()                cmm_barrier()
 
 #ifdef __cplusplus 
 }
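For out-of-tree code that still uses the old helper names, a small transition shim can keep it compiling until call sites are converted. This is an illustrative sketch only; it is not provided by this patch or by liburcu:

    /* Hypothetical compatibility shim; not part of liburcu or this patch. */
    #include <urcu/uatomic.h>

    #define cmm_smp_mb__before_add()  cmm_smp_mb__before_uatomic_add()
    #define cmm_smp_mb__after_add()   cmm_smp_mb__after_uatomic_add()
    #define cmm_smp_mb__before_sub()  cmm_smp_mb__before_uatomic_sub()
    #define cmm_smp_mb__after_sub()   cmm_smp_mb__after_uatomic_sub()
    #define cmm_smp_mb__before_inc()  cmm_smp_mb__before_uatomic_inc()
    #define cmm_smp_mb__after_inc()   cmm_smp_mb__after_uatomic_inc()
    #define cmm_smp_mb__before_dec()  cmm_smp_mb__before_uatomic_dec()
    #define cmm_smp_mb__after_dec()   cmm_smp_mb__after_uatomic_dec()
    #define cmm_smp_mb__before_and()  cmm_smp_mb__before_uatomic_and()
    #define cmm_smp_mb__after_and()   cmm_smp_mb__after_uatomic_and()
    #define cmm_smp_mb__before_or()   cmm_smp_mb__before_uatomic_or()
    #define cmm_smp_mb__after_or()    cmm_smp_mb__after_uatomic_or()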