From: Mathieu Desnoyers
Date: Thu, 12 Feb 2009 18:42:31 +0000 (-0500)
Subject: Add support for x86 older than P4, with CONFIG_HAS_FENCE option
X-Git-Tag: v0.1~288
X-Git-Url: https://git.lttng.org./?a=commitdiff_plain;h=82faadb56d6001ee1dc0bc5b3f22de49cd0ec2fe;p=userspace-rcu.git

Add support for x86 older than P4, with CONFIG_HAS_FENCE option

Signed-off-by: Mathieu Desnoyers
---

diff --git a/urcu.h b/urcu.h
index 79d9464..3e1ad78 100644
--- a/urcu.h
+++ b/urcu.h
@@ -26,10 +26,23 @@
 #define likely(x)	__builtin_expect(!!(x), 1)
 #define unlikely(x)	__builtin_expect(!!(x), 0)
 
+/* Assume P4 or newer */
+#define CONFIG_HAS_FENCE 1
+
 /* x86 32/64 specific */
+#ifdef CONFIG_HAS_FENCE
 #define mb()    asm volatile("mfence":::"memory")
 #define rmb()   asm volatile("lfence":::"memory")
-#define wmb()   asm volatile("sfence" ::: "memory")
+#define wmb()   asm volatile("sfence"::: "memory")
+#else
+/*
+ * Some non-Intel clones support out of order store. wmb() ceases to be a
+ * nop for these.
+ */
+#define mb()    asm volatile("lock; addl $0,0(%%esp)":::"memory")
+#define rmb()   asm volatile("lock; addl $0,0(%%esp)":::"memory")
+#define wmb()   asm volatile("lock; addl $0,0(%%esp)"::: "memory")
+#endif
 
 /* Assume SMP machine, given we don't have this information */
 #define CONFIG_SMP 1
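
Editor's note: the sketch below is not part of the commit. It is a minimal
illustration of why the wmb()/rmb() pairing matters on CPUs that reorder
stores, i.e. the new non-CONFIG_HAS_FENCE path, where all three barriers
fall back to a lock-prefixed add (a full barrier on x86). It assumes the
patched urcu.h is on the include path; the data/ready variables and the
writer/reader functions are illustrative names, not code from userspace-rcu.

#include "urcu.h"

static int data;
static int ready;

static void writer(void)
{
	data = 42;	/* fill in the payload */
	wmb();		/* order the store to data before the store to ready */
	ready = 1;	/* publish */
}

static void reader(void)
{
	if (ready) {	/* observe the published flag */
		rmb();	/* order the flag load before the data load */
		/* data is now guaranteed to read as 42 */
	}
}

The default leaves CONFIG_HAS_FENCE defined, matching the commit's
"Assume P4 or newer" comment; a build targeting older x86 clones would
undefine it to get the lock; addl fallback barriers instead of
mfence/lfence/sfence.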