static struct reader_data *reader_data;
static int num_readers, alloc_readers;
+#ifndef DEBUG_FULL_MB
static int sig_done;
+#endif

void internal_urcu_lock(void)
{

@@ ... @@
#ifdef DEBUG_FULL_MB
static void force_mb_all_threads(void)
{
-        mb();
+        smp_mb();
}
#else
static void force_mb_all_threads(void)
{
        struct reader_data *index;
        /*
-         * Ask each thread to execute a mb() so we can consider the
+         * Ask each thread to execute a smp_mb() so we can consider the
         * compiler barriers around rcu read lock as real memory barriers.
         */
        if (!reader_data)
                return;
        debug_yield_write();
        sig_done = 0;
        debug_yield_write();
-        mb();        /* write sig_done before sending the signals */
+        smp_mb();        /* write sig_done before sending the signals */
        debug_yield_write();
        for (index = reader_data; index < reader_data + num_readers; index++) {
                pthread_kill(index->tid, SIGURCU);
@@ ... @@
        while (sig_done < num_readers)
                barrier();
        debug_yield_write();
-        mb();        /* read sig_done before ending the barrier */
+        smp_mb();        /* read sig_done before ending the barrier */
        debug_yield_write();
}
#endif
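
The mechanism in the hunk above deserves an end-to-end view: the writer zeroes sig_done, signals every registered reader thread, then spins until each thread's signal handler has executed a memory barrier and incremented the counter. The following is a minimal, self-contained sketch of that handshake, illustrative only: SIGUSR1 stands in for SIGURCU, GCC's __sync_synchronize() and __sync_fetch_and_add() stand in for the patch's smp_mb() and atomic_inc(), and the reader loop is a stub. Build with gcc -pthread.

/* Sketch of the signal-based barrier handshake; not the patch's code. */
#include <pthread.h>
#include <signal.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>

#define NR_READERS 4

static pthread_t reader_tid[NR_READERS];
static volatile int sig_done;   /* how many handlers have run */
static volatile int stop;

static void handler(int signo)
{
        (void)signo;
        __sync_synchronize();                   /* the barrier the writer asked for */
        __sync_fetch_and_add(&sig_done, 1);     /* plays the role of atomic_inc(&sig_done) */
}

static void *reader(void *arg)
{
        (void)arg;
        while (!stop)                           /* stand-in for the read-side loop */
                usleep(1000);
        return NULL;
}

static void force_mb_all_threads(void)
{
        int i;

        sig_done = 0;
        __sync_synchronize();                   /* write sig_done before signalling */
        for (i = 0; i < NR_READERS; i++)
                pthread_kill(reader_tid[i], SIGUSR1);
        while (sig_done < NR_READERS)           /* busy-wait for every handler */
                ;
        __sync_synchronize();                   /* read sig_done before returning */
}

int main(void)
{
        struct sigaction sa;
        int i;

        memset(&sa, 0, sizeof(sa));
        sa.sa_handler = handler;
        sigemptyset(&sa.sa_mask);
        sigaction(SIGUSR1, &sa, NULL);

        for (i = 0; i < NR_READERS; i++)
                pthread_create(&reader_tid[i], NULL, reader, NULL);
        force_mb_all_threads();
        printf("all %d readers executed a memory barrier\n", NR_READERS);
        stop = 1;
        for (i = 0; i < NR_READERS; i++)
                pthread_join(reader_tid[i], NULL);
        return 0;
}

The handler-side barrier matters as much as the writer-side ones: it orders the reader's preceding memory accesses before the increment that the writer polls.
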
@@ ... @@
         * waiting forever while new readers are always accessing data (no
         * progress).
         */
-        mb();
+        smp_mb();

        /*
         * Wait for previous parity to be empty of readers.
@@ ... @@
         * the writer waiting forever while new readers are always accessing
         * data (no progress).
         */
-        mb();
+        smp_mb();

        switch_next_urcu_qparity();        /* 1 -> 0 */
        debug_yield_write();
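
Pieced together, these hunks trace the writer-side grace period: flip the parity, force ordering, wait for the previous parity to drain, then repeat for the other parity. A simplified sketch of the shape they imply, with the debug_yield_write() calls omitted; helper names follow the patch where visible and are otherwise assumed (reconstructed from hunk context, not quoted from the patch):

/* Assumed overall shape of synchronize_rcu(); reconstructed, not quoted. */
void synchronize_rcu(void)
{
        internal_urcu_lock();
        force_mb_all_threads();

        switch_next_urcu_qparity();     /* 0 -> 1 */
        smp_mb();                       /* commit the parity flip before waiting */
        wait_for_quiescent_state();     /* wait for readers in parity 0 */

        smp_mb();                       /* finish waiting before flipping again */
        switch_next_urcu_qparity();     /* 1 -> 0 */
        smp_mb();                       /* commit the flip before waiting */
        wait_for_quiescent_state();     /* wait for readers in parity 1 */

        force_mb_all_threads();
        internal_urcu_unlock();
}
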
@@ ... @@
         * waiting forever while new readers are always accessing data (no
         * progress).
         */
-        mb();
+        smp_mb();

        /*
         * Wait for previous parity to be empty of readers.
@@ ... @@
#ifndef DEBUG_FULL_MB
void sigurcu_handler(int signo, siginfo_t *siginfo, void *context)
{
-        mb();
+        smp_mb();
        atomic_inc(&sig_done);
}
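
The three-argument handler signature above implies registration with SA_SIGINFO. A plausible installation sketch, assuming SIGURCU aliases SIGUSR1 as in urcu.h (the actual init code lies outside this patch):

#include <signal.h>
#include <string.h>

#define SIGURCU SIGUSR1         /* assumed alias, per urcu.h */

void sigurcu_handler(int signo, siginfo_t *siginfo, void *context);

static void install_sigurcu_handler(void)
{
        struct sigaction act;

        memset(&act, 0, sizeof(act));
        act.sa_sigaction = sigurcu_handler;
        act.sa_flags = SA_SIGINFO;      /* deliver via the 3-argument form */
        sigemptyset(&act.sa_mask);
        sigaction(SIGURCU, &act, NULL);
}
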
@@ ... @@
#define rmb() asm volatile("lfence":::"memory")
#define wmb() asm volatile("sfence" ::: "memory")
+/* Assume SMP machine, given we don't have this information */
+#define CONFIG_SMP 1
+
+#ifdef CONFIG_SMP
+#define smp_mb() mb()
+#define smp_rmb() rmb()
+#define smp_wmb() wmb()
+#else
+#define smp_mb() barrier()
+#define smp_rmb() barrier()
+#define smp_wmb() barrier()
+#endif
+
static inline void atomic_inc(int *v)
{
asm volatile("lock; incl %0"
@@ ... @@
#ifdef DEBUG_FULL_MB
static inline void read_barrier()
{
-        mb();
+        smp_mb();
}
#else
static inline void read_barrier()