4 /* The "volatile" is due to gcc bugs */
5 #define barrier() __asm__ __volatile__("": : :"memory")
7 /* x86 32/64 specific */
8 #define mb() asm volatile("mfence":::"memory")
9 #define rmb() asm volatile("lfence":::"memory")
10 #define wmb() asm volatile("sfence" ::: "memory")
15 static inline void atomic_inc(int *v
)
17 asm volatile("lock; incl %0"
/* Data-dependency read barrier: nop everywhere except on alpha. */
#define smp_read_barrier_depends()

/* Signal sent to reader threads to force them to execute a memory barrier. */
#define SIGURCU SIGUSR1
/* Global quiescent period parity (0 or 1), flipped by the writer. */
extern int urcu_qparity;

/*
 * Per-thread count of active readers, one slot per parity value.
 * NOTE(review): presumably defined in urcu.c — confirm against the
 * implementation file.
 */
extern int __thread urcu_active_readers[2];
31 static inline int get_urcu_qparity(void)
37 * returns urcu_parity.
39 static inline int rcu_read_lock(void)
41 int urcu_parity
= get_urcu_qparity();
42 urcu_active_readers
[urcu_parity
]++;
44 * Increment active readers count before accessing the pointer.
45 * See force_mb_all_threads().
51 static inline void rcu_read_unlock(int urcu_parity
)
55 * Finish using rcu before decrementing the pointer.
56 * See force_mb_all_threads().
58 urcu_active_readers
[urcu_parity
]--;
/*
 * Exchange the pointer at *ptr for @new and return the previous value.
 * NOTE(review): presumably waits for a grace period before returning,
 * so the caller may free the old pointer — confirm in urcu.c.
 */
extern void *urcu_publish_content(void **ptr, void *new);
/*
 * Reader thread registration / unregistration.
 * (The original declared urcu_register_thread twice; the second
 * declaration was clearly meant to be the unregister counterpart.)
 */
extern void urcu_register_thread(void);
extern void urcu_unregister_thread(void);