+/*
+ * Spinlock standing in for a pthread mutex in the URCU TLS compat
+ * layer. The pthread_mutex_t parameters below keep the pthread-style
+ * signature but are accepted and ignored.
+ */
+static int ust_malloc_lock;
+
+static __attribute__((unused))
+void ust_malloc_spin_lock(pthread_mutex_t *lock)
+{
+	/*
+	 * Busy-wait until we flip the flag from 0 to 1. The full
+	 * memory barrier implied by cmpxchg orders memory accesses
+	 * with respect to the start of the critical section, so no
+	 * extra barrier is needed here.
+	 */
+	for (;;) {
+		if (uatomic_cmpxchg(&ust_malloc_lock, 0, 1) == 0)
+			break;
+		caa_cpu_relax();
+	}
+}
+
+static __attribute__((unused))
+void ust_malloc_spin_unlock(pthread_mutex_t *lock)
+{
+ /*
+ * Release the spinlock taken by ust_malloc_spin_lock(). The
+ * pthread_mutex_t argument is unused; it only mirrors the
+ * pthread locking signature.
+ *
+ * Ensure memory accesses within the critical section do not
+ * leak outside: the full barrier must execute before the plain
+ * store below that publishes the unlocked state, since
+ * uatomic_set() by itself implies no ordering.
+ */
+ cmm_smp_mb();
+ uatomic_set(&ust_malloc_lock, 0);
+}
+