From: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
Date: Tue, 18 Mar 2014 14:27:03 +0000 (-0400)
Subject: Fix: malloc wrapper: infinite recursion with compat TLS
X-Git-Tag: v2.5.0-rc1~25
X-Git-Url: https://git.lttng.org./?a=commitdiff_plain;h=20ef5166019178c9a27e47d29dd730149b270e74;p=lttng-ust.git

Fix: malloc wrapper: infinite recursion with compat TLS

Fixes #765

Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
---

diff --git a/liblttng-ust-libc-wrapper/lttng-ust-malloc.c b/liblttng-ust-libc-wrapper/lttng-ust-malloc.c
index 06e0efe8..54afb144 100644
--- a/liblttng-ust-libc-wrapper/lttng-ust-malloc.c
+++ b/liblttng-ust-libc-wrapper/lttng-ust-malloc.c
@@ -26,6 +26,7 @@
 #include <urcu/system.h>
 #include <urcu/compiler.h>
 #include <urcu/tls-compat.h>
+#include <urcu/uatomic.h>
 #include <urcu/arch.h>
 
 #define TRACEPOINT_DEFINE
@@ -56,8 +57,40 @@ struct alloc_functions cur_alloc;
 
 static
 void *static_calloc(size_t nmemb, size_t size);
 
+/*
+ * pthread mutex replacement for URCU tls compat layer.
+ */
+static int ust_malloc_lock;
+
+static __attribute__((unused))
+void ust_malloc_spin_lock(pthread_mutex_t *lock)
+{
+	/*
+	 * The memory barrier within cmpxchg takes care of ordering
+	 * memory accesses with respect to the start of the critical
+	 * section.
+	 */
+	while (uatomic_cmpxchg(&ust_malloc_lock, 0, 1) != 0)
+		caa_cpu_relax();
+}
+
+static __attribute__((unused))
+void ust_malloc_spin_unlock(pthread_mutex_t *lock)
+{
+	/*
+	 * Ensure memory accesses within the critical section do not
+	 * leak outside.
+	 */
+	cmm_smp_mb();
+	uatomic_set(&ust_malloc_lock, 0);
+}
+
 #define calloc static_calloc
+#define pthread_mutex_lock ust_malloc_spin_lock
+#define pthread_mutex_unlock ust_malloc_spin_unlock
 static DEFINE_URCU_TLS(int, malloc_nesting);
+#undef ust_malloc_spin_unlock
+#undef ust_malloc_spin_lock
 #undef calloc
 
 /*