From: Mathieu Desnoyers
Date: Mon, 15 Jan 2024 18:36:29 +0000 (-0500)
Subject: Fix: libc wrapper: use initial-exec for malloc_nesting TLS
X-Git-Tag: v2.13.8~4
X-Git-Url: https://git.lttng.org./?a=commitdiff_plain;h=5db715e800d9b721f48495a987cd26c25965e836;p=lttng-ust.git

Fix: libc wrapper: use initial-exec for malloc_nesting TLS

Use the initial-exec TLS model for the malloc_nesting nesting guard
variable to ensure that the glibc implementation of the TLS access does
not trigger infinite recursion by calling the memory allocator wrapper
functions, which can happen with the global-dynamic model.

Considering that the libc wrapper is meant to be loaded with LD_PRELOAD
anyway (never with dlopen(3)), we always expect the libc to have enough
static TLS space to hold the malloc_nesting variable.

In addition to changing malloc_nesting from global-dynamic to
initial-exec, this removes the URCU TLS compatibility layer from the
libc wrapper, which is a good thing: that compatibility layer relies on
pthread keys and calloc internally, which makes it a bad fit for TLS
accesses guarding the malloc wrappers, due to possible infinite
recursion.

Link: https://lists.lttng.org/pipermail/lttng-dev/2024-January/030697.html
Reported-by: Florian Weimer
Signed-off-by: Mathieu Desnoyers
Change-Id: I72c42bc09c1a06e2922b184b85abeb9c94200ee2
---

diff --git a/src/lib/lttng-ust-libc-wrapper/lttng-ust-malloc.c b/src/lib/lttng-ust-libc-wrapper/lttng-ust-malloc.c
index c96a51c9..ebb34504 100644
--- a/src/lib/lttng-ust-libc-wrapper/lttng-ust-malloc.c
+++ b/src/lib/lttng-ust-libc-wrapper/lttng-ust-malloc.c
@@ -22,7 +22,6 @@
 #include
 #include
 #include
-#include
 #include
 #include
@@ -96,13 +95,13 @@ void ust_malloc_spin_unlock(pthread_mutex_t *lock __attribute__((unused)))
 	uatomic_set(&ust_malloc_lock, 0);
 }
 
-#define calloc static_calloc
-#define pthread_mutex_lock ust_malloc_spin_lock
-#define pthread_mutex_unlock ust_malloc_spin_unlock
-static DEFINE_URCU_TLS(int, malloc_nesting);
-#undef pthread_mutex_unlock
-#undef pthread_mutex_lock
-#undef calloc
+/*
+ * Use the initial-exec TLS model for the malloc_nesting nesting guard
+ * variable to ensure that the glibc implementation of the TLS access
+ * does not trigger infinite recursion by calling the memory allocator
+ * wrapper functions, which could happen with global-dynamic.
+ */
+static __thread __attribute__((tls_model("initial-exec"))) int malloc_nesting;
 
 /*
  * Static allocator to use when initially executing dlsym(). It keeps a
@@ -261,7 +260,7 @@ void *malloc(size_t size)
 {
 	void *retval;
 
-	URCU_TLS(malloc_nesting)++;
+	malloc_nesting++;
 	if (cur_alloc.malloc == NULL) {
 		lookup_all_symbols();
 		if (cur_alloc.malloc == NULL) {
@@ -270,17 +269,17 @@ void *malloc(size_t size)
 		}
 	}
 	retval = cur_alloc.malloc(size);
-	if (URCU_TLS(malloc_nesting) == 1) {
+	if (malloc_nesting == 1) {
 		lttng_ust_tracepoint(lttng_ust_libc, malloc,
 			size, retval, LTTNG_UST_CALLER_IP());
 	}
-	URCU_TLS(malloc_nesting)--;
+	malloc_nesting--;
 	return retval;
 }
 
 void free(void *ptr)
 {
-	URCU_TLS(malloc_nesting)++;
+	malloc_nesting++;
 	/*
 	 * Check whether the memory was allocated with
 	 * static_calloc_align, in which case there is nothing to free.
@@ -290,7 +289,7 @@ void free(void *ptr)
 		goto end;
 	}
 
-	if (URCU_TLS(malloc_nesting) == 1) {
+	if (malloc_nesting == 1) {
 		lttng_ust_tracepoint(lttng_ust_libc, free, ptr,
 			LTTNG_UST_CALLER_IP());
 	}
@@ -304,14 +303,14 @@ void free(void *ptr)
 	}
 	cur_alloc.free(ptr);
 end:
-	URCU_TLS(malloc_nesting)--;
+	malloc_nesting--;
 }
 
 void *calloc(size_t nmemb, size_t size)
 {
 	void *retval;
 
-	URCU_TLS(malloc_nesting)++;
+	malloc_nesting++;
 	if (cur_alloc.calloc == NULL) {
 		lookup_all_symbols();
 		if (cur_alloc.calloc == NULL) {
@@ -320,11 +319,11 @@ void *calloc(size_t nmemb, size_t size)
 		}
 	}
 	retval = cur_alloc.calloc(nmemb, size);
-	if (URCU_TLS(malloc_nesting) == 1) {
+	if (malloc_nesting == 1) {
 		lttng_ust_tracepoint(lttng_ust_libc, calloc,
 			nmemb, size, retval, LTTNG_UST_CALLER_IP());
 	}
-	URCU_TLS(malloc_nesting)--;
+	malloc_nesting--;
 	return retval;
 }
 
@@ -332,7 +331,7 @@ void *realloc(void *ptr, size_t size)
 {
 	void *retval;
 
-	URCU_TLS(malloc_nesting)++;
+	malloc_nesting++;
 	/*
 	 * Check whether the memory was allocated with
 	 * static_calloc_align, in which case there is nothing
@@ -373,11 +372,11 @@ void *realloc(void *ptr, size_t size)
 	}
 	retval = cur_alloc.realloc(ptr, size);
 end:
-	if (URCU_TLS(malloc_nesting) == 1) {
+	if (malloc_nesting == 1) {
 		lttng_ust_tracepoint(lttng_ust_libc, realloc,
 			ptr, size, retval, LTTNG_UST_CALLER_IP());
 	}
-	URCU_TLS(malloc_nesting)--;
+	malloc_nesting--;
 	return retval;
 }
 
@@ -385,7 +384,7 @@ void *memalign(size_t alignment, size_t size)
 {
 	void *retval;
 
-	URCU_TLS(malloc_nesting)++;
+	malloc_nesting++;
 	if (cur_alloc.memalign == NULL) {
 		lookup_all_symbols();
 		if (cur_alloc.memalign == NULL) {
@@ -394,12 +393,12 @@ void *memalign(size_t alignment, size_t size)
 		}
 	}
 	retval = cur_alloc.memalign(alignment, size);
-	if (URCU_TLS(malloc_nesting) == 1) {
+	if (malloc_nesting == 1) {
 		lttng_ust_tracepoint(lttng_ust_libc, memalign,
 			alignment, size, retval,
 			LTTNG_UST_CALLER_IP());
 	}
-	URCU_TLS(malloc_nesting)--;
+	malloc_nesting--;
 	return retval;
 }
 
@@ -407,7 +406,7 @@ int posix_memalign(void **memptr, size_t alignment, size_t size)
 {
 	int retval;
 
-	URCU_TLS(malloc_nesting)++;
+	malloc_nesting++;
 	if (cur_alloc.posix_memalign == NULL) {
 		lookup_all_symbols();
 		if (cur_alloc.posix_memalign == NULL) {
@@ -416,28 +415,21 @@ int posix_memalign(void **memptr, size_t alignment, size_t size)
 		}
 	}
 	retval = cur_alloc.posix_memalign(memptr, alignment, size);
-	if (URCU_TLS(malloc_nesting) == 1) {
+	if (malloc_nesting == 1) {
 		lttng_ust_tracepoint(lttng_ust_libc, posix_memalign,
 			*memptr, alignment, size,
 			retval, LTTNG_UST_CALLER_IP());
 	}
-	URCU_TLS(malloc_nesting)--;
+	malloc_nesting--;
 	return retval;
 }
 
-static
-void lttng_ust_malloc_nesting_alloc_tls(void)
-{
-	__asm__ __volatile__ ("" : : "m" (URCU_TLS(malloc_nesting)));
-}
-
 void lttng_ust_libc_wrapper_malloc_ctor(void)
 {
 	/* Initialization already done */
 	if (cur_alloc.calloc) {
 		return;
 	}
-	lttng_ust_malloc_nesting_alloc_tls();
 	/*
 	 * Ensure the allocator is in place before the process becomes
 	 * multithreaded.
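
For reference, a minimal stand-alone sketch of the pattern this fix relies on
— an LD_PRELOAD allocator wrapper whose per-thread nesting guard uses the
initial-exec TLS model, so that the TLS access itself cannot call back into
the allocator — could look as follows. Names such as wrapper_nesting and
real_malloc are illustrative only and are not taken from lttng-ust:

/* Minimal illustrative sketch; not part of lttng-ust. */
#define _GNU_SOURCE
#include <dlfcn.h>
#include <stddef.h>

/* initial-exec TLS: accessing this never calls into the allocator. */
static __thread __attribute__((tls_model("initial-exec"))) int wrapper_nesting;

static void *(*real_malloc)(size_t size);

void *malloc(size_t size)
{
	void *ret;

	wrapper_nesting++;		/* guard against re-entry */
	if (!real_malloc)
		real_malloc = (void *(*)(size_t)) dlsym(RTLD_NEXT, "malloc");
	if (!real_malloc) {
		wrapper_nesting--;
		return NULL;
	}
	ret = real_malloc(size);
	if (wrapper_nesting == 1) {
		/* Outermost call: instrumentation (e.g. a tracepoint) goes here. */
	}
	wrapper_nesting--;
	return ret;
}

Such a wrapper would typically be built with
"gcc -shared -fPIC wrapper.c -o libwrapper.so -ldl" and activated with
LD_PRELOAD=./libwrapper.so, mirroring how the lttng-ust libc wrapper is
expected to be loaded.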