diff --git a/liblttng-ust-libc-wrapper/lttng-ust-malloc.c b/liblttng-ust-libc-wrapper/lttng-ust-malloc.c
index 06e0efe8..54afb144 100644
--- a/liblttng-ust-libc-wrapper/lttng-ust-malloc.c
+++ b/liblttng-ust-libc-wrapper/lttng-ust-malloc.c
@@ -26,6 +26,7 @@
 #include <urcu/uatomic.h>
 #include <urcu/compiler.h>
 #include <urcu/tls-compat.h>
+#include <urcu/arch.h>
 #include <lttng/align.h>
 
 #define TRACEPOINT_DEFINE
@@ -56,8 +57,40 @@ struct alloc_functions cur_alloc;
 static
 void *static_calloc(size_t nmemb, size_t size);
 
+/*
+ * pthread mutex replacement for URCU tls compat layer.
+ */
+static int ust_malloc_lock;
+
+static __attribute__((unused))
+void ust_malloc_spin_lock(pthread_mutex_t *lock)
+{
+	/*
+	 * The memory barrier within cmpxchg takes care of ordering
+	 * memory accesses with respect to the start of the critical
+	 * section.
+	 */
+	while (uatomic_cmpxchg(&ust_malloc_lock, 0, 1) != 0)
+		caa_cpu_relax();
+}
+
+static __attribute__((unused))
+void ust_malloc_spin_unlock(pthread_mutex_t *lock)
+{
+	/*
+	 * Ensure memory accesses within the critical section do not
+	 * leak outside.
+	 */
+	cmm_smp_mb();
+	uatomic_set(&ust_malloc_lock, 0);
+}
+
 #define calloc static_calloc
+#define pthread_mutex_lock ust_malloc_spin_lock
+#define pthread_mutex_unlock ust_malloc_spin_unlock
 static DEFINE_URCU_TLS(int, malloc_nesting);
+#undef pthread_mutex_unlock
+#undef pthread_mutex_lock
 #undef calloc
 
 /*
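
The hunk above swaps the pthread mutex taken by liburcu's <urcu/tls-compat.h> fallback for a busy-wait lock built from liburcu atomics: the #define/#undef bracket around DEFINE_URCU_TLS() redirects the calloc() and pthread_mutex_lock()/pthread_mutex_unlock() calls that the macro's compat path may emit to static_calloc and the spin-lock replacements, so the TLS compat layer never re-enters the wrapped allocator. The unused pthread_mutex_t * parameter only keeps the replacements signature-compatible with the pthread functions they stand in for.

For reference, a minimal standalone sketch of the same spin-lock pattern; the demo_* names and the threaded counter are illustrative only, not part of lttng-ust (assumes a liburcu install; build e.g. with gcc -pthread spin_demo.c):

#include <pthread.h>
#include <stdio.h>
#include <urcu/uatomic.h>	/* uatomic_cmpxchg(), uatomic_set() */
#include <urcu/arch.h>		/* caa_cpu_relax(), cmm_smp_mb() */

static int demo_lock;		/* 0 = unlocked, 1 = locked */
static long counter;

static void demo_spin_lock(void)
{
	/*
	 * The full barrier implied by cmpxchg orders entry into the
	 * critical section; spin until we swap 0 -> 1.
	 */
	while (uatomic_cmpxchg(&demo_lock, 0, 1) != 0)
		caa_cpu_relax();
}

static void demo_spin_unlock(void)
{
	/*
	 * uatomic_set() is a plain atomic store with no implied
	 * barrier, so fence explicitly before releasing to keep
	 * critical-section stores from leaking outside.
	 */
	cmm_smp_mb();
	uatomic_set(&demo_lock, 0);
}

static void *worker(void *arg)
{
	int i;

	for (i = 0; i < 100000; i++) {
		demo_spin_lock();
		counter++;
		demo_spin_unlock();
	}
	return NULL;
}

int main(void)
{
	pthread_t t1, t2;

	pthread_create(&t1, NULL, worker, NULL);
	pthread_create(&t2, NULL, worker, NULL);
	pthread_join(t1, NULL);
	pthread_join(t2, NULL);
	printf("counter = %ld (expect 200000)\n", counter);
	return 0;
}

Note the asymmetry the patch's own comments describe: the acquire side relies on the full barrier implied by uatomic_cmpxchg(), while the release side must issue cmm_smp_mb() explicitly before the store that drops the lock.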