X-Git-Url: http://git.lttng.org/?a=blobdiff_plain;f=liblttng-ust-libc-wrapper%2Flttng-ust-malloc.c;h=e1ff8e807f7a9fb22dc6f783230ce6f1b1693e4f;hb=6ba6fd60507f8e045bdc4f1be14e9d99c6a15f7f;hp=e4f9c35614dafce4ef929268be3fbe82ee279b5a;hpb=8da9deee450bf62c6c3abb1f44f9414c0667dd6f;p=lttng-ust.git

diff --git a/liblttng-ust-libc-wrapper/lttng-ust-malloc.c b/liblttng-ust-libc-wrapper/lttng-ust-malloc.c
index e4f9c356..e1ff8e80 100644
--- a/liblttng-ust-libc-wrapper/lttng-ust-malloc.c
+++ b/liblttng-ust-libc-wrapper/lttng-ust-malloc.c
@@ -14,11 +14,16 @@
 #include 
 #include 
 #include 
+#include 
+
 #include 
 #include 
 #include 
 #include 
 #include 
+
+#include 
+
 #include 
 #include "ust-compat.h"
@@ -60,7 +65,7 @@ static
 void ust_malloc_spin_lock(pthread_mutex_t *lock)
 	__attribute__((unused));
 static
-void ust_malloc_spin_lock(pthread_mutex_t *lock)
+void ust_malloc_spin_lock(pthread_mutex_t *lock __attribute__((unused)))
 {
 	/*
 	 * The memory barrier within cmpxchg takes care of ordering
@@ -75,7 +80,7 @@ static
 void ust_malloc_spin_unlock(pthread_mutex_t *lock)
 	__attribute__((unused));
 static
-void ust_malloc_spin_unlock(pthread_mutex_t *lock)
+void ust_malloc_spin_unlock(pthread_mutex_t *lock __attribute__((unused)))
 {
 	/*
 	 * Ensure memory accesses within the critical section do not
@@ -145,7 +150,7 @@ void *static_malloc(size_t size)
 }
 
 static
-void static_free(void *ptr)
+void static_free(void *ptr __attribute__((unused)))
 {
 	/* no-op. */
 }
@@ -420,7 +425,6 @@ void lttng_ust_fixup_malloc_nesting_tls(void)
 	asm volatile ("" : : "m" (URCU_TLS(malloc_nesting)));
 }
 
-__attribute__((constructor))
 void lttng_ust_libc_wrapper_malloc_init(void)
 {
 	/* Initialization already done */
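
/*
 * Side note, not part of the patch above: the hunks annotate parameters
 * that are intentionally ignored with __attribute__((unused)), which keeps
 * the wrapper's signatures unchanged while silencing -Wunused-parameter
 * (enabled by -Wextra). A minimal standalone sketch of the same pattern,
 * using hypothetical names, might look like this:
 */
#include <stdio.h>

static
void noop_free(void *ptr __attribute__((unused)))
{
	/* Deliberately does nothing; 'ptr' exists only to match free(3). */
}

int main(void)
{
	/* Compiles cleanly even with -Wall -Wextra despite the unused argument. */
	noop_free(NULL);
	printf("noop_free returned without touching its argument\n");
	return 0;
}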