/*
 * Copyright (C) 2009 Pierre-Marc Fournier
 * Copyright (C) 2011 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <lttng/ust-dlfcn.h>
#include <sys/types.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <urcu/system.h>
#include <urcu/uatomic.h>
#include <urcu/compiler.h>
#include <urcu/tls-compat.h>
#include <lttng/align.h>
31 #define TRACEPOINT_DEFINE
32 #define TRACEPOINT_CREATE_PROBES
/*
 * Bootstrap arena used before the real allocator symbols are resolved
 * via dlsym(). Allocations are carved sequentially out of this buffer;
 * static_calloc_buf_offset tracks the next free byte.
 */
#define STATIC_CALLOC_LEN 4096
static char static_calloc_buf[STATIC_CALLOC_LEN];
static unsigned long static_calloc_buf_offset;
/*
 * Table of the underlying libc allocator entry points, resolved with
 * dlsym(RTLD_NEXT, ...) by lookup_all_symbols(). Until the lookup
 * completes, these point at the static_* bootstrap implementations.
 */
struct alloc_functions {
	void *(*calloc)(size_t nmemb, size_t size);
	void *(*malloc)(size_t size);
	void (*free)(void *ptr);
	void *(*realloc)(void *ptr, size_t size);
	void *(*memalign)(size_t alignment, size_t size);
	int (*posix_memalign)(void **memptr, size_t alignment, size_t size);
};

static
struct alloc_functions cur_alloc;
52 * Make sure our own use of the LTS compat layer will not cause infinite
53 * recursion by calling calloc.
57 void *static_calloc(size_t nmemb
, size_t size
);
59 #define calloc static_calloc
60 static DEFINE_URCU_TLS(int, malloc_nesting
);
64 * Static allocator to use when initially executing dlsym(). It keeps a
65 * size_t value of each object size prior to the object.
68 void *static_calloc_aligned(size_t nmemb
, size_t size
, size_t alignment
)
70 size_t prev_offset
, new_offset
, res_offset
, aligned_offset
;
72 if (nmemb
* size
== 0) {
77 * Protect static_calloc_buf_offset from concurrent updates
78 * using a cmpxchg loop rather than a mutex to remove a
79 * dependency on pthread. This will minimize the risk of bad
80 * interaction between mutex and malloc instrumentation.
82 res_offset
= CMM_LOAD_SHARED(static_calloc_buf_offset
);
84 prev_offset
= res_offset
;
85 aligned_offset
= ALIGN(prev_offset
+ sizeof(size_t), alignment
);
86 new_offset
= aligned_offset
+ nmemb
* size
;
87 if (new_offset
> sizeof(static_calloc_buf
)) {
90 } while ((res_offset
= uatomic_cmpxchg(&static_calloc_buf_offset
,
91 prev_offset
, new_offset
)) != prev_offset
);
92 *(size_t *) &static_calloc_buf
[aligned_offset
- sizeof(size_t)] = size
;
93 return &static_calloc_buf
[aligned_offset
];
/* Bootstrap calloc: unaligned allocation from the static buffer. */
static
void *static_calloc(size_t nmemb, size_t size)
{
	void *retval;

	retval = static_calloc_aligned(nmemb, size, 1);
	return retval;
}
/* Bootstrap malloc: single unaligned object from the static buffer. */
static
void *static_malloc(size_t size)
{
	void *retval;

	retval = static_calloc_aligned(1, size, 1);
	return retval;
}
/*
 * Bootstrap free: intentional no-op. Memory carved out of the static
 * buffer is never reclaimed; it only exists to survive dlsym().
 */
static
void static_free(void *ptr)
{
}
/*
 * Bootstrap realloc. Relies on the size_t header stored before each
 * static allocation to know the old length. Shrinking reuses the entry
 * in place; growing allocates a new entry and copies (the old entry is
 * deliberately not freed — the static allocator cannot reclaim).
 */
static
void *static_realloc(void *ptr, size_t size)
{
	size_t *old_size = NULL;
	void *retval;

	if (size == 0) {
		retval = NULL;
		goto end;
	}

	if (ptr) {
		old_size = (size_t *) ptr - 1;
		if (size <= *old_size) {
			/* We can re-use the old entry. */
			retval = ptr;
			goto end;
		}
	}

	/* We need to expand. Don't free previous memory location. */
	retval = static_calloc_aligned(1, size, 1);
	assert(retval);
	if (ptr)
		memcpy(retval, ptr, *old_size);
end:
	return retval;
}
/* Bootstrap memalign: aligned allocation from the static buffer. */
static
void *static_memalign(size_t alignment, size_t size)
{
	void *retval;

	retval = static_calloc_aligned(1, size, alignment);
	return retval;
}
/*
 * Bootstrap posix_memalign. Rejects invalid alignments (not a power of
 * two, smaller than a pointer, or zero) by leaving *memptr untouched;
 * always returns 0 because the static allocator aborts rather than
 * failing on exhaustion.
 */
static
int static_posix_memalign(void **memptr, size_t alignment, size_t size)
{
	void *ptr;

	/* Check for power of 2, larger than void *. */
	if (alignment & (alignment - 1)
			|| alignment < sizeof(void *)
			|| alignment == 0) {
		goto end;
	}
	ptr = static_calloc_aligned(1, size, alignment);
	*memptr = ptr;
end:
	return 0;
}
176 void setup_static_allocator(void)
178 assert(cur_alloc
.calloc
== NULL
);
179 cur_alloc
.calloc
= static_calloc
;
180 assert(cur_alloc
.malloc
== NULL
);
181 cur_alloc
.malloc
= static_malloc
;
182 assert(cur_alloc
.free
== NULL
);
183 cur_alloc
.free
= static_free
;
184 assert(cur_alloc
.realloc
== NULL
);
185 cur_alloc
.realloc
= static_realloc
;
186 assert(cur_alloc
.memalign
== NULL
);
187 cur_alloc
.memalign
= static_memalign
;
188 assert(cur_alloc
.posix_memalign
== NULL
);
189 cur_alloc
.posix_memalign
= static_posix_memalign
;
193 void lookup_all_symbols(void)
195 struct alloc_functions af
;
198 * Temporarily redirect allocation functions to
199 * static_calloc_aligned, and free function to static_free
200 * (no-op), until the dlsym lookup has completed.
202 setup_static_allocator();
204 /* Perform the actual lookups */
205 af
.calloc
= dlsym(RTLD_NEXT
, "calloc");
206 af
.malloc
= dlsym(RTLD_NEXT
, "malloc");
207 af
.free
= dlsym(RTLD_NEXT
, "free");
208 af
.realloc
= dlsym(RTLD_NEXT
, "realloc");
209 af
.memalign
= dlsym(RTLD_NEXT
, "memalign");
210 af
.posix_memalign
= dlsym(RTLD_NEXT
, "posix_memalign");
212 /* Populate the new allocator functions */
213 memcpy(&cur_alloc
, &af
, sizeof(cur_alloc
));
216 void *malloc(size_t size
)
220 URCU_TLS(malloc_nesting
)++;
221 if (cur_alloc
.malloc
== NULL
) {
222 lookup_all_symbols();
223 if (cur_alloc
.malloc
== NULL
) {
224 fprintf(stderr
, "mallocwrap: unable to find malloc\n");
228 retval
= cur_alloc
.malloc(size
);
229 if (URCU_TLS(malloc_nesting
) == 1) {
230 tracepoint(ust_libc
, malloc
, size
, retval
);
232 URCU_TLS(malloc_nesting
)--;
238 URCU_TLS(malloc_nesting
)++;
240 * Check whether the memory was allocated with
241 * static_calloc_align, in which case there is nothing to free.
243 if (caa_unlikely((char *)ptr
>= static_calloc_buf
&&
244 (char *)ptr
< static_calloc_buf
+ STATIC_CALLOC_LEN
)) {
248 if (URCU_TLS(malloc_nesting
) == 1) {
249 tracepoint(ust_libc
, free
, ptr
);
252 if (cur_alloc
.free
== NULL
) {
253 lookup_all_symbols();
254 if (cur_alloc
.free
== NULL
) {
255 fprintf(stderr
, "mallocwrap: unable to find free\n");
261 URCU_TLS(malloc_nesting
)--;
264 void *calloc(size_t nmemb
, size_t size
)
268 URCU_TLS(malloc_nesting
)++;
269 if (cur_alloc
.calloc
== NULL
) {
270 lookup_all_symbols();
271 if (cur_alloc
.calloc
== NULL
) {
272 fprintf(stderr
, "callocwrap: unable to find calloc\n");
276 retval
= cur_alloc
.calloc(nmemb
, size
);
277 if (URCU_TLS(malloc_nesting
) == 1) {
278 tracepoint(ust_libc
, calloc
, nmemb
, size
, retval
);
280 URCU_TLS(malloc_nesting
)--;
284 void *realloc(void *ptr
, size_t size
)
288 URCU_TLS(malloc_nesting
)++;
290 * Check whether the memory was allocated with
291 * static_calloc_align, in which case there is nothing
292 * to free, and we need to copy the old data.
294 if (caa_unlikely((char *)ptr
>= static_calloc_buf
&&
295 (char *)ptr
< static_calloc_buf
+ STATIC_CALLOC_LEN
)) {
298 old_size
= (size_t *) ptr
- 1;
299 if (cur_alloc
.calloc
== NULL
) {
300 lookup_all_symbols();
301 if (cur_alloc
.calloc
== NULL
) {
302 fprintf(stderr
, "reallocwrap: unable to find calloc\n");
306 retval
= cur_alloc
.calloc(1, size
);
308 memcpy(retval
, ptr
, *old_size
);
311 * Mimick that a NULL pointer has been received, so
312 * memory allocation analysis based on the trace don't
313 * get confused by the address from the static
320 if (cur_alloc
.realloc
== NULL
) {
321 lookup_all_symbols();
322 if (cur_alloc
.realloc
== NULL
) {
323 fprintf(stderr
, "reallocwrap: unable to find realloc\n");
327 retval
= cur_alloc
.realloc(ptr
, size
);
329 if (URCU_TLS(malloc_nesting
) == 1) {
330 tracepoint(ust_libc
, realloc
, ptr
, size
, retval
);
332 URCU_TLS(malloc_nesting
)--;
336 void *memalign(size_t alignment
, size_t size
)
340 URCU_TLS(malloc_nesting
)++;
341 if (cur_alloc
.memalign
== NULL
) {
342 lookup_all_symbols();
343 if (cur_alloc
.memalign
== NULL
) {
344 fprintf(stderr
, "memalignwrap: unable to find memalign\n");
348 retval
= cur_alloc
.memalign(alignment
, size
);
349 if (URCU_TLS(malloc_nesting
) == 1) {
350 tracepoint(ust_libc
, memalign
, alignment
, size
, retval
);
352 URCU_TLS(malloc_nesting
)--;
356 int posix_memalign(void **memptr
, size_t alignment
, size_t size
)
360 URCU_TLS(malloc_nesting
)++;
361 if (cur_alloc
.posix_memalign
== NULL
) {
362 lookup_all_symbols();
363 if (cur_alloc
.posix_memalign
== NULL
) {
364 fprintf(stderr
, "posix_memalignwrap: unable to find posix_memalign\n");
368 retval
= cur_alloc
.posix_memalign(memptr
, alignment
, size
);
369 if (URCU_TLS(malloc_nesting
) == 1) {
370 tracepoint(ust_libc
, posix_memalign
, *memptr
, alignment
, size
,
373 URCU_TLS(malloc_nesting
)--;
377 __attribute__((constructor
))
378 void lttng_ust_malloc_wrapper_init(void)
380 /* Initialization already done */
381 if (cur_alloc
.calloc
) {
385 * Ensure the allocator is in place before the process becomes
388 lookup_all_symbols();