/*
 * Copyright (C) 2009 Pierre-Marc Fournier
 * Copyright (C) 2011 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <lttng/ust-dlfcn.h>
#include <sys/types.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <errno.h>
#include <assert.h>
#include <urcu/system.h>
#include <urcu/uatomic.h>
#include <urcu/compiler.h>
#include <lttng/align.h>

#define TRACEPOINT_DEFINE
#define TRACEPOINT_CREATE_PROBES
/* Size of the bootstrap arena used before libc's allocator is resolved. */
#define STATIC_CALLOC_LEN 4096
/* Backing storage for the bootstrap allocator (carved up by static_calloc_aligned). */
static char static_calloc_buf[STATIC_CALLOC_LEN];
/* Current allocation offset into static_calloc_buf; advanced with a cmpxchg loop. */
static unsigned long static_calloc_buf_offset;
/*
 * Table of allocator entry points. Filled with the static_* fallbacks by
 * setup_static_allocator(), then overwritten with the real libc symbols
 * by lookup_all_symbols(). Extraction dropped the closing brace and the
 * static qualifier on cur_alloc; both restored here.
 */
struct alloc_functions {
	void *(*calloc)(size_t nmemb, size_t size);
	void *(*malloc)(size_t size);
	void (*free)(void *ptr);
	void *(*realloc)(void *ptr, size_t size);
	void *(*memalign)(size_t alignment, size_t size);
	int (*posix_memalign)(void **memptr, size_t alignment, size_t size);
};

/* Currently active allocator; zero-initialized at load time. */
static
struct alloc_functions cur_alloc;
51 * Static allocator to use when initially executing dlsym(). It keeps a
52 * size_t value of each object size prior to the object.
55 void *static_calloc_aligned(size_t nmemb
, size_t size
, size_t alignment
)
57 size_t prev_offset
, new_offset
, res_offset
, aligned_offset
;
59 if (nmemb
* size
== 0) {
64 * Protect static_calloc_buf_offset from concurrent updates
65 * using a cmpxchg loop rather than a mutex to remove a
66 * dependency on pthread. This will minimize the risk of bad
67 * interaction between mutex and malloc instrumentation.
69 res_offset
= CMM_LOAD_SHARED(static_calloc_buf_offset
);
71 prev_offset
= res_offset
;
72 aligned_offset
= ALIGN(prev_offset
+ sizeof(size_t), alignment
);
73 new_offset
= aligned_offset
+ nmemb
* size
;
74 if (new_offset
> sizeof(static_calloc_buf
)) {
77 } while ((res_offset
= uatomic_cmpxchg(&static_calloc_buf_offset
,
78 prev_offset
, new_offset
)) != prev_offset
);
79 *(size_t *) &static_calloc_buf
[aligned_offset
- sizeof(size_t)] = size
;
80 return &static_calloc_buf
[aligned_offset
];
84 void *static_calloc(size_t nmemb
, size_t size
)
88 retval
= static_calloc_aligned(nmemb
, size
, 1);
89 tracepoint(ust_libc
, calloc
, nmemb
, size
, retval
);
94 void *static_malloc(size_t size
)
98 retval
= static_calloc_aligned(1, size
, 1);
99 tracepoint(ust_libc
, malloc
, size
, retval
);
104 void static_free(void *ptr
)
107 tracepoint(ust_libc
, free
, ptr
);
111 void *static_realloc(void *ptr
, size_t size
)
113 size_t *old_size
= NULL
;
122 old_size
= (size_t *) ptr
- 1;
123 if (size
<= *old_size
) {
124 /* We can re-use the old entry. */
130 /* We need to expand. Don't free previous memory location. */
131 retval
= static_calloc_aligned(1, size
, 1);
134 memcpy(retval
, ptr
, *old_size
);
136 tracepoint(ust_libc
, realloc
, ptr
, size
, retval
);
141 void *static_memalign(size_t alignment
, size_t size
)
145 retval
= static_calloc_aligned(1, size
, alignment
);
146 tracepoint(ust_libc
, memalign
, alignment
, size
, retval
);
151 int static_posix_memalign(void **memptr
, size_t alignment
, size_t size
)
156 /* Check for power of 2, larger than void *. */
157 if (alignment
& (alignment
- 1)
158 || alignment
< sizeof(void *)
163 ptr
= static_calloc_aligned(1, size
, alignment
);
168 tracepoint(ust_libc
, posix_memalign
, *memptr
, alignment
, size
, retval
);
173 void setup_static_allocator(void)
175 assert(cur_alloc
.calloc
== NULL
);
176 cur_alloc
.calloc
= static_calloc
;
177 assert(cur_alloc
.malloc
== NULL
);
178 cur_alloc
.malloc
= static_malloc
;
179 assert(cur_alloc
.free
== NULL
);
180 cur_alloc
.free
= static_free
;
181 assert(cur_alloc
.realloc
== NULL
);
182 cur_alloc
.realloc
= static_realloc
;
183 assert(cur_alloc
.memalign
== NULL
);
184 cur_alloc
.memalign
= static_memalign
;
185 assert(cur_alloc
.posix_memalign
== NULL
);
186 cur_alloc
.posix_memalign
= static_posix_memalign
;
190 void lookup_all_symbols(void)
192 struct alloc_functions af
;
195 * Temporarily redirect allocation functions to
196 * static_calloc_aligned, and free function to static_free
197 * (no-op), until the dlsym lookup has completed.
199 setup_static_allocator();
201 /* Perform the actual lookups */
202 af
.calloc
= dlsym(RTLD_NEXT
, "calloc");
203 af
.malloc
= dlsym(RTLD_NEXT
, "malloc");
204 af
.free
= dlsym(RTLD_NEXT
, "free");
205 af
.realloc
= dlsym(RTLD_NEXT
, "realloc");
206 af
.memalign
= dlsym(RTLD_NEXT
, "memalign");
207 af
.posix_memalign
= dlsym(RTLD_NEXT
, "posix_memalign");
209 /* Populate the new allocator functions */
210 memcpy(&cur_alloc
, &af
, sizeof(cur_alloc
));
213 void *malloc(size_t size
)
217 if (cur_alloc
.malloc
== NULL
) {
218 lookup_all_symbols();
219 if (cur_alloc
.malloc
== NULL
) {
220 fprintf(stderr
, "mallocwrap: unable to find malloc\n");
224 retval
= cur_alloc
.malloc(size
);
225 tracepoint(ust_libc
, malloc
, size
, retval
);
231 tracepoint(ust_libc
, free
, ptr
);
234 * Check whether the memory was allocated with
235 * static_calloc_align, in which case there is nothing to free.
237 if (caa_unlikely((char *)ptr
>= static_calloc_buf
&&
238 (char *)ptr
< static_calloc_buf
+ STATIC_CALLOC_LEN
)) {
242 if (cur_alloc
.free
== NULL
) {
243 lookup_all_symbols();
244 if (cur_alloc
.free
== NULL
) {
245 fprintf(stderr
, "mallocwrap: unable to find free\n");
252 void *calloc(size_t nmemb
, size_t size
)
256 if (cur_alloc
.calloc
== NULL
) {
257 lookup_all_symbols();
258 if (cur_alloc
.calloc
== NULL
) {
259 fprintf(stderr
, "callocwrap: unable to find calloc\n");
263 retval
= cur_alloc
.calloc(nmemb
, size
);
264 tracepoint(ust_libc
, calloc
, nmemb
, size
, retval
);
268 void *realloc(void *ptr
, size_t size
)
272 /* Check whether the memory was allocated with
273 * static_calloc_align, in which case there is nothing
274 * to free, and we need to copy the old data.
276 if (caa_unlikely((char *)ptr
>= static_calloc_buf
&&
277 (char *)ptr
< static_calloc_buf
+ STATIC_CALLOC_LEN
)) {
280 old_size
= (size_t *) ptr
- 1;
281 if (cur_alloc
.calloc
== NULL
) {
282 lookup_all_symbols();
283 if (cur_alloc
.calloc
== NULL
) {
284 fprintf(stderr
, "reallocwrap: unable to find calloc\n");
288 retval
= cur_alloc
.calloc(1, size
);
290 memcpy(retval
, ptr
, *old_size
);
295 if (cur_alloc
.realloc
== NULL
) {
296 lookup_all_symbols();
297 if (cur_alloc
.realloc
== NULL
) {
298 fprintf(stderr
, "reallocwrap: unable to find realloc\n");
302 retval
= cur_alloc
.realloc(ptr
, size
);
304 tracepoint(ust_libc
, realloc
, ptr
, size
, retval
);
308 void *memalign(size_t alignment
, size_t size
)
312 if (cur_alloc
.memalign
== NULL
) {
313 lookup_all_symbols();
314 if (cur_alloc
.memalign
== NULL
) {
315 fprintf(stderr
, "memalignwrap: unable to find memalign\n");
319 retval
= cur_alloc
.memalign(alignment
, size
);
320 tracepoint(ust_libc
, memalign
, alignment
, size
, retval
);
324 int posix_memalign(void **memptr
, size_t alignment
, size_t size
)
328 if (cur_alloc
.posix_memalign
== NULL
) {
329 lookup_all_symbols();
330 if (cur_alloc
.posix_memalign
== NULL
) {
331 fprintf(stderr
, "posix_memalignwrap: unable to find posix_memalign\n");
335 retval
= cur_alloc
.posix_memalign(memptr
, alignment
, size
);
336 tracepoint(ust_libc
, posix_memalign
, *memptr
, alignment
, size
, retval
);
340 __attribute__((constructor
))
341 void lttng_ust_malloc_wrapper_init(void)
343 /* Initialization already done */
344 if (cur_alloc
.calloc
) {
348 * Ensure the allocator is in place before the process becomes
351 lookup_all_symbols();