Fix: liblttng-ust-libc-wrapper recursive use of calloc
liblttng-ust-libc-wrapper/lttng-ust-malloc.c
/*
 * Copyright (C) 2009 Pierre-Marc Fournier
 * Copyright (C) 2011 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
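
/*
 * Overview: this wrapper shadows the libc allocation entry points so
 * that each call can be traced. It is typically preloaded in front of
 * the target application, e.g.:
 *
 *   LD_PRELOAD=liblttng-ust-libc-wrapper.so ./my-app
 *
 * Each public wrapper resolves the real libc symbol with
 * dlsym(RTLD_NEXT, ...) on first use and emits a tracepoint around it.
 */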

#define _GNU_SOURCE
#include <lttng/ust-dlfcn.h>
#include <sys/types.h>
#include <stdio.h>
#include <string.h>	/* memcpy(), used by the realloc paths below */
#include <stdint.h>	/* SIZE_MAX */
#include <errno.h>	/* EINVAL */
#include <assert.h>
#include <urcu/system.h>
#include <urcu/uatomic.h>
#include <urcu/compiler.h>
#include <urcu/tls-compat.h>
#include <lttng/align.h>

#define TRACEPOINT_DEFINE
#define TRACEPOINT_CREATE_PROBES
#include "ust_libc.h"

#define STATIC_CALLOC_LEN 4096
static char static_calloc_buf[STATIC_CALLOC_LEN];
static unsigned long static_calloc_buf_offset;
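/*
 * Note: static_calloc_buf lives in BSS, so it starts out zero-filled
 * (which preserves calloc's zeroing contract for bootstrap
 * allocations), and it is never reclaimed: static_free() below is a
 * no-op.
 */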

struct alloc_functions {
        void *(*calloc)(size_t nmemb, size_t size);
        void *(*malloc)(size_t size);
        void (*free)(void *ptr);
        void *(*realloc)(void *ptr, size_t size);
        void *(*memalign)(size_t alignment, size_t size);
        int (*posix_memalign)(void **memptr, size_t alignment, size_t size);
};

static
struct alloc_functions cur_alloc;
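/*
 * cur_alloc starts out all-NULL: each wrapper triggers
 * lookup_all_symbols() lazily on first use, and the library
 * constructor at the bottom of this file runs the lookup eagerly
 * before the process can go multithreaded.
 */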

/*
 * Make sure our own use of the TLS compat layer will not cause infinite
 * recursion by calling calloc.
 */

static
void *static_calloc(size_t nmemb, size_t size);
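
/*
 * liburcu's TLS compat layer may allocate on first access (its
 * fallback implementation uses pthread keys and calloc). Redefining
 * calloc around DEFINE_URCU_TLS() routes any such early allocation to
 * the bootstrap allocator below instead of recursing into our calloc
 * wrapper.
 */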
#define calloc static_calloc
static DEFINE_URCU_TLS(int, malloc_nesting);
#undef calloc

/*
 * Static allocator to use when initially executing dlsym(). It stores
 * each object's size in a size_t word placed immediately before the
 * object.
 */
static
void *static_calloc_aligned(size_t nmemb, size_t size, size_t alignment)
{
        size_t prev_offset, new_offset, res_offset, aligned_offset;

        if (nmemb == 0 || size == 0) {
                return NULL;
        }
        /* Reject nmemb * size overflow instead of silently wrapping. */
        if (nmemb > SIZE_MAX / size) {
                abort();
        }

        /*
         * Protect static_calloc_buf_offset from concurrent updates
         * using a cmpxchg loop rather than a mutex to remove a
         * dependency on pthread. This will minimize the risk of bad
         * interaction between mutex and malloc instrumentation.
         */
        res_offset = CMM_LOAD_SHARED(static_calloc_buf_offset);
        do {
                prev_offset = res_offset;
                aligned_offset = ALIGN(prev_offset + sizeof(size_t), alignment);
                new_offset = aligned_offset + nmemb * size;
                if (new_offset > sizeof(static_calloc_buf)) {
                        abort();
                }
        } while ((res_offset = uatomic_cmpxchg(&static_calloc_buf_offset,
                        prev_offset, new_offset)) != prev_offset);
        /* Record the total allocation size for static_realloc(). */
        *(size_t *) &static_calloc_buf[aligned_offset - sizeof(size_t)] =
                        nmemb * size;
        return &static_calloc_buf[aligned_offset];
}
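
/*
 * Layout produced inside static_calloc_buf (sketch):
 *
 *   ... | padding | total size (size_t) | object bytes ... |
 *                                       ^ pointer returned to caller
 *
 * free() and realloc() use a simple range check against
 * static_calloc_buf to recognize these bootstrap objects.
 */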

static
void *static_calloc(size_t nmemb, size_t size)
{
        void *retval;

        retval = static_calloc_aligned(nmemb, size, 1);
        return retval;
}

static
void *static_malloc(size_t size)
{
        void *retval;

        retval = static_calloc_aligned(1, size, 1);
        return retval;
}

static
void static_free(void *ptr)
{
        /* no-op. */
}

static
void *static_realloc(void *ptr, size_t size)
{
        size_t *old_size = NULL;
        void *retval;

        if (size == 0) {
                retval = NULL;
                goto end;
        }

        if (ptr) {
                old_size = (size_t *) ptr - 1;
                if (size <= *old_size) {
                        /* We can re-use the old entry. */
                        *old_size = size;
                        retval = ptr;
                        goto end;
                }
        }
        /* We need to expand. Don't free previous memory location. */
        retval = static_calloc_aligned(1, size, 1);
        assert(retval);
        if (ptr)
                memcpy(retval, ptr, *old_size);
end:
        return retval;
}

static
void *static_memalign(size_t alignment, size_t size)
{
        void *retval;

        retval = static_calloc_aligned(1, size, alignment);
        return retval;
}

static
int static_posix_memalign(void **memptr, size_t alignment, size_t size)
{
        void *ptr;

        /* Check that alignment is a power of 2, at least sizeof(void *). */
        if (alignment & (alignment - 1)
                        || alignment < sizeof(void *)
                        || alignment == 0) {
                /* POSIX mandates EINVAL for an invalid alignment. */
                return EINVAL;
        }
        ptr = static_calloc_aligned(1, size, alignment);
        *memptr = ptr;
        return 0;
}

static
void setup_static_allocator(void)
{
        assert(cur_alloc.calloc == NULL);
        cur_alloc.calloc = static_calloc;
        assert(cur_alloc.malloc == NULL);
        cur_alloc.malloc = static_malloc;
        assert(cur_alloc.free == NULL);
        cur_alloc.free = static_free;
        assert(cur_alloc.realloc == NULL);
        cur_alloc.realloc = static_realloc;
        assert(cur_alloc.memalign == NULL);
        cur_alloc.memalign = static_memalign;
        assert(cur_alloc.posix_memalign == NULL);
        cur_alloc.posix_memalign = static_posix_memalign;
}

static
void lookup_all_symbols(void)
{
        struct alloc_functions af;

        /*
         * Temporarily redirect allocation functions to
         * static_calloc_aligned, and free function to static_free
         * (no-op), until the dlsym lookup has completed.
         */
        setup_static_allocator();

        /* Perform the actual lookups */
        af.calloc = dlsym(RTLD_NEXT, "calloc");
        af.malloc = dlsym(RTLD_NEXT, "malloc");
        af.free = dlsym(RTLD_NEXT, "free");
        af.realloc = dlsym(RTLD_NEXT, "realloc");
        af.memalign = dlsym(RTLD_NEXT, "memalign");
        af.posix_memalign = dlsym(RTLD_NEXT, "posix_memalign");
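
        /*
         * The lookups are staged in a local struct so that cur_alloc
         * keeps pointing at the complete static allocator while
         * dlsym() runs (dlsym() itself may allocate), and is only
         * switched over once every entry has been resolved.
         */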
        /* Populate the new allocator functions */
        memcpy(&cur_alloc, &af, sizeof(cur_alloc));
}
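
/*
 * Each wrapper below keeps a per-thread nesting count around the real
 * call: tracepoint probes may themselves allocate memory, so only the
 * outermost level (nesting == 1) emits an event. This prevents the
 * instrumentation from tracing, or recursing into, its own
 * allocations.
 */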
void *malloc(size_t size)
{
        void *retval;

        URCU_TLS(malloc_nesting)++;
        if (cur_alloc.malloc == NULL) {
                lookup_all_symbols();
                if (cur_alloc.malloc == NULL) {
                        fprintf(stderr, "mallocwrap: unable to find malloc\n");
                        abort();
                }
        }
        retval = cur_alloc.malloc(size);
        if (URCU_TLS(malloc_nesting) == 1) {
                tracepoint(ust_libc, malloc, size, retval);
        }
        URCU_TLS(malloc_nesting)--;
        return retval;
}

void free(void *ptr)
{
        URCU_TLS(malloc_nesting)++;
        /*
         * Check whether the memory was allocated with
         * static_calloc_aligned, in which case there is nothing to free.
         */
        if (caa_unlikely((char *)ptr >= static_calloc_buf &&
                        (char *)ptr < static_calloc_buf + STATIC_CALLOC_LEN)) {
                goto end;
        }

        if (URCU_TLS(malloc_nesting) == 1) {
                tracepoint(ust_libc, free, ptr);
        }

        if (cur_alloc.free == NULL) {
                lookup_all_symbols();
                if (cur_alloc.free == NULL) {
                        fprintf(stderr, "mallocwrap: unable to find free\n");
                        abort();
                }
        }
        cur_alloc.free(ptr);
end:
        URCU_TLS(malloc_nesting)--;
}

void *calloc(size_t nmemb, size_t size)
{
        void *retval;

        URCU_TLS(malloc_nesting)++;
        if (cur_alloc.calloc == NULL) {
                lookup_all_symbols();
                if (cur_alloc.calloc == NULL) {
                        fprintf(stderr, "callocwrap: unable to find calloc\n");
                        abort();
                }
        }
        retval = cur_alloc.calloc(nmemb, size);
        if (URCU_TLS(malloc_nesting) == 1) {
                tracepoint(ust_libc, calloc, nmemb, size, retval);
        }
        URCU_TLS(malloc_nesting)--;
        return retval;
}

void *realloc(void *ptr, size_t size)
{
        void *retval;

        URCU_TLS(malloc_nesting)++;
        /*
         * Check whether the memory was allocated with
         * static_calloc_aligned, in which case there is nothing
         * to free, and we need to copy the old data.
         */
        if (caa_unlikely((char *)ptr >= static_calloc_buf &&
                        (char *)ptr < static_calloc_buf + STATIC_CALLOC_LEN)) {
                size_t *old_size;

                old_size = (size_t *) ptr - 1;
                if (cur_alloc.calloc == NULL) {
                        lookup_all_symbols();
                        if (cur_alloc.calloc == NULL) {
                                fprintf(stderr, "reallocwrap: unable to find calloc\n");
                                abort();
                        }
                }
                retval = cur_alloc.calloc(1, size);
                if (retval) {
                        /* Copy at most the new size: the caller may be shrinking. */
                        memcpy(retval, ptr, size < *old_size ? size : *old_size);
                }
                /*
                 * Mimic that a NULL pointer has been received, so
                 * memory allocation analysis based on the trace doesn't
                 * get confused by the address from the static
                 * allocator.
                 */
                ptr = NULL;
                goto end;
        }

        if (cur_alloc.realloc == NULL) {
                lookup_all_symbols();
                if (cur_alloc.realloc == NULL) {
                        fprintf(stderr, "reallocwrap: unable to find realloc\n");
                        abort();
                }
        }
        retval = cur_alloc.realloc(ptr, size);
end:
        if (URCU_TLS(malloc_nesting) == 1) {
                tracepoint(ust_libc, realloc, ptr, size, retval);
        }
        URCU_TLS(malloc_nesting)--;
        return retval;
}

void *memalign(size_t alignment, size_t size)
{
        void *retval;

        URCU_TLS(malloc_nesting)++;
        if (cur_alloc.memalign == NULL) {
                lookup_all_symbols();
                if (cur_alloc.memalign == NULL) {
                        fprintf(stderr, "memalignwrap: unable to find memalign\n");
                        abort();
                }
        }
        retval = cur_alloc.memalign(alignment, size);
        if (URCU_TLS(malloc_nesting) == 1) {
                tracepoint(ust_libc, memalign, alignment, size, retval);
        }
        URCU_TLS(malloc_nesting)--;
        return retval;
}

int posix_memalign(void **memptr, size_t alignment, size_t size)
{
        int retval;

        URCU_TLS(malloc_nesting)++;
        if (cur_alloc.posix_memalign == NULL) {
                lookup_all_symbols();
                if (cur_alloc.posix_memalign == NULL) {
                        fprintf(stderr, "posix_memalignwrap: unable to find posix_memalign\n");
                        abort();
                }
        }
        retval = cur_alloc.posix_memalign(memptr, alignment, size);
        if (URCU_TLS(malloc_nesting) == 1) {
                tracepoint(ust_libc, posix_memalign, *memptr, alignment, size,
                        retval);
        }
        URCU_TLS(malloc_nesting)--;
        return retval;
}

__attribute__((constructor))
void lttng_ust_malloc_wrapper_init(void)
{
        /* Initialization already done */
        if (cur_alloc.calloc) {
                return;
        }
        /*
         * Ensure the allocator is in place before the process becomes
         * multithreaded.
         */
        lookup_all_symbols();
}
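
/*
 * Usage sketch, using the standard lttng(1) CLI; the session name and
 * application are illustrative:
 *
 *   lttng create malloc-session
 *   lttng enable-event -u 'ust_libc:*'
 *   lttng start
 *   LD_PRELOAD=liblttng-ust-libc-wrapper.so ./my-app
 *   lttng stop
 *   lttng view
 */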