/* Malloc debug DSO.
   Copyright (C) 2021-2022 Free Software Foundation, Inc.
   Copyright The GNU Toolchain Authors.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public License as
   published by the Free Software Foundation; either version 2.1 of the
   License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; see the file COPYING.LIB.  If
   not, see <https://www.gnu.org/licenses/>.  */

#include <atomic.h>
#include <libc-symbols.h>
#include <shlib-compat.h>
#include <string.h>
#include <unistd.h>
#include <sys/param.h>

/* Support only the glibc allocators.  */
extern void *__libc_malloc (size_t);
extern void __libc_free (void *);
extern void *__libc_realloc (void *, size_t);
extern void *__libc_memalign (size_t, size_t);
extern void *__libc_valloc (size_t);
extern void *__libc_pvalloc (size_t);
extern void *__libc_calloc (size_t, size_t);

#define DEBUG_FN(fn) \
  static __typeof (__libc_ ## fn) __debug_ ## fn

DEBUG_FN(malloc);
DEBUG_FN(free);
DEBUG_FN(realloc);
DEBUG_FN(memalign);
DEBUG_FN(valloc);
DEBUG_FN(pvalloc);
DEBUG_FN(calloc);
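
/* For illustration only: DEBUG_FN (malloc) above expands to

     static __typeof (__libc_malloc) __debug_malloc;

   i.e. a forward declaration of the debugging wrapper with exactly the
   same prototype as the corresponding libc entry point, so any
   signature mismatch is caught at compile time.  */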

static int debug_initialized = -1;

enum malloc_debug_hooks
{
  MALLOC_NONE_HOOK = 0,
  MALLOC_MCHECK_HOOK = 1 << 0,  /* mcheck().  */
  MALLOC_MTRACE_HOOK = 1 << 1,  /* mtrace().  */
  MALLOC_CHECK_HOOK = 1 << 2,   /* MALLOC_CHECK_ or glibc.malloc.check.  */
};
static unsigned __malloc_debugging_hooks;

static __always_inline bool
__is_malloc_debug_enabled (enum malloc_debug_hooks flag)
{
  return __malloc_debugging_hooks & flag;
}

static __always_inline void
__malloc_debug_enable (enum malloc_debug_hooks flag)
{
  __malloc_debugging_hooks |= flag;
}

static __always_inline void
__malloc_debug_disable (enum malloc_debug_hooks flag)
{
  __malloc_debugging_hooks &= ~flag;
}
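
/* Illustrative example of the helpers above: after

     __malloc_debug_enable (MALLOC_MCHECK_HOOK);
     __malloc_debug_enable (MALLOC_MTRACE_HOOK);

   __malloc_debugging_hooks holds the value 3, both
   __is_malloc_debug_enabled (MALLOC_MCHECK_HOOK) and
   __is_malloc_debug_enabled (MALLOC_MTRACE_HOOK) return true, and
   __is_malloc_debug_enabled (MALLOC_CHECK_HOOK) still returns false.  */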

#include "mcheck.c"
#include "mtrace.c"
#include "malloc-check.c"
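
/* A brief summary of how these features are expected to be enabled
   (informational, not a normative reference): a call to mcheck () or
   mcheck_pedantic () turns on MALLOC_MCHECK_HOOK, mtrace () together
   with the MALLOC_TRACE environment variable turns on
   MALLOC_MTRACE_HOOK, and the MALLOC_CHECK_ environment variable (or
   the glibc.malloc.check tunable) turns on MALLOC_CHECK_HOOK.  In all
   cases this DSO has to be preloaded, e.g. via LD_PRELOAD pointing at
   the installed libc_malloc_debug.so; the exact path depends on the
   installation.  */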

#if SHLIB_COMPAT (libc_malloc_debug, GLIBC_2_0, GLIBC_2_24)
extern void (*__malloc_initialize_hook) (void);
compat_symbol_reference (libc, __malloc_initialize_hook,
                         __malloc_initialize_hook, GLIBC_2_0);
#endif

static void *malloc_hook_ini (size_t, const void *) __THROW;
static void *realloc_hook_ini (void *, size_t, const void *) __THROW;
static void *memalign_hook_ini (size_t, size_t, const void *) __THROW;

void (*__free_hook) (void *, const void *) = NULL;
void *(*__malloc_hook) (size_t, const void *) = malloc_hook_ini;
void *(*__realloc_hook) (void *, size_t, const void *) = realloc_hook_ini;
void *(*__memalign_hook) (size_t, size_t, const void *) = memalign_hook_ini;
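
/* Purely illustrative sketch of the kind of legacy code these compat
   hooks exist for (my_malloc_hook and old_malloc_hook are made-up
   names, not part of any API).  Such code saves the previous hook,
   temporarily restores it while re-entering malloc, and reinstalls
   itself afterwards:

     static void *(*old_malloc_hook) (size_t, const void *);

     static void *
     my_malloc_hook (size_t size, const void *caller)
     {
       void *result;
       __malloc_hook = old_malloc_hook;
       result = malloc (size);
       old_malloc_hook = __malloc_hook;
       __malloc_hook = my_malloc_hook;
       return result;
     }

   This pattern is inherently thread-unsafe, which is one of the
   reasons the hooks were removed from libc proper and survive only in
   this debugging DSO.  */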

/* Hooks for debugging versions.  The initial hooks just call the
   initialization routine, then do the normal work.  */

/* These hooks will get executed only through the interposed allocator
   functions in libc_malloc_debug.so.  This means that the calls to malloc,
   realloc, etc. will lead back into the interposed functions, which is
   what we want.

   These initial hooks are assumed to be called in a single-threaded
   context, so it is safe to reset all hooks at once upon initialization.  */

static void
generic_hook_ini (void)
{
  debug_initialized = 0;
  __malloc_hook = NULL;
  __realloc_hook = NULL;
  __memalign_hook = NULL;

  /* malloc check does not quite co-exist with libc malloc, so initialize
     either one or the other.  */
  if (!initialize_malloc_check ())
    /* The compiler does not know that these functions are allocators, so
       it will not try to optimize the call away.  */
    __libc_free (__libc_malloc (0));

#if SHLIB_COMPAT (libc_malloc_debug, GLIBC_2_0, GLIBC_2_24)
  void (*hook) (void) = __malloc_initialize_hook;
  if (hook != NULL)
    (*hook) ();
#endif

  debug_initialized = 1;
}

static void *
malloc_hook_ini (size_t sz, const void *caller)
{
  generic_hook_ini ();
  return __debug_malloc (sz);
}

static void *
realloc_hook_ini (void *ptr, size_t sz, const void *caller)
{
  generic_hook_ini ();
  return __debug_realloc (ptr, sz);
}

static void *
memalign_hook_ini (size_t alignment, size_t sz, const void *caller)
{
  generic_hook_ini ();
  return __debug_memalign (alignment, sz);
}

static size_t pagesize;

/* These variables are used for undumping support.  Chunks are marked
   as using mmap, but we leave them alone if they fall into this
   range.  NB: The chunk size for these chunks only includes the
   initial size field (of SIZE_SZ bytes), there is no trailing size
   field (unlike with regular mmapped chunks).  */
static mchunkptr dumped_main_arena_start; /* Inclusive.  */
static mchunkptr dumped_main_arena_end;   /* Exclusive.  */

/* True if the pointer falls into the dumped arena.  Use this after
   chunk_is_mmapped indicates a chunk is mmapped.  */
#define DUMPED_MAIN_ARENA_CHUNK(p) \
  ((p) >= dumped_main_arena_start && (p) < dumped_main_arena_end)

/* The allocator functions.  */

static void *
__debug_malloc (size_t bytes)
{
  void *(*hook) (size_t, const void *) = atomic_forced_read (__malloc_hook);
  if (__builtin_expect (hook != NULL, 0))
    return (*hook)(bytes, RETURN_ADDRESS (0));

  void *victim = NULL;
  size_t orig_bytes = bytes;
  if ((!__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK)
       || !malloc_mcheck_before (&bytes, &victim)))
    {
      victim = (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK)
                ? malloc_check (bytes) : __libc_malloc (bytes));
    }
  if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK) && victim != NULL)
    victim = malloc_mcheck_after (victim, orig_bytes);
  if (__is_malloc_debug_enabled (MALLOC_MTRACE_HOOK))
    malloc_mtrace_after (victim, orig_bytes, RETURN_ADDRESS (0));

  return victim;
}
strong_alias (__debug_malloc, malloc)
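
/* Example of a small program exercising this path through the mtrace
   hook (illustrative only; it has to run with this DSO preloaded and
   with MALLOC_TRACE naming a writable log file):

     #include <mcheck.h>
     #include <stdlib.h>

     int
     main (void)
     {
       mtrace ();
       void *p = malloc (32);
       free (p);
       muntrace ();
       return 0;
     }

   The resulting log can then be post-processed with the mtrace script
   shipped with glibc to report unbalanced allocations.  */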

static void
__debug_free (void *mem)
{
  void (*hook) (void *, const void *) = atomic_forced_read (__free_hook);
  if (__builtin_expect (hook != NULL, 0))
    {
      (*hook)(mem, RETURN_ADDRESS (0));
      return;
    }

  if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK))
    mem = free_mcheck (mem);

  if (DUMPED_MAIN_ARENA_CHUNK (mem2chunk (mem)))
    /* Do nothing.  */;
  else if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    free_check (mem);
  else
    __libc_free (mem);
  if (__is_malloc_debug_enabled (MALLOC_MTRACE_HOOK))
    free_mtrace (mem, RETURN_ADDRESS (0));
}
strong_alias (__debug_free, free)

static void *
__debug_realloc (void *oldmem, size_t bytes)
{
  void *(*hook) (void *, size_t, const void *) =
    atomic_forced_read (__realloc_hook);
  if (__builtin_expect (hook != NULL, 0))
    return (*hook)(oldmem, bytes, RETURN_ADDRESS (0));

  size_t orig_bytes = bytes, oldsize = 0;
  void *victim = NULL;

  if ((!__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK)
       || !realloc_mcheck_before (&oldmem, &bytes, &oldsize, &victim)))
    {
      mchunkptr oldp = mem2chunk (oldmem);

      /* If this is a faked mmapped chunk from the dumped main arena,
         always make a copy (and do not free the old chunk).  */
      if (DUMPED_MAIN_ARENA_CHUNK (oldp))
        {
          if (bytes == 0 && oldmem != NULL)
            victim = NULL;
          else
            {
              const INTERNAL_SIZE_T osize = chunksize (oldp);
              /* Must alloc, copy, free.  */
              victim = __debug_malloc (bytes);
              /* Copy as many bytes as are available from the old chunk
                 and fit into the new size.  NB: The overhead for faked
                 mmapped chunks is only SIZE_SZ, not CHUNK_HDR_SZ as for
                 regular mmapped chunks.  */
              if (victim != NULL)
                {
                  if (bytes > osize - SIZE_SZ)
                    bytes = osize - SIZE_SZ;
                  memcpy (victim, oldmem, bytes);
                }
            }
        }
      else if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
        victim = realloc_check (oldmem, bytes);
      else
        victim = __libc_realloc (oldmem, bytes);
    }
  if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK) && victim != NULL)
    victim = realloc_mcheck_after (victim, oldmem, orig_bytes, oldsize);
  if (__is_malloc_debug_enabled (MALLOC_MTRACE_HOOK))
    realloc_mtrace_after (victim, oldmem, orig_bytes, RETURN_ADDRESS (0));

  return victim;
}
strong_alias (__debug_realloc, realloc)

static void *
_debug_mid_memalign (size_t alignment, size_t bytes, const void *address)
{
  void *(*hook) (size_t, size_t, const void *) =
    atomic_forced_read (__memalign_hook);
  if (__builtin_expect (hook != NULL, 0))
    return (*hook)(alignment, bytes, address);

  void *victim = NULL;
  size_t orig_bytes = bytes;

  if ((!__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK)
       || !memalign_mcheck_before (alignment, &bytes, &victim)))
    {
      victim = (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK)
                ? memalign_check (alignment, bytes)
                : __libc_memalign (alignment, bytes));
    }
  if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK) && victim != NULL)
    victim = memalign_mcheck_after (victim, alignment, orig_bytes);
  if (__is_malloc_debug_enabled (MALLOC_MTRACE_HOOK))
    memalign_mtrace_after (victim, orig_bytes, address);

  return victim;
}

static void *
__debug_memalign (size_t alignment, size_t bytes)
{
  return _debug_mid_memalign (alignment, bytes, RETURN_ADDRESS (0));
}
strong_alias (__debug_memalign, memalign)
strong_alias (__debug_memalign, aligned_alloc)

static void *
__debug_pvalloc (size_t bytes)
{
  size_t rounded_bytes;

  if (!pagesize)
    pagesize = sysconf (_SC_PAGESIZE);

  /* ALIGN_UP with overflow check.  */
  if (__glibc_unlikely (__builtin_add_overflow (bytes,
                                                pagesize - 1,
                                                &rounded_bytes)))
    {
      errno = ENOMEM;
      return NULL;
    }
  rounded_bytes = rounded_bytes & ~(pagesize - 1);

  return _debug_mid_memalign (pagesize, rounded_bytes, RETURN_ADDRESS (0));
}
strong_alias (__debug_pvalloc, pvalloc)
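
/* Worked example of the rounding above, assuming a 4096-byte page: a
   request of 1 byte is rounded to 4096, a request of 4097 bytes is
   rounded to 8192, and a request within a page of SIZE_MAX overflows
   the addition and fails with ENOMEM before any allocation is
   attempted.  */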

static void *
__debug_valloc (size_t bytes)
{
  if (!pagesize)
    pagesize = sysconf (_SC_PAGESIZE);

  return _debug_mid_memalign (pagesize, bytes, RETURN_ADDRESS (0));
}
strong_alias (__debug_valloc, valloc)

static int
__debug_posix_memalign (void **memptr, size_t alignment, size_t bytes)
{
  /* Test whether the ALIGNMENT argument is valid.  It must be a power
     of two multiple of sizeof (void *).  */
  if (alignment % sizeof (void *) != 0
      || !powerof2 (alignment / sizeof (void *))
      || alignment == 0)
    return EINVAL;

  *memptr = _debug_mid_memalign (alignment, bytes, RETURN_ADDRESS (0));

  if (*memptr == NULL)
    return ENOMEM;

  return 0;
}
strong_alias (__debug_posix_memalign, posix_memalign)
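
/* Illustration of the check above on a target where sizeof (void *)
   is 8: an alignment of 64 is accepted (64 / 8 == 8, a power of two),
   while 48 is rejected with EINVAL (48 / 8 == 6) and so is 4 (not a
   multiple of sizeof (void *)).  */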

static void *
__debug_calloc (size_t nmemb, size_t size)
{
  size_t bytes;

  if (__glibc_unlikely (__builtin_mul_overflow (nmemb, size, &bytes)))
    {
      errno = ENOMEM;
      return NULL;
    }

  void *(*hook) (size_t, const void *) = atomic_forced_read (__malloc_hook);
  if (__builtin_expect (hook != NULL, 0))
    {
      void *mem = (*hook)(bytes, RETURN_ADDRESS (0));

      if (mem != NULL)
        memset (mem, 0, bytes);

      return mem;
    }

  size_t orig_bytes = bytes;
  void *victim = NULL;

  if ((!__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK)
       || !malloc_mcheck_before (&bytes, &victim)))
    {
      victim = (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK)
                ? malloc_check (bytes) : __libc_malloc (bytes));
    }
  if (victim != NULL)
    {
      if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK))
        victim = malloc_mcheck_after (victim, orig_bytes);
      memset (victim, 0, orig_bytes);
    }
  if (__is_malloc_debug_enabled (MALLOC_MTRACE_HOOK))
    malloc_mtrace_after (victim, orig_bytes, RETURN_ADDRESS (0));

  return victim;
}
strong_alias (__debug_calloc, calloc)
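
/* The overflow check above means that, for example, a call such as
   calloc (SIZE_MAX / 2, 4) fails cleanly with ENOMEM instead of
   wrapping around and returning a much smaller allocation.  */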

size_t
malloc_usable_size (void *mem)
{
  if (mem == NULL)
    return 0;

  if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK))
    return mcheck_usable_size (mem);
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return malloc_check_get_size (mem);

  mchunkptr p = mem2chunk (mem);
  if (DUMPED_MAIN_ARENA_CHUNK (p))
    return chunksize (p) - SIZE_SZ;

  return musable (mem);
}

#define LIBC_SYMBOL(sym) libc_ ## sym
#define SYMHANDLE(sym) sym ## _handle

#define LOAD_SYM(sym) ({ \
  static void *SYMHANDLE (sym); \
  if (SYMHANDLE (sym) == NULL) \
    SYMHANDLE (sym) = dlsym (RTLD_NEXT, #sym); \
  SYMHANDLE (sym); \
})
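
/* For illustration: LOAD_SYM (mallopt) expands to a GNU statement
   expression along the lines of

     ({
       static void *mallopt_handle;
       if (mallopt_handle == NULL)
         mallopt_handle = dlsym (RTLD_NEXT, "mallopt");
       mallopt_handle;
     })

   i.e. the address of the next mallopt definition in symbol search
   order (normally the one in libc), cached in a per-call-site static
   after the first successful lookup.  */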

int
malloc_info (int options, FILE *fp)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __malloc_info (options, fp);

  int (*LIBC_SYMBOL (malloc_info)) (int, FILE *) = LOAD_SYM (malloc_info);
  if (LIBC_SYMBOL (malloc_info) == NULL)
    return -1;

  return LIBC_SYMBOL (malloc_info) (options, fp);
}

int
mallopt (int param_number, int value)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __libc_mallopt (param_number, value);

  int (*LIBC_SYMBOL (mallopt)) (int, int) = LOAD_SYM (mallopt);
  if (LIBC_SYMBOL (mallopt) == NULL)
    return 0;

  return LIBC_SYMBOL (mallopt) (param_number, value);
}

void
malloc_stats (void)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __malloc_stats ();

  void (*LIBC_SYMBOL (malloc_stats)) (void) = LOAD_SYM (malloc_stats);
  if (LIBC_SYMBOL (malloc_stats) == NULL)
    return;

  LIBC_SYMBOL (malloc_stats) ();
}

struct mallinfo2
mallinfo2 (void)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __libc_mallinfo2 ();

  struct mallinfo2 (*LIBC_SYMBOL (mallinfo2)) (void) = LOAD_SYM (mallinfo2);
  if (LIBC_SYMBOL (mallinfo2) == NULL)
    {
      struct mallinfo2 ret = {0};
      return ret;
    }

  return LIBC_SYMBOL (mallinfo2) ();
}

struct mallinfo
mallinfo (void)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __libc_mallinfo ();

  struct mallinfo (*LIBC_SYMBOL (mallinfo)) (void) = LOAD_SYM (mallinfo);
  if (LIBC_SYMBOL (mallinfo) == NULL)
    {
      struct mallinfo ret = {0};
      return ret;
    }

  return LIBC_SYMBOL (mallinfo) ();
}

int
malloc_trim (size_t s)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __malloc_trim (s);

  int (*LIBC_SYMBOL (malloc_trim)) (size_t) = LOAD_SYM (malloc_trim);
  if (LIBC_SYMBOL (malloc_trim) == NULL)
    return 0;

  return LIBC_SYMBOL (malloc_trim) (s);
}

#if SHLIB_COMPAT (libc_malloc_debug, GLIBC_2_0, GLIBC_2_25)

/* Support for restoring dumped heaps contained in historic Emacs
   executables.  The heap saving feature (malloc_get_state) is no
   longer implemented in this version of glibc, but we have a heap
   rewriter in malloc_set_state which transforms the heap into a
   version compatible with current malloc.  */

#define MALLOC_STATE_MAGIC 0x444c4541l
#define MALLOC_STATE_VERSION (0 * 0x100l + 5l) /* major * 0x100 + minor */

struct malloc_save_state
{
  long magic;
  long version;
  mbinptr av[NBINS * 2 + 2];
  char *sbrk_base;
  int sbrked_mem_bytes;
  unsigned long trim_threshold;
  unsigned long top_pad;
  unsigned int n_mmaps_max;
  unsigned long mmap_threshold;
  int check_action;
  unsigned long max_sbrked_mem;
  unsigned long max_total_mem; /* Always 0, for backwards compatibility.  */
  unsigned int n_mmaps;
  unsigned int max_n_mmaps;
  unsigned long mmapped_mem;
  unsigned long max_mmapped_mem;
  int using_malloc_checking;
  unsigned long max_fast;
  unsigned long arena_test;
  unsigned long arena_max;
  unsigned long narenas;
};
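
/* For reference: the version field packs major and minor as
   major * 0x100 + minor, so MALLOC_STATE_VERSION above is 0x005
   (major 0, minor 5).  The check in malloc_set_state below masks off
   the minor part with ~0xff and therefore only rejects dumps whose
   major version is newer than ours.  */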

/* Dummy implementation which always fails.  We need to provide this
   symbol so that existing Emacs binaries continue to work with
   BIND_NOW.  */
void *
malloc_get_state (void)
{
  __set_errno (ENOSYS);
  return NULL;
}
compat_symbol (libc_malloc_debug, malloc_get_state, malloc_get_state,
               GLIBC_2_0);

int
malloc_set_state (void *msptr)
{
  struct malloc_save_state *ms = (struct malloc_save_state *) msptr;

  if (ms->magic != MALLOC_STATE_MAGIC)
    return -1;

  /* Must fail if the major version is too high.  */
  if ((ms->version & ~0xffl) > (MALLOC_STATE_VERSION & ~0xffl))
    return -2;

  if (debug_initialized == 1)
    return -1;

  bool check_was_enabled = __is_malloc_debug_enabled (MALLOC_CHECK_HOOK);

  /* It's not too late, so disable MALLOC_CHECK_ and all of the hooks.  */
  __malloc_hook = NULL;
  __realloc_hook = NULL;
  __free_hook = NULL;
  __memalign_hook = NULL;
  __malloc_debug_disable (MALLOC_CHECK_HOOK);

  /* We do not need to perform locking here because malloc_set_state
     must be called before the first call into the malloc subsystem
     (usually via __malloc_initialize_hook).  pthread_create always
     calls calloc and thus must be called only afterwards, so there
     cannot be more than one thread when we reach this point.  Also
     handle initialization if either we ended up being called before
     the first malloc or through the hook when malloc-check was
     enabled.  */
  if (debug_initialized < 0)
    generic_hook_ini ();
  else if (check_was_enabled)
    __libc_free (__libc_malloc (0));

  /* Patch the dumped heap.  We no longer try to integrate into the
     existing heap.  Instead, we mark the existing chunks as mmapped.
     Together with the update to dumped_main_arena_start and
     dumped_main_arena_end, realloc and free will recognize these
     chunks as dumped fake mmapped chunks and never free them.  */

  /* Find the chunk with the lowest address within the heap.  */
  mchunkptr chunk = NULL;
  {
    size_t *candidate = (size_t *) ms->sbrk_base;
    size_t *end = (size_t *) (ms->sbrk_base + ms->sbrked_mem_bytes);
    while (candidate < end)
      if (*candidate != 0)
        {
          chunk = mem2chunk ((void *) (candidate + 1));
          break;
        }
      else
        ++candidate;
  }
  if (chunk == NULL)
    return 0;

  /* Iterate over the dumped heap and patch the chunks so that they
     are treated as fake mmapped chunks.  */
  mchunkptr top = ms->av[2];
  while (chunk < top)
    {
      if (inuse (chunk))
        {
          /* Mark chunk as mmapped, to trigger the fallback path.  */
          size_t size = chunksize (chunk);
          set_head (chunk, size | IS_MMAPPED);
        }
      chunk = next_chunk (chunk);
    }

  /* The dumped fake mmapped chunks all lie in this address range.  */
  dumped_main_arena_start = (mchunkptr) ms->sbrk_base;
  dumped_main_arena_end = top;

  return 0;
}
compat_symbol (libc_malloc_debug, malloc_set_state, malloc_set_state,
               GLIBC_2_0);
#endif

/* Do not allow linking against the library.  */
compat_symbol (libc_malloc_debug, aligned_alloc, aligned_alloc, GLIBC_2_16);
compat_symbol (libc_malloc_debug, calloc, calloc, GLIBC_2_0);
compat_symbol (libc_malloc_debug, free, free, GLIBC_2_0);
compat_symbol (libc_malloc_debug, mallinfo2, mallinfo2, GLIBC_2_33);
compat_symbol (libc_malloc_debug, mallinfo, mallinfo, GLIBC_2_0);
compat_symbol (libc_malloc_debug, malloc_info, malloc_info, GLIBC_2_10);
compat_symbol (libc_malloc_debug, malloc, malloc, GLIBC_2_0);
compat_symbol (libc_malloc_debug, malloc_stats, malloc_stats, GLIBC_2_0);
compat_symbol (libc_malloc_debug, malloc_trim, malloc_trim, GLIBC_2_0);
compat_symbol (libc_malloc_debug, malloc_usable_size, malloc_usable_size,
               GLIBC_2_0);
compat_symbol (libc_malloc_debug, mallopt, mallopt, GLIBC_2_0);
compat_symbol (libc_malloc_debug, mcheck_check_all, mcheck_check_all,
               GLIBC_2_2);
compat_symbol (libc_malloc_debug, mcheck, mcheck, GLIBC_2_0);
compat_symbol (libc_malloc_debug, mcheck_pedantic, mcheck_pedantic, GLIBC_2_2);
compat_symbol (libc_malloc_debug, memalign, memalign, GLIBC_2_0);
compat_symbol (libc_malloc_debug, mprobe, mprobe, GLIBC_2_0);
compat_symbol (libc_malloc_debug, mtrace, mtrace, GLIBC_2_0);
compat_symbol (libc_malloc_debug, muntrace, muntrace, GLIBC_2_0);
compat_symbol (libc_malloc_debug, posix_memalign, posix_memalign, GLIBC_2_2);
compat_symbol (libc_malloc_debug, pvalloc, pvalloc, GLIBC_2_0);
compat_symbol (libc_malloc_debug, realloc, realloc, GLIBC_2_0);
compat_symbol (libc_malloc_debug, valloc, valloc, GLIBC_2_0);
compat_symbol (libc_malloc_debug, __free_hook, __free_hook, GLIBC_2_0);
compat_symbol (libc_malloc_debug, __malloc_hook, __malloc_hook, GLIBC_2_0);
compat_symbol (libc_malloc_debug, __realloc_hook, __realloc_hook, GLIBC_2_0);
compat_symbol (libc_malloc_debug, __memalign_hook, __memalign_hook, GLIBC_2_0);