1/* Malloc debug DSO.
2 Copyright (C) 2021-2024 Free Software Foundation, Inc.
3 Copyright The GNU Toolchain Authors.
4 This file is part of the GNU C Library.
5
6 The GNU C Library is free software; you can redistribute it and/or
7 modify it under the terms of the GNU Lesser General Public License as
8 published by the Free Software Foundation; either version 2.1 of the
9 License, or (at your option) any later version.
10
11 The GNU C Library is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 Lesser General Public License for more details.
15
16 You should have received a copy of the GNU Lesser General Public
17 License along with the GNU C Library; see the file COPYING.LIB. If
18 not, see <https://www.gnu.org/licenses/>. */
19
20#include <atomic.h>
21#include <libc-symbols.h>
22#include <shlib-compat.h>
23#include <string.h>
24#include <unistd.h>
25#include <sys/param.h>
26
27/* Support only the glibc allocators. */
28extern void *__libc_malloc (size_t);
29extern void __libc_free (void *);
30extern void *__libc_realloc (void *, size_t);
31extern void *__libc_memalign (size_t, size_t);
32extern void *__libc_valloc (size_t);
33extern void *__libc_pvalloc (size_t);
34extern void *__libc_calloc (size_t, size_t);
35
36#define DEBUG_FN(fn) \
37 static __typeof (__libc_ ## fn) __debug_ ## fn
38
39DEBUG_FN(malloc);
40DEBUG_FN(free);
41DEBUG_FN(realloc);
42DEBUG_FN(memalign);
43DEBUG_FN(valloc);
44DEBUG_FN(pvalloc);
45DEBUG_FN(calloc);
46
/* -1 before generic_hook_ini has run, 0 while it is running, 1 once
   initialization is complete (consulted by malloc_set_state).  */
static int debug_initialized = -1;

/* Bit flags identifying which debugging layers are active.  */
enum malloc_debug_hooks
{
  MALLOC_NONE_HOOK = 0,
  MALLOC_MCHECK_HOOK = 1 << 0,	/* mcheck() */
  MALLOC_MTRACE_HOOK = 1 << 1,	/* mtrace() */
  MALLOC_CHECK_HOOK = 1 << 2,	/* MALLOC_CHECK_ or glibc.malloc.check.  */
};
/* Bitmask of currently enabled malloc_debug_hooks flags.  */
static unsigned __malloc_debugging_hooks;
57
58static __always_inline bool
59__is_malloc_debug_enabled (enum malloc_debug_hooks flag)
60{
61 return __malloc_debugging_hooks & flag;
62}
63
64static __always_inline void
65__malloc_debug_enable (enum malloc_debug_hooks flag)
66{
67 __malloc_debugging_hooks |= flag;
68}
69
70static __always_inline void
71__malloc_debug_disable (enum malloc_debug_hooks flag)
72{
73 __malloc_debugging_hooks &= ~flag;
74}
75
76#include "mcheck.c"
77#include "mtrace.c"
78#include "malloc-check.c"
79
80#if SHLIB_COMPAT (libc_malloc_debug, GLIBC_2_0, GLIBC_2_24)
81extern void (*__malloc_initialize_hook) (void);
82compat_symbol_reference (libc, __malloc_initialize_hook,
83 __malloc_initialize_hook, GLIBC_2_0);
84#endif
85
86static void *malloc_hook_ini (size_t, const void *) __THROW;
87static void *realloc_hook_ini (void *, size_t, const void *) __THROW;
88static void *memalign_hook_ini (size_t, size_t, const void *) __THROW;
89
90void (*__free_hook) (void *, const void *) = NULL;
91void *(*__malloc_hook) (size_t, const void *) = malloc_hook_ini;
92void *(*__realloc_hook) (void *, size_t, const void *) = realloc_hook_ini;
93void *(*__memalign_hook) (size_t, size_t, const void *) = memalign_hook_ini;
94
95/* Hooks for debugging versions. The initial hooks just call the
96 initialization routine, then do the normal work. */
97
98/* These hooks will get executed only through the interposed allocator
99 functions in libc_malloc_debug.so. This means that the calls to malloc,
100 realloc, etc. will lead back into the interposed functions, which is what we
101 want.
102
103 These initial hooks are assumed to be called in a single-threaded context,
104 so it is safe to reset all hooks at once upon initialization. */
105
/* One-time initialization, invoked from the first hook call.  Clears the
   compat hooks, initializes either malloc-check or the regular libc
   allocator, and runs the application's legacy __malloc_initialize_hook
   if one is installed.  Assumed to run single-threaded (see comment
   above).  */
static void
generic_hook_ini (void)
{
  debug_initialized = 0;
  __malloc_hook = NULL;
  __realloc_hook = NULL;
  __memalign_hook = NULL;

  /* malloc check does not quite co-exist with libc malloc, so initialize
     either one or the other.  */
  if (!initialize_malloc_check ())
    /* The compiler does not know that these functions are allocators, so it
       will not try to optimize it away.  */
    __libc_free (__libc_malloc (0));

#if SHLIB_COMPAT (libc_malloc_debug, GLIBC_2_0, GLIBC_2_24)
  /* Run the legacy initialization hook, if the application set one.  */
  void (*hook) (void) = __malloc_initialize_hook;
  if (hook != NULL)
    (*hook)();
#endif

  debug_initialized = 1;
}
129
/* Initial __malloc_hook: set up the debug machinery on first use, then
   service the request through the debug allocator.  */
static void *
malloc_hook_ini (size_t sz, const void *caller)
{
  generic_hook_ini ();
  return __debug_malloc (sz);
}
136
/* Initial __realloc_hook: set up the debug machinery on first use, then
   service the request through the debug allocator.  */
static void *
realloc_hook_ini (void *ptr, size_t sz, const void *caller)
{
  generic_hook_ini ();
  return __debug_realloc (ptr, sz);
}
143
/* Initial __memalign_hook: set up the debug machinery on first use, then
   service the request through the debug allocator.  */
static void *
memalign_hook_ini (size_t alignment, size_t sz, const void *caller)
{
  generic_hook_ini ();
  return __debug_memalign (alignment, sz);
}
150
151static size_t pagesize;
152
/* These variables are used for undumping support.  Chunks are marked
   as using mmap, but we leave them alone if they fall into this
   range.  NB: The chunk size for these chunks only includes the
   initial size field (of SIZE_SZ bytes), there is no trailing size
   field (unlike with regular mmapped chunks).  */
158static mchunkptr dumped_main_arena_start; /* Inclusive. */
159static mchunkptr dumped_main_arena_end; /* Exclusive. */
160
/* True if the chunk pointer P falls into the dumped arena.  Use this
   after chunk_is_mmapped indicates a chunk is mmapped.  */
#define DUMPED_MAIN_ARENA_CHUNK(p) \
  ((p) >= dumped_main_arena_start && (p) < dumped_main_arena_end)
165
166/* The allocator functions. */
167
168static void *
169__debug_malloc (size_t bytes)
170{
171 void *(*hook) (size_t, const void *) = atomic_forced_read (__malloc_hook);
172 if (__builtin_expect (hook != NULL, 0))
173 return (*hook)(bytes, RETURN_ADDRESS (0));
174
175 void *victim = NULL;
176 size_t orig_bytes = bytes;
177 if ((!__is_malloc_debug_enabled (flag: MALLOC_MCHECK_HOOK)
178 || !malloc_mcheck_before (sizep: &bytes, victimp: &victim)))
179 {
180 victim = (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK)
181 ? malloc_check (sz: bytes) : __libc_malloc (bytes));
182 }
183 if (__is_malloc_debug_enabled (flag: MALLOC_MCHECK_HOOK) && victim != NULL)
184 victim = malloc_mcheck_after (mem: victim, size: orig_bytes);
185 if (__is_malloc_debug_enabled (flag: MALLOC_MTRACE_HOOK))
186 malloc_mtrace_after (block: victim, size: orig_bytes, RETURN_ADDRESS (0));
187
188 return victim;
189}
190strong_alias (__debug_malloc, malloc)
191
192static void
193__debug_free (void *mem)
194{
195 void (*hook) (void *, const void *) = atomic_forced_read (__free_hook);
196 if (__builtin_expect (hook != NULL, 0))
197 {
198 (*hook)(mem, RETURN_ADDRESS (0));
199 return;
200 }
201
202 if (__is_malloc_debug_enabled (flag: MALLOC_MCHECK_HOOK))
203 mem = free_mcheck (ptr: mem);
204
205 if (DUMPED_MAIN_ARENA_CHUNK (mem2chunk (mem)))
206 /* Do nothing. */;
207 else if (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK))
208 free_check (mem);
209 else
210 __libc_free (mem);
211 if (__is_malloc_debug_enabled (flag: MALLOC_MTRACE_HOOK))
212 free_mtrace (ptr: mem, RETURN_ADDRESS (0));
213}
214strong_alias (__debug_free, free)
215
216static void *
217__debug_realloc (void *oldmem, size_t bytes)
218{
219 void *(*hook) (void *, size_t, const void *) =
220 atomic_forced_read (__realloc_hook);
221 if (__builtin_expect (hook != NULL, 0))
222 return (*hook)(oldmem, bytes, RETURN_ADDRESS (0));
223
224 size_t orig_bytes = bytes, oldsize = 0;
225 void *victim = NULL;
226
227 if ((!__is_malloc_debug_enabled (flag: MALLOC_MCHECK_HOOK)
228 || !realloc_mcheck_before (ptrp: &oldmem, sizep: &bytes, oldsize: &oldsize, victimp: &victim)))
229 {
230 mchunkptr oldp = mem2chunk (oldmem);
231
232 /* If this is a faked mmapped chunk from the dumped main arena,
233 always make a copy (and do not free the old chunk). */
234 if (DUMPED_MAIN_ARENA_CHUNK (oldp))
235 {
236 if (bytes == 0 && oldmem != NULL)
237 victim = NULL;
238 else
239 {
240 const INTERNAL_SIZE_T osize = chunksize (oldp);
241 /* Must alloc, copy, free. */
242 victim = __debug_malloc (bytes);
243 /* Copy as many bytes as are available from the old chunk
244 and fit into the new size. NB: The overhead for faked
245 mmapped chunks is only SIZE_SZ, not CHUNK_HDR_SZ as for
246 regular mmapped chunks. */
247 if (victim != NULL)
248 {
249 if (bytes > osize - SIZE_SZ)
250 bytes = osize - SIZE_SZ;
251 memcpy (dest: victim, src: oldmem, n: bytes);
252 }
253 }
254 }
255 else if (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK))
256 victim = realloc_check (oldmem, bytes);
257 else
258 victim = __libc_realloc (oldmem, bytes);
259 }
260 if (__is_malloc_debug_enabled (flag: MALLOC_MCHECK_HOOK) && victim != NULL)
261 victim = realloc_mcheck_after (ptr: victim, oldptr: oldmem, size: orig_bytes,
262 osize: oldsize);
263 if (__is_malloc_debug_enabled (flag: MALLOC_MTRACE_HOOK))
264 realloc_mtrace_after (block: victim, oldptr: oldmem, size: orig_bytes, RETURN_ADDRESS (0));
265
266 return victim;
267}
268strong_alias (__debug_realloc, realloc)
269
270static void *
271_debug_mid_memalign (size_t alignment, size_t bytes, const void *address)
272{
273 void *(*hook) (size_t, size_t, const void *) =
274 atomic_forced_read (__memalign_hook);
275 if (__builtin_expect (hook != NULL, 0))
276 return (*hook)(alignment, bytes, address);
277
278 void *victim = NULL;
279 size_t orig_bytes = bytes;
280
281 if ((!__is_malloc_debug_enabled (flag: MALLOC_MCHECK_HOOK)
282 || !memalign_mcheck_before (alignment, sizep: &bytes, victimp: &victim)))
283 {
284 victim = (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK)
285 ? memalign_check (alignment, bytes)
286 : __libc_memalign (alignment, bytes));
287 }
288 if (__is_malloc_debug_enabled (flag: MALLOC_MCHECK_HOOK) && victim != NULL)
289 victim = memalign_mcheck_after (block: victim, alignment, size: orig_bytes);
290 if (__is_malloc_debug_enabled (flag: MALLOC_MTRACE_HOOK))
291 memalign_mtrace_after (block: victim, size: orig_bytes, caller: address);
292
293 return victim;
294}
295
296static void *
297__debug_memalign (size_t alignment, size_t bytes)
298{
299 return _debug_mid_memalign (alignment, bytes, RETURN_ADDRESS (0));
300}
301strong_alias (__debug_memalign, memalign)
302static void *
303__debug_aligned_alloc (size_t alignment, size_t bytes)
304{
305 if (!powerof2 (alignment) || alignment == 0)
306 return NULL;
307 return _debug_mid_memalign (alignment, bytes, RETURN_ADDRESS (0));
308}
309strong_alias (__debug_aligned_alloc, aligned_alloc)
310
311static void *
312__debug_pvalloc (size_t bytes)
313{
314 size_t rounded_bytes;
315
316 if (!pagesize)
317 pagesize = sysconf (_SC_PAGESIZE);
318
319 /* ALIGN_UP with overflow check. */
320 if (__glibc_unlikely (__builtin_add_overflow (bytes,
321 pagesize - 1,
322 &rounded_bytes)))
323 {
324 errno = ENOMEM;
325 return NULL;
326 }
327 rounded_bytes = rounded_bytes & -(pagesize - 1);
328
329 return _debug_mid_memalign (alignment: pagesize, bytes: rounded_bytes, RETURN_ADDRESS (0));
330}
331strong_alias (__debug_pvalloc, pvalloc)
332
333static void *
334__debug_valloc (size_t bytes)
335{
336 if (!pagesize)
337 pagesize = sysconf (_SC_PAGESIZE);
338
339 return _debug_mid_memalign (alignment: pagesize, bytes, RETURN_ADDRESS (0));
340}
341strong_alias (__debug_valloc, valloc)
342
343static int
344__debug_posix_memalign (void **memptr, size_t alignment, size_t bytes)
345{
346 /* Test whether the SIZE argument is valid. It must be a power of
347 two multiple of sizeof (void *). */
348 if (alignment % sizeof (void *) != 0
349 || !powerof2 (alignment / sizeof (void *))
350 || alignment == 0)
351 return EINVAL;
352
353 *memptr = _debug_mid_memalign (alignment, bytes, RETURN_ADDRESS (0));
354
355 if (*memptr == NULL)
356 return ENOMEM;
357
358 return 0;
359}
360strong_alias (__debug_posix_memalign, posix_memalign)
361
362static void *
363__debug_calloc (size_t nmemb, size_t size)
364{
365 size_t bytes;
366
367 if (__glibc_unlikely (__builtin_mul_overflow (nmemb, size, &bytes)))
368 {
369 errno = ENOMEM;
370 return NULL;
371 }
372
373 void *(*hook) (size_t, const void *) = atomic_forced_read (__malloc_hook);
374 if (__builtin_expect (hook != NULL, 0))
375 {
376 void *mem = (*hook)(bytes, RETURN_ADDRESS (0));
377
378 if (mem != NULL)
379 memset (s: mem, c: 0, n: bytes);
380
381 return mem;
382 }
383
384 size_t orig_bytes = bytes;
385 void *victim = NULL;
386
387 if ((!__is_malloc_debug_enabled (flag: MALLOC_MCHECK_HOOK)
388 || !malloc_mcheck_before (sizep: &bytes, victimp: &victim)))
389 {
390 victim = (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK)
391 ? malloc_check (sz: bytes) : __libc_malloc (bytes));
392 }
393 if (victim != NULL)
394 {
395 if (__is_malloc_debug_enabled (flag: MALLOC_MCHECK_HOOK))
396 victim = malloc_mcheck_after (mem: victim, size: orig_bytes);
397 memset (s: victim, c: 0, n: orig_bytes);
398 }
399 if (__is_malloc_debug_enabled (flag: MALLOC_MTRACE_HOOK))
400 malloc_mtrace_after (block: victim, size: orig_bytes, RETURN_ADDRESS (0));
401
402 return victim;
403}
404strong_alias (__debug_calloc, calloc)
405
406size_t
407malloc_usable_size (void *mem)
408{
409 if (mem == NULL)
410 return 0;
411
412 if (__is_malloc_debug_enabled (flag: MALLOC_MCHECK_HOOK))
413 return mcheck_usable_size (h: mem);
414 if (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK))
415 return malloc_check_get_size (mem);
416
417 mchunkptr p = mem2chunk (mem);
418 if (DUMPED_MAIN_ARENA_CHUNK (p))
419 return chunksize (p) - SIZE_SZ;
420
421 return musable (mem);
422}
423
/* Name of a local holding a dlsym-resolved pointer to libc's own
   implementation of SYM (used by the statistics wrappers below).  */
#define LIBC_SYMBOL(sym) libc_ ## sym
/* Name of the static cache slot for the resolved symbol.  */
#define SYMHANDLE(sym) sym ## _handle

/* Resolve SYM in the next object in lookup order (the real libc) once,
   caching the result; evaluates to the cached pointer, NULL if the
   lookup failed.  */
#define LOAD_SYM(sym) ({ \
  static void *SYMHANDLE (sym); \
  if (SYMHANDLE (sym) == NULL) \
    SYMHANDLE (sym) = dlsym (RTLD_NEXT, #sym); \
  SYMHANDLE (sym); \
})
433
434int
435malloc_info (int options, FILE *fp)
436{
437 if (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK))
438 return __malloc_info (options, fp);
439
440 int (*LIBC_SYMBOL (malloc_info)) (int, FILE *) = LOAD_SYM (malloc_info);
441 if (LIBC_SYMBOL (malloc_info) == NULL)
442 return -1;
443
444 return LIBC_SYMBOL (malloc_info) (options, fp);
445}
446
447int
448mallopt (int param_number, int value)
449{
450 if (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK))
451 return __libc_mallopt (param_number, value);
452
453 int (*LIBC_SYMBOL (mallopt)) (int, int) = LOAD_SYM (mallopt);
454 if (LIBC_SYMBOL (mallopt) == NULL)
455 return 0;
456
457 return LIBC_SYMBOL (mallopt) (param_number, value);
458}
459
460void
461malloc_stats (void)
462{
463 if (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK))
464 return __malloc_stats ();
465
466 void (*LIBC_SYMBOL (malloc_stats)) (void) = LOAD_SYM (malloc_stats);
467 if (LIBC_SYMBOL (malloc_stats) == NULL)
468 return;
469
470 LIBC_SYMBOL (malloc_stats) ();
471}
472
473struct mallinfo2
474mallinfo2 (void)
475{
476 if (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK))
477 return __libc_mallinfo2 ();
478
479 struct mallinfo2 (*LIBC_SYMBOL (mallinfo2)) (void) = LOAD_SYM (mallinfo2);
480 if (LIBC_SYMBOL (mallinfo2) == NULL)
481 {
482 struct mallinfo2 ret = {0};
483 return ret;
484 }
485
486 return LIBC_SYMBOL (mallinfo2) ();
487}
488
489struct mallinfo
490mallinfo (void)
491{
492 if (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK))
493 return __libc_mallinfo ();
494
495 struct mallinfo (*LIBC_SYMBOL (mallinfo)) (void) = LOAD_SYM (mallinfo);
496 if (LIBC_SYMBOL (mallinfo) == NULL)
497 {
498 struct mallinfo ret = {0};
499 return ret;
500 }
501
502 return LIBC_SYMBOL (mallinfo) ();
503}
504
505int
506malloc_trim (size_t s)
507{
508 if (__is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK))
509 return __malloc_trim (s);
510
511 int (*LIBC_SYMBOL (malloc_trim)) (size_t) = LOAD_SYM (malloc_trim);
512 if (LIBC_SYMBOL (malloc_trim) == NULL)
513 return 0;
514
515 return LIBC_SYMBOL (malloc_trim) (s);
516}
517
518#if SHLIB_COMPAT (libc_malloc_debug, GLIBC_2_0, GLIBC_2_25)
519
520/* Support for restoring dumped heaps contained in historic Emacs
521 executables. The heap saving feature (malloc_get_state) is no
522 longer implemented in this version of glibc, but we have a heap
523 rewriter in malloc_set_state which transforms the heap into a
524 version compatible with current malloc. */
525
526#define MALLOC_STATE_MAGIC 0x444c4541l
527#define MALLOC_STATE_VERSION (0 * 0x100l + 5l) /* major*0x100 + minor */
528
/* Layout of the state blob produced by the historic malloc_get_state.
   malloc_set_state below only consults magic, version, av (for the top
   chunk), sbrk_base and sbrked_mem_bytes; the remaining fields are kept
   so the layout matches what old binaries dumped.  */
struct malloc_save_state
{
  long magic;			/* Must be MALLOC_STATE_MAGIC.  */
  long version;			/* major * 0x100 + minor.  */
  mbinptr av[NBINS * 2 + 2];	/* av[2] holds the dumped top chunk.  */
  char *sbrk_base;		/* Start of the dumped sbrk heap.  */
  int sbrked_mem_bytes;		/* Length of the dumped sbrk heap.  */
  unsigned long trim_threshold;
  unsigned long top_pad;
  unsigned int n_mmaps_max;
  unsigned long mmap_threshold;
  int check_action;
  unsigned long max_sbrked_mem;
  unsigned long max_total_mem;	/* Always 0, for backwards compatibility.  */
  unsigned int n_mmaps;
  unsigned int max_n_mmaps;
  unsigned long mmapped_mem;
  unsigned long max_mmapped_mem;
  int using_malloc_checking;
  unsigned long max_fast;
  unsigned long arena_test;
  unsigned long arena_max;
  unsigned long narenas;
};
553
554/* Dummy implementation which always fails. We need to provide this
555 symbol so that existing Emacs binaries continue to work with
556 BIND_NOW. */
void *
malloc_get_state (void)
{
  /* Heap dumping support was removed; always fail with ENOSYS.  */
  __set_errno (ENOSYS);
  return NULL;
}
compat_symbol (libc_malloc_debug, malloc_get_state, malloc_get_state,
	       GLIBC_2_0);
565
566int
567malloc_set_state (void *msptr)
568{
569 struct malloc_save_state *ms = (struct malloc_save_state *) msptr;
570
571 if (ms->magic != MALLOC_STATE_MAGIC)
572 return -1;
573
574 /* Must fail if the major version is too high. */
575 if ((ms->version & ~0xffl) > (MALLOC_STATE_VERSION & ~0xffl))
576 return -2;
577
578 if (debug_initialized == 1)
579 return -1;
580
581 bool check_was_enabled = __is_malloc_debug_enabled (flag: MALLOC_CHECK_HOOK);
582
583 /* It's not too late, so disable MALLOC_CHECK_ and all of the hooks. */
584 __malloc_hook = NULL;
585 __realloc_hook = NULL;
586 __free_hook = NULL;
587 __memalign_hook = NULL;
588 __malloc_debug_disable (flag: MALLOC_CHECK_HOOK);
589
590 /* We do not need to perform locking here because malloc_set_state
591 must be called before the first call into the malloc subsystem (usually via
592 __malloc_initialize_hook). pthread_create always calls calloc and thus
593 must be called only afterwards, so there cannot be more than one thread
594 when we reach this point. Also handle initialization if either we ended
595 up being called before the first malloc or through the hook when
596 malloc-check was enabled. */
597 if (debug_initialized < 0)
598 generic_hook_ini ();
599 else if (check_was_enabled)
600 __libc_free (__libc_malloc (0));
601
602 /* Patch the dumped heap. We no longer try to integrate into the
603 existing heap. Instead, we mark the existing chunks as mmapped.
604 Together with the update to dumped_main_arena_start and
605 dumped_main_arena_end, realloc and free will recognize these
606 chunks as dumped fake mmapped chunks and never free them. */
607
608 /* Find the chunk with the lowest address with the heap. */
609 mchunkptr chunk = NULL;
610 {
611 size_t *candidate = (size_t *) ms->sbrk_base;
612 size_t *end = (size_t *) (ms->sbrk_base + ms->sbrked_mem_bytes);
613 while (candidate < end)
614 if (*candidate != 0)
615 {
616 chunk = mem2chunk ((void *) (candidate + 1));
617 break;
618 }
619 else
620 ++candidate;
621 }
622 if (chunk == NULL)
623 return 0;
624
625 /* Iterate over the dumped heap and patch the chunks so that they
626 are treated as fake mmapped chunks. */
627 mchunkptr top = ms->av[2];
628 while (chunk < top)
629 {
630 if (inuse (chunk))
631 {
632 /* Mark chunk as mmapped, to trigger the fallback path. */
633 size_t size = chunksize (chunk);
634 set_head (chunk, size | IS_MMAPPED);
635 }
636 chunk = next_chunk (chunk);
637 }
638
639 /* The dumped fake mmapped chunks all lie in this address range. */
640 dumped_main_arena_start = (mchunkptr) ms->sbrk_base;
641 dumped_main_arena_end = top;
642
643 return 0;
644}
645compat_symbol (libc_malloc_debug, malloc_set_state, malloc_set_state,
646 GLIBC_2_0);
647#endif
648
649/* Do not allow linking against the library. */
650compat_symbol (libc_malloc_debug, aligned_alloc, aligned_alloc, GLIBC_2_16);
651compat_symbol (libc_malloc_debug, calloc, calloc, GLIBC_2_0);
652compat_symbol (libc_malloc_debug, free, free, GLIBC_2_0);
653compat_symbol (libc_malloc_debug, mallinfo2, mallinfo2, GLIBC_2_33);
654compat_symbol (libc_malloc_debug, mallinfo, mallinfo, GLIBC_2_0);
655compat_symbol (libc_malloc_debug, malloc_info, malloc_info, GLIBC_2_10);
656compat_symbol (libc_malloc_debug, malloc, malloc, GLIBC_2_0);
657compat_symbol (libc_malloc_debug, malloc_stats, malloc_stats, GLIBC_2_0);
658compat_symbol (libc_malloc_debug, malloc_trim, malloc_trim, GLIBC_2_0);
659compat_symbol (libc_malloc_debug, malloc_usable_size, malloc_usable_size,
660 GLIBC_2_0);
661compat_symbol (libc_malloc_debug, mallopt, mallopt, GLIBC_2_0);
662compat_symbol (libc_malloc_debug, mcheck_check_all, mcheck_check_all,
663 GLIBC_2_2);
664compat_symbol (libc_malloc_debug, mcheck, mcheck, GLIBC_2_0);
665compat_symbol (libc_malloc_debug, mcheck_pedantic, mcheck_pedantic, GLIBC_2_2);
666compat_symbol (libc_malloc_debug, memalign, memalign, GLIBC_2_0);
667compat_symbol (libc_malloc_debug, mprobe, mprobe, GLIBC_2_0);
668compat_symbol (libc_malloc_debug, mtrace, mtrace, GLIBC_2_0);
669compat_symbol (libc_malloc_debug, muntrace, muntrace, GLIBC_2_0);
670compat_symbol (libc_malloc_debug, posix_memalign, posix_memalign, GLIBC_2_2);
671compat_symbol (libc_malloc_debug, pvalloc, pvalloc, GLIBC_2_0);
672compat_symbol (libc_malloc_debug, realloc, realloc, GLIBC_2_0);
673compat_symbol (libc_malloc_debug, valloc, valloc, GLIBC_2_0);
674compat_symbol (libc_malloc_debug, __free_hook, __free_hook, GLIBC_2_0);
675compat_symbol (libc_malloc_debug, __malloc_hook, __malloc_hook, GLIBC_2_0);
676compat_symbol (libc_malloc_debug, __realloc_hook, __realloc_hook, GLIBC_2_0);
677compat_symbol (libc_malloc_debug, __memalign_hook, __memalign_hook, GLIBC_2_0);
678

source code of glibc/malloc/malloc-debug.c