--- a/malloc/Makefile
+++ b/malloc/Makefile
@@ -27,7 +27,8 @@ headers := $(dist-headers) obstack.h mcheck.h
tests := mallocbug tst-malloc tst-valloc tst-calloc tst-obstack \
tst-mallocstate tst-mcheck tst-mallocfork tst-trim1 \
tst-malloc-usable tst-realloc tst-posix_memalign \
- tst-pvalloc tst-memalign tst-mallopt tst-scratch_buffer
+ tst-pvalloc tst-memalign tst-mallopt tst-scratch_buffer \
+ tst-malloc-backtrace
test-srcs = tst-mtrace
routines = malloc morecore mcheck mtrace obstack \
@@ -44,6 +45,9 @@ extra-libs-others = $(extra-libs)
libmemusage-routines = memusage
libmemusage-inhibit-o = $(filter-out .os,$(object-suffixes))
+$(objpfx)tst-malloc-backtrace: $(common-objpfx)nptl/libpthread.so \
+ $(common-objpfx)nptl/libpthread_nonshared.a
+
# These should be removed by `make clean'.
extra-objs = mcheck-init.o libmcheck.a
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -99,7 +99,7 @@ int __malloc_initialized = -1;
} while (0)
#define arena_lock(ptr, size) do { \
- if (ptr) \
+ if (ptr && !arena_is_corrupt (ptr)) \
(void) mutex_lock (&ptr->mutex); \
else \
ptr = arena_get2 (ptr, (size), NULL); \
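This arena_lock change is the heart of the fix: previously, when malloc detected corruption while holding an arena's mutex, the error path (printing a backtrace can call back into the allocator) tried to take that same mutex again and deadlocked. A minimal sketch of that failure mode in plain pthreads, independent of glibc internals:

    /* Build with: gcc -pthread demo.c; the program hangs by design. */
    #include <pthread.h>

    static pthread_mutex_t arena_mutex = PTHREAD_MUTEX_INITIALIZER;

    int
    main (void)
    {
      pthread_mutex_lock (&arena_mutex);
      /* Re-locking a non-recursive mutex on the same thread never returns;
         this models the corruption-reporting path re-entering malloc while
         the arena lock is still held.  */
      pthread_mutex_lock (&arena_mutex);
      return 0;
    }

With the corruption bit checked first, the nested call refuses the poisoned arena and falls through to arena_get2 instead of blocking.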
@@ -686,7 +686,7 @@ heap_trim (heap_info *heap, size_t pad)
if (!prev_inuse (p)) /* consolidate backward */
{
p = prev_chunk (p);
- unlink (p, bck, fwd);
+ unlink (ar_ptr, p, bck, fwd);
}
assert (((unsigned long) ((char *) p + new_size) & (pagesz - 1)) == 0);
assert (((char *) p + new_size) == ((char *) heap + heap->size));
@@ -809,7 +809,7 @@ reused_arena (mstate avoid_arena)
result = next_to_use;
do
{
- if (!mutex_trylock (&result->mutex))
+ if (!arena_is_corrupt (result) && !mutex_trylock (&result->mutex))
goto out;
result = result->next;
@@ -821,7 +821,18 @@ reused_arena (mstate avoid_arena)
if (result == avoid_arena)
result = result->next;
- /* No arena available. Wait for the next in line. */
+ /* Make sure that the arena we get is not corrupted. */
+ mstate begin = result;
+ while (arena_is_corrupt (result) || result == avoid_arena)
+ {
+ result = result->next;
+ if (result == begin)
+ /* We have looped around the whole arena list without finding one
+ that is neither corrupted nor the one we wanted to avoid. */
+ return NULL;
+ }
+
+ /* No arena available without contention. Wait for the next in line. */
LIBC_PROBE (memory_arena_reuse_wait, 3, &result->mutex, result, avoid_arena);
(void) mutex_lock (&result->mutex);
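reused_arena now makes at most one full lap over the circular arena list before giving up. The same traversal pattern, reduced to a self-contained sketch (toy types, not glibc's):

    struct node { int usable; struct node *next; };  /* circular list */

    /* Return the first usable node, or NULL after one complete lap
       starting from START.  */
    static struct node *
    find_usable (struct node *start)
    {
      struct node *n = start;
      do
        {
          if (n->usable)
            return n;
          n = n->next;
        }
      while (n != start);
      return NULL;
    }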
--- a/malloc/hooks.c
+++ b/malloc/hooks.c
@@ -112,7 +112,8 @@ malloc_check_get_size (mchunkptr p)
if (c <= 0 || size < (c + 2 * SIZE_SZ))
{
malloc_printerr (check_action, "malloc_check_get_size: memory corruption",
- chunk2mem (p));
+ chunk2mem (p),
+ chunk_is_mmapped (p) ? NULL : arena_for_chunk (p));
return 0;
}
}
@@ -237,7 +238,8 @@ top_check (void)
(char *) t + chunksize (t) == mp_.sbrk_base + main_arena.system_mem)))
return 0;
- malloc_printerr (check_action, "malloc: top chunk is corrupt", t);
+ malloc_printerr (check_action, "malloc: top chunk is corrupt", t,
+ &main_arena);
/* Try to set up a new top chunk. */
brk = MORECORE (0);
@@ -295,7 +297,8 @@ free_check (void *mem, const void *caller)
{
(void) mutex_unlock (&main_arena.mutex);
- malloc_printerr (check_action, "free(): invalid pointer", mem);
+ malloc_printerr (check_action, "free(): invalid pointer", mem,
+ &main_arena);
return;
}
if (chunk_is_mmapped (p))
@@ -333,7 +336,8 @@ realloc_check (void *oldmem, size_t bytes, const void *caller)
(void) mutex_unlock (&main_arena.mutex);
if (!oldp)
{
- malloc_printerr (check_action, "realloc(): invalid pointer", oldmem);
+ malloc_printerr (check_action, "realloc(): invalid pointer", oldmem,
+ &main_arena);
return malloc_check (bytes, NULL);
}
const INTERNAL_SIZE_T oldsize = chunksize (oldp);
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -1059,7 +1059,7 @@ static void* _int_realloc(mstate, mchunkptr, INTERNAL_SIZE_T,
static void* _int_memalign(mstate, size_t, size_t);
static void* _mid_memalign(size_t, size_t, void *);
-static void malloc_printerr(int action, const char *str, void *ptr);
+static void malloc_printerr(int action, const char *str, void *ptr, mstate av);
static void* internal_function mem2mem_check(void *p, size_t sz);
static int internal_function top_check(void);
@@ -1411,11 +1411,11 @@ typedef struct malloc_chunk *mbinptr;
#define last(b) ((b)->bk)
/* Take a chunk off a bin list */
-#define unlink(P, BK, FD) { \
+#define unlink(AV, P, BK, FD) { \
FD = P->fd; \
BK = P->bk; \
if (__builtin_expect (FD->bk != P || BK->fd != P, 0)) \
- malloc_printerr (check_action, "corrupted double-linked list", P); \
+ malloc_printerr (check_action, "corrupted double-linked list", P, AV); \
else { \
FD->bk = BK; \
BK->fd = FD; \
@@ -1424,7 +1424,8 @@ typedef struct malloc_chunk *mbinptr;
if (__builtin_expect (P->fd_nextsize->bk_nextsize != P, 0) \
|| __builtin_expect (P->bk_nextsize->fd_nextsize != P, 0)) \
malloc_printerr (check_action, \
- "corrupted double-linked list (not small)", P);\
+ "corrupted double-linked list (not small)", \
+ P, AV); \
if (FD->fd_nextsize == NULL) { \
if (P->fd_nextsize == P) \
FD->fd_nextsize = FD->bk_nextsize = FD; \
@@ -1656,6 +1657,15 @@ typedef struct malloc_chunk *mfastbinptr;
#define set_noncontiguous(M) ((M)->flags |= NONCONTIGUOUS_BIT)
#define set_contiguous(M) ((M)->flags &= ~NONCONTIGUOUS_BIT)
+/* ARENA_CORRUPTION_BIT is set if a memory corruption was detected on the
+ arena. Such an arena is no longer used to allocate chunks. Chunks
+ allocated in that arena before detecting corruption are not freed. */
+
+#define ARENA_CORRUPTION_BIT (4U)
+
+#define arena_is_corrupt(A) (((A)->flags & ARENA_CORRUPTION_BIT))
+#define set_arena_corrupt(A) ((A)->flags |= ARENA_CORRUPTION_BIT)
+
/*
Set value of max_fast.
Use impossibly small value if 0.
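The new bit composes with the arena's existing flag bits (in this malloc, FASTCHUNKS_BIT is 1U and NONCONTIGUOUS_BIT is 2U), so poisoning an arena leaves its other state readable. The same set/test idiom on a toy struct, for illustration only:

    #include <assert.h>

    #define TOY_NONCONTIGUOUS_BIT (2U)
    #define TOY_CORRUPTION_BIT    (4U)

    struct toy_arena { unsigned int flags; };

    int
    main (void)
    {
      struct toy_arena a = { .flags = TOY_NONCONTIGUOUS_BIT };
      a.flags |= TOY_CORRUPTION_BIT;             /* set_arena_corrupt */
      assert (a.flags & TOY_CORRUPTION_BIT);     /* arena_is_corrupt */
      assert (a.flags & TOY_NONCONTIGUOUS_BIT);  /* other bits intact */
      return 0;
    }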
@@ -2280,8 +2290,9 @@ sysmalloc (INTERNAL_SIZE_T nb, mstate av)
rather than expanding top.
*/
- if ((unsigned long) (nb) >= (unsigned long) (mp_.mmap_threshold) &&
- (mp_.n_mmaps < mp_.n_mmaps_max))
+ if (av == NULL
+ || ((unsigned long) (nb) >= (unsigned long) (mp_.mmap_threshold)
+ && (mp_.n_mmaps < mp_.n_mmaps_max)))
{
char *mm; /* return value from mmap call*/
@@ -2354,6 +2365,10 @@ sysmalloc (INTERNAL_SIZE_T nb, mstate av)
}
}
+ /* There are no usable arenas and mmap also failed. */
+ if (av == NULL)
+ return 0;
+
/* Record incoming configuration of top */
old_top = av->top;
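When av is NULL every arena is unusable, so sysmalloc serves the request directly from mmap and returns 0 only if that also fails. A rough standalone sketch of such a page-granular mmap allocation; the size-header bookkeeping here is invented (glibc instead tags the chunk header with IS_MMAPPED) and alignment handling is omitted:

    #include <stddef.h>
    #include <string.h>
    #include <sys/mman.h>

    static void *
    mmap_alloc (size_t nb, size_t pagesize)
    {
      /* Round up to whole pages, leaving room for a size header;
         assumes pagesize is a power of two.  */
      size_t total = (nb + sizeof (size_t) + pagesize - 1) & ~(pagesize - 1);
      char *mm = mmap (NULL, total, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      if (mm == MAP_FAILED)
        return NULL;                       /* no arenas and mmap also failed */
      memcpy (mm, &total, sizeof total);   /* remember size for munmap later */
      return mm + sizeof (size_t);
    }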
@@ -2528,7 +2543,8 @@ sysmalloc (INTERNAL_SIZE_T nb, mstate av)
else if (contiguous (av) && old_size && brk < old_end)
{
/* Oops! Someone else killed our space.. Can't touch anything. */
- malloc_printerr (3, "break adjusted to free malloc space", brk);
+ malloc_printerr (3, "break adjusted to free malloc space", brk,
+ av);
}
/*
@@ -2818,7 +2834,7 @@ munmap_chunk (mchunkptr p)
if (__builtin_expect (((block | total_size) & (GLRO (dl_pagesize) - 1)) != 0, 0))
{
malloc_printerr (check_action, "munmap_chunk(): invalid pointer",
- chunk2mem (p));
+ chunk2mem (p), NULL);
return;
}
@@ -2888,22 +2904,19 @@ __libc_malloc (size_t bytes)
arena_get (ar_ptr, bytes);
- if (!ar_ptr)
- return 0;
-
victim = _int_malloc (ar_ptr, bytes);
- if (!victim)
+ /* Retry with another arena only if we were able to find a usable arena
+ before. */
+ if (!victim && ar_ptr != NULL)
{
LIBC_PROBE (memory_malloc_retry, 1, bytes);
ar_ptr = arena_get_retry (ar_ptr, bytes);
- if (__builtin_expect (ar_ptr != NULL, 1))
- {
- victim = _int_malloc (ar_ptr, bytes);
- (void) mutex_unlock (&ar_ptr->mutex);
- }
+ victim = _int_malloc (ar_ptr, bytes);
}
- else
+
+ if (ar_ptr != NULL)
(void) mutex_unlock (&ar_ptr->mutex);
+
assert (!victim || chunk_is_mmapped (mem2chunk (victim)) ||
ar_ptr == arena_for_chunk (mem2chunk (victim)));
return victim;
@@ -2979,6 +2992,11 @@ __libc_realloc (void *oldmem, size_t bytes)
/* its size */
const INTERNAL_SIZE_T oldsize = chunksize (oldp);
+ if (chunk_is_mmapped (oldp))
+ ar_ptr = NULL;
+ else
+ ar_ptr = arena_for_chunk (oldp);
+
/* Little security check which won't hurt performance: the
allocator never wrapps around at the end of the address space.
Therefore we can exclude some size values which might appear
@@ -2986,7 +3004,8 @@ __libc_realloc (void *oldmem, size_t bytes)
if (__builtin_expect ((uintptr_t) oldp > (uintptr_t) -oldsize, 0)
|| __builtin_expect (misaligned_chunk (oldp), 0))
{
- malloc_printerr (check_action, "realloc(): invalid pointer", oldmem);
+ malloc_printerr (check_action, "realloc(): invalid pointer", oldmem,
+ ar_ptr);
return NULL;
}
@@ -3015,10 +3034,8 @@ __libc_realloc (void *oldmem, size_t bytes)
return newmem;
}
- ar_ptr = arena_for_chunk (oldp);
(void) mutex_lock (&ar_ptr->mutex);
-
newp = _int_realloc (ar_ptr, oldp, oldsize, nb);
(void) mutex_unlock (&ar_ptr->mutex);
@@ -3093,22 +3110,18 @@ _mid_memalign (size_t alignment, size_t bytes, void *address)
}
arena_get (ar_ptr, bytes + alignment + MINSIZE);
- if (!ar_ptr)
- return 0;
p = _int_memalign (ar_ptr, alignment, bytes);
- if (!p)
+ if (!p && ar_ptr != NULL)
{
LIBC_PROBE (memory_memalign_retry, 2, bytes, alignment);
ar_ptr = arena_get_retry (ar_ptr, bytes);
- if (__builtin_expect (ar_ptr != NULL, 1))
- {
- p = _int_memalign (ar_ptr, alignment, bytes);
- (void) mutex_unlock (&ar_ptr->mutex);
- }
+ p = _int_memalign (ar_ptr, alignment, bytes);
}
- else
+
+ if (ar_ptr != NULL)
(void) mutex_unlock (&ar_ptr->mutex);
+
assert (!p || chunk_is_mmapped (mem2chunk (p)) ||
ar_ptr == arena_for_chunk (mem2chunk (p)));
return p;
@@ -3187,47 +3200,53 @@ __libc_calloc (size_t n, size_t elem_size)
sz = bytes;
arena_get (av, sz);
- if (!av)
- return 0;
-
- /* Check if we hand out the top chunk, in which case there may be no
- need to clear. */
+ if (av)
+ {
+ /* Check if we hand out the top chunk, in which case there may be no
+ need to clear. */
#if MORECORE_CLEARS
- oldtop = top (av);
- oldtopsize = chunksize (top (av));
+ oldtop = top (av);
+ oldtopsize = chunksize (top (av));
# if MORECORE_CLEARS < 2
- /* Only newly allocated memory is guaranteed to be cleared. */
- if (av == &main_arena &&
- oldtopsize < mp_.sbrk_base + av->max_system_mem - (char *) oldtop)
- oldtopsize = (mp_.sbrk_base + av->max_system_mem - (char *) oldtop);
+ /* Only newly allocated memory is guaranteed to be cleared. */
+ if (av == &main_arena &&
+ oldtopsize < mp_.sbrk_base + av->max_system_mem - (char *) oldtop)
+ oldtopsize = (mp_.sbrk_base + av->max_system_mem - (char *) oldtop);
# endif
- if (av != &main_arena)
+ if (av != &main_arena)
+ {
+ heap_info *heap = heap_for_ptr (oldtop);
+ if (oldtopsize < (char *) heap + heap->mprotect_size - (char *) oldtop)
+ oldtopsize = (char *) heap + heap->mprotect_size - (char *) oldtop;
+ }
+#endif
+ }
+ else
{
- heap_info *heap = heap_for_ptr (oldtop);
- if (oldtopsize < (char *) heap + heap->mprotect_size - (char *) oldtop)
- oldtopsize = (char *) heap + heap->mprotect_size - (char *) oldtop;
+ /* No usable arenas. */
+ oldtop = 0;
+ oldtopsize = 0;
}
-#endif
mem = _int_malloc (av, sz);
assert (!mem || chunk_is_mmapped (mem2chunk (mem)) ||
av == arena_for_chunk (mem2chunk (mem)));
- if (mem == 0)
+ if (mem == 0 && av != NULL)
{
LIBC_PROBE (memory_calloc_retry, 1, sz);
av = arena_get_retry (av, sz);
- if (__builtin_expect (av != NULL, 1))
- {
- mem = _int_malloc (av, sz);
- (void) mutex_unlock (&av->mutex);
- }
- if (mem == 0)
- return 0;
+ mem = _int_malloc (av, sz);
}
- else
+
+ if (av != NULL)
(void) mutex_unlock (&av->mutex);
+
+ /* Allocation failed even after a retry. */
+ if (mem == 0)
+ return 0;
+
p = mem2chunk (mem);
/* Two optional cases in which clearing not necessary */
@@ -3323,6 +3342,16 @@ _int_malloc (mstate av, size_t bytes)
checked_request2size (bytes, nb);
+ /* There are no usable arenas. Fall back to sysmalloc to get a chunk from
+ mmap. */
+ if (__glibc_unlikely (av == NULL))
+ {
+ void *p = sysmalloc (nb, av);
+ if (p != NULL)
+ alloc_perturb (p, bytes);
+ return p;
+ }
+
/*
If the size qualifies as a fastbin, first check corresponding bin.
This code is safe to execute even if av is not yet initialized, so we
@@ -3348,7 +3377,7 @@ _int_malloc (mstate av, size_t bytes)
{
errstr = "malloc(): memory corruption (fast)";
errout:
- malloc_printerr (check_action, errstr, chunk2mem (victim));
+ malloc_printerr (check_action, errstr, chunk2mem (victim), av);
return NULL;
}
check_remalloced_chunk (av, victim, nb);
@@ -3437,7 +3466,7 @@ _int_malloc (mstate av, size_t bytes)
if (__builtin_expect (victim->size <= 2 * SIZE_SZ, 0)
|| __builtin_expect (victim->size > av->system_mem, 0))
malloc_printerr (check_action, "malloc(): memory corruption",
- chunk2mem (victim));
+ chunk2mem (victim), av);
size = chunksize (victim);
/*
@@ -3584,7 +3613,7 @@ _int_malloc (mstate av, size_t bytes)
victim = victim->fd;
remainder_size = size - nb;
- unlink (victim, bck, fwd);
+ unlink (av, victim, bck, fwd);
/* Exhaust */
if (remainder_size < MINSIZE)
@@ -3689,7 +3718,7 @@ _int_malloc (mstate av, size_t bytes)
remainder_size = size - nb;
/* unlink */
- unlink (victim, bck, fwd);
+ unlink (av, victim, bck, fwd);
/* Exhaust */
if (remainder_size < MINSIZE)
@@ -3829,7 +3858,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
errout:
if (!have_lock && locked)
(void) mutex_unlock (&av->mutex);
- malloc_printerr (check_action, errstr, chunk2mem (p));
+ malloc_printerr (check_action, errstr, chunk2mem (p), av);
return;
}
/* We know that each chunk is at least MINSIZE bytes in size or a
@@ -3967,7 +3996,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
prevsize = p->prev_size;
size += prevsize;
p = chunk_at_offset(p, -((long) prevsize));
- unlink(p, bck, fwd);
+ unlink(av, p, bck, fwd);
}
if (nextchunk != av->top) {
@@ -3976,7 +4005,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
/* consolidate forward */
if (!nextinuse) {
- unlink(nextchunk, bck, fwd);
+ unlink(av, nextchunk, bck, fwd);
size += nextsize;
} else
clear_inuse_bit_at_offset(nextchunk, 0);
@@ -4137,7 +4166,7 @@ static void malloc_consolidate(mstate av)
prevsize = p->prev_size;
size += prevsize;
p = chunk_at_offset(p, -((long) prevsize));
- unlink(p, bck, fwd);
+ unlink(av, p, bck, fwd);
}
if (nextchunk != av->top) {
@@ -4145,7 +4174,7 @@ static void malloc_consolidate(mstate av)
if (!nextinuse) {
size += nextsize;
- unlink(nextchunk, bck, fwd);
+ unlink(av, nextchunk, bck, fwd);
} else
clear_inuse_bit_at_offset(nextchunk, 0);
@@ -4214,7 +4243,7 @@ _int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
{
errstr = "realloc(): invalid old size";
errout:
- malloc_printerr (check_action, errstr, chunk2mem (oldp));
+ malloc_printerr (check_action, errstr, chunk2mem (oldp), av);
return NULL;
}
@@ -4260,7 +4289,7 @@ _int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
(unsigned long) (nb))
{
newp = oldp;
- unlink (next, bck, fwd);
+ unlink (av, next, bck, fwd);
}
/* allocate, copy, free */
@@ -4455,6 +4484,10 @@ _int_memalign (mstate av, size_t alignment, size_t bytes)
static int
mtrim (mstate av, size_t pad)
{
+ /* Don't touch corrupt arenas. */
+ if (arena_is_corrupt (av))
+ return 0;
+
/* Ensure initialization/consolidation */
malloc_consolidate (av);
@@ -4945,8 +4978,14 @@ libc_hidden_def (__libc_mallopt)
extern char **__libc_argv attribute_hidden;
static void
-malloc_printerr (int action, const char *str, void *ptr)
+malloc_printerr (int action, const char *str, void *ptr, mstate ar_ptr)
{
+ /* Avoid using this arena in the future. We do not attempt to synchronize
+ this with anything else; at a minimum we want __libc_message to get its
+ resources safely, without stumbling over the current corruption. */
+ if (ar_ptr)
+ set_arena_corrupt (ar_ptr);
+
if ((action & 5) == 5)
__libc_message (action & 2, "%s\n", str);
else if (action & 1)
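Note the ordering inside the new malloc_printerr: the arena is poisoned before __libc_message runs, so any allocation performed while reporting (backtrace machinery, stdio buffers) is routed away from it. The same discipline in a self-contained toy, with hypothetical names:

    #include <stdio.h>

    #define TOY_CORRUPTION_BIT (4U)
    struct toy_arena { unsigned int flags; };

    static void
    report_corruption (struct toy_arena *a, const char *msg)
    {
      if (a)
        a->flags |= TOY_CORRUPTION_BIT;  /* poison FIRST ... */
      fprintf (stderr, "%s\n", msg);     /* ... then report; stands in
                                            for __libc_message */
    }

    int
    main (void)
    {
      struct toy_arena a = { 0 };
      report_corruption (&a, "malloc(): memory corruption");
      return !(a.flags & TOY_CORRUPTION_BIT);
    }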
new file mode 100644
--- /dev/null
+++ b/malloc/tst-malloc-backtrace.c
@@ -0,0 +1,52 @@
+/* Verify that printing a backtrace on memory corruption does not deadlock.
+ Copyright (C) 2015 Free Software Foundation, Inc.
+ This file is part of the GNU C Library.
+
+ The GNU C Library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ The GNU C Library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with the GNU C Library; if not, see
+ <http://www.gnu.org/licenses/>. */
+
+
+#include <stdlib.h>
+
+#define SIZE 4096
+
+/* Wrap free with a function to prevent gcc from optimizing it out. */
+static void
+__attribute__((noinline))
+call_free (void *ptr)
+{
+ free (ptr);
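+ /* Corrupt the size field of the chunk we just freed so that the next
+ malloc of the same size detects the corruption and aborts. */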
+ *(size_t *)(ptr - sizeof (size_t)) = 1;
+}
+
+int
+do_test (void)
+{
+ void *ptr1 = malloc (SIZE);
+ void *ptr2 = malloc (SIZE);
+
+ call_free ((void *) ptr1);
+ ptr1 = malloc (SIZE);
+
+ /* Not reached. The return statement is to put ptr2 into use so that gcc
+ doesn't optimize out that malloc call. */
+ return (ptr1 == ptr2);
+}
+
+#define TEST_FUNCTION do_test ()
+#define EXPECTED_SIGNAL SIGABRT
+
+#include "../test-skeleton.c"