r3980 - in glibc-package/trunk/debian: . patches patches/any
Author: aurel32
Date: 2009-11-17 08:33:14 +0000 (Tue, 17 Nov 2009)
New Revision: 3980
Added:
glibc-package/trunk/debian/patches/any/cvs-malloc-check.diff
Modified:
glibc-package/trunk/debian/changelog
glibc-package/trunk/debian/patches/series
Log:
* patches/any/cvs-malloc-check.diff: new patch from upstream to fix bugs
with MALLOC_CHECK_ (the glibc malloc-debugging environment variable).
Modified: glibc-package/trunk/debian/changelog
===================================================================
--- glibc-package/trunk/debian/changelog 2009-11-17 00:08:17 UTC (rev 3979)
+++ glibc-package/trunk/debian/changelog 2009-11-17 08:33:14 UTC (rev 3980)
@@ -5,8 +5,10 @@
* kfreebsd/local-sysdeps.diff: update to revision 2847 (from glibc-bsd).
* patches/ia64/cvs-memchr.diff: new patch from upstream replacing
patches/ia64/submitted-memchr.diff.
+ * patches/any/cvs-malloc-check.diff: new patch from upstream to fix bugs
+ with MALLOC_CHECK.
- -- Aurelien Jarno <aurel32@debian.org> Sun, 15 Nov 2009 13:41:08 +0100
+ -- Aurelien Jarno <aurel32@debian.org> Tue, 17 Nov 2009 09:33:01 +0100
eglibc (2.10.1-7) unstable; urgency=low
Added: glibc-package/trunk/debian/patches/any/cvs-malloc-check.diff
===================================================================
--- glibc-package/trunk/debian/patches/any/cvs-malloc-check.diff (rev 0)
+++ glibc-package/trunk/debian/patches/any/cvs-malloc-check.diff 2009-11-17 08:33:14 UTC (rev 3980)
@@ -0,0 +1,113 @@
+2009-11-01 Ulrich Drepper <drepper@redhat.com>
+
+ * malloc/hooks.c (free_check): Restore locking and call _int_free
+ appropriately.
+
+diff --git a/malloc/hooks.c b/malloc/hooks.c
+index 622a815..28845ee 100644
+--- a/malloc/hooks.c
++++ b/malloc/hooks.c
+@@ -162,8 +162,8 @@ mem2chunk_check(mem, magic_p) Void_t* mem; unsigned char **magic_p;
+ ((char*)p + sz)>=(mp_.sbrk_base+main_arena.system_mem) )) ||
+ sz<MINSIZE || sz&MALLOC_ALIGN_MASK || !inuse(p) ||
+ ( !prev_inuse(p) && (p->prev_size&MALLOC_ALIGN_MASK ||
+- (contig && (char*)prev_chunk(p)<mp_.sbrk_base) ||
+- next_chunk(prev_chunk(p))!=p) ))
++ (contig && (char*)prev_chunk(p)<mp_.sbrk_base) ||
++ next_chunk(prev_chunk(p))!=p) ))
+ return NULL;
+ magic = MAGICBYTE(p);
+ for(sz += SIZE_SZ-1; (c = ((unsigned char*)p)[sz]) != magic; sz -= c) {
+@@ -177,9 +177,9 @@ mem2chunk_check(mem, magic_p) Void_t* mem; unsigned char **magic_p;
+ first. */
+ offset = (unsigned long)mem & page_mask;
+ if((offset!=MALLOC_ALIGNMENT && offset!=0 && offset!=0x10 &&
+- offset!=0x20 && offset!=0x40 && offset!=0x80 && offset!=0x100 &&
+- offset!=0x200 && offset!=0x400 && offset!=0x800 && offset!=0x1000 &&
+- offset<0x2000) ||
++ offset!=0x20 && offset!=0x40 && offset!=0x80 && offset!=0x100 &&
++ offset!=0x200 && offset!=0x400 && offset!=0x800 && offset!=0x1000 &&
++ offset<0x2000) ||
+ !chunk_is_mmapped(p) || (p->size & PREV_INUSE) ||
+ ( (((unsigned long)p - p->prev_size) & page_mask) != 0 ) ||
+ ( (sz = chunksize(p)), ((p->prev_size + sz) & page_mask) != 0 ) )
+@@ -276,13 +276,17 @@ free_check(mem, caller) Void_t* mem; const Void_t *caller;
+ mchunkptr p;
+
+ if(!mem) return;
++ (void)mutex_lock(&main_arena.mutex);
+ p = mem2chunk_check(mem, NULL);
+ if(!p) {
++ (void)mutex_unlock(&main_arena.mutex);
++
+ malloc_printerr(check_action, "free(): invalid pointer", mem);
+ return;
+ }
+ #if HAVE_MMAP
+ if (chunk_is_mmapped(p)) {
++ (void)mutex_unlock(&main_arena.mutex);
+ munmap_chunk(p);
+ return;
+ }
+@@ -291,12 +295,11 @@ free_check(mem, caller) Void_t* mem; const Void_t *caller;
+ memset(mem, 0, chunksize(p) - (SIZE_SZ+1));
+ #endif
+ #ifdef ATOMIC_FASTBINS
+- _int_free(&main_arena, p, 0);
++ _int_free(&main_arena, p, 1);
+ #else
+- (void)mutex_lock(&main_arena.mutex);
+ _int_free(&main_arena, p);
+- (void)mutex_unlock(&main_arena.mutex);
+ #endif
++ (void)mutex_unlock(&main_arena.mutex);
+ }
+
+ static Void_t*
+@@ -345,13 +348,13 @@ realloc_check(oldmem, bytes, caller)
+ if(oldsize - SIZE_SZ >= nb)
+ newmem = oldmem; /* do nothing */
+ else {
+- /* Must alloc, copy, free. */
+- if (top_check() >= 0)
++ /* Must alloc, copy, free. */
++ if (top_check() >= 0)
+ newmem = _int_malloc(&main_arena, bytes+1);
+- if (newmem) {
+- MALLOC_COPY(BOUNDED_N(newmem, bytes+1), oldmem, oldsize - 2*SIZE_SZ);
+- munmap_chunk(oldp);
+- }
++ if (newmem) {
++ MALLOC_COPY(BOUNDED_N(newmem, bytes+1), oldmem, oldsize - 2*SIZE_SZ);
++ munmap_chunk(oldp);
++ }
+ }
+ }
+ } else {
+@@ -367,7 +370,7 @@ realloc_check(oldmem, bytes, caller)
+ nb = chunksize(newp);
+ if(oldp<newp || oldp>=chunk_at_offset(newp, nb)) {
+ memset((char*)oldmem + 2*sizeof(mbinptr), 0,
+- oldsize - (2*sizeof(mbinptr)+2*SIZE_SZ+1));
++ oldsize - (2*sizeof(mbinptr)+2*SIZE_SZ+1));
+ } else if(nb > oldsize+SIZE_SZ) {
+ memset((char*)BOUNDED_N(chunk2mem(newp), bytes) + oldsize,
+ 0, nb - (oldsize+SIZE_SZ));
+@@ -626,7 +629,7 @@ public_sET_STATe(Void_t* msptr)
+ mark_bin(&main_arena, i);
+ } else {
+ /* Oops, index computation from chunksize must have changed.
+- Link the whole list into unsorted_chunks. */
++ Link the whole list into unsorted_chunks. */
+ first(b) = last(b) = b;
+ b = unsorted_chunks(&main_arena);
+ ms->av[2*i+2]->bk = b;
+@@ -667,7 +670,7 @@ public_sET_STATe(Void_t* msptr)
+ /* Check whether it is safe to enable malloc checking, or whether
+ it is necessary to disable it. */
+ if (ms->using_malloc_checking && !using_malloc_checking &&
+- !disallow_malloc_check)
++ !disallow_malloc_check)
+ __malloc_check_init ();
+ else if (!ms->using_malloc_checking && using_malloc_checking) {
+ __malloc_hook = NULL;
Modified: glibc-package/trunk/debian/patches/series
===================================================================
--- glibc-package/trunk/debian/patches/series 2009-11-17 00:08:17 UTC (rev 3979)
+++ glibc-package/trunk/debian/patches/series 2009-11-17 08:33:14 UTC (rev 3980)
@@ -215,3 +215,4 @@
any/cvs-preadv-pwritev.diff
any/submitted-sched_h.2.diff
all/local-ldd.diff
+any/cvs-malloc-check.diff
Reply to: