author    Ulrich Drepper <drepper@redhat.com>    2009-04-08 18:00:34 +0000
committer Ulrich Drepper <drepper@redhat.com>    2009-04-08 18:00:34 +0000
commit    4c8b8cc332a4581f7d1627c80030abb922940bfe (patch)
tree      7cf57d5b88ade8149680ccccb21b5ba3e14ac4a7 /malloc
parent    cd57745bd826356caa533cc2df0cab2aadc883f1 (diff)
* malloc/malloc.c (_int_realloc): Add parameter with old block
	size.  Remove duplicated test.  Don't handle mmap'ed blocks here.
	Adjust all callers.
	* malloc/hooks.c (realloc_check): Adjust _int_realloc call.
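In caller terms, the change reshapes the internal entry point as follows. A compilable sketch with the glibc internals stubbed out (the typedefs and bit masks below are simplified stand-ins, not the real definitions):

#include <stddef.h>

typedef size_t INTERNAL_SIZE_T;
typedef struct { INTERNAL_SIZE_T size; } *mchunkptr;  /* toy stand-in for malloc_chunk */

#define IS_MMAPPED 0x2
#define chunksize(p)        ((p)->size & ~(INTERNAL_SIZE_T) 0x7)
#define chunk_is_mmapped(p) ((p)->size & IS_MMAPPED)

/* New four-argument shape: the old block size comes in from the
   caller, and mmap'ed chunks are no longer handled here.  */
static void *
int_realloc_sketch (mchunkptr oldp, INTERNAL_SIZE_T oldsize, INTERNAL_SIZE_T nb)
{
  (void) nb;
  return oldsize ? (void *) oldp : 0;  /* real body elided */
}

static void *
caller_sketch (mchunkptr oldp, INTERNAL_SIZE_T nb)
{
  /* Callers now compute oldsize exactly once and keep mmap'ed chunks
     away from the internal routine; in glibc the mremap/copy path
     lives in public_rEALLOc.  */
  const INTERNAL_SIZE_T oldsize = chunksize (oldp);
  if (chunk_is_mmapped (oldp))
    return 0;                          /* placeholder for the mmap path */
  return int_realloc_sketch (oldp, oldsize, nb);
}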
Diffstat (limited to 'malloc')
-rw-r--r--  malloc/hooks.c    26
-rw-r--r--  malloc/malloc.c   33
2 files changed, 40 insertions(+), 19 deletions(-)
diff --git a/malloc/hooks.c b/malloc/hooks.c
index fe89db83f4..72c29293d9 100644
--- a/malloc/hooks.c
+++ b/malloc/hooks.c
@@ -358,7 +358,7 @@ realloc_check(oldmem, bytes, caller)
     if (top_check() >= 0) {
       INTERNAL_SIZE_T nb;
       checked_request2size(bytes + 1, nb);
-      newmem = _int_realloc(&main_arena, oldp, nb);
+      newmem = _int_realloc(&main_arena, oldp, oldsize, nb);
     }
 #if 0 /* Erase freed memory. */
     if(newmem)
@@ -501,7 +501,7 @@ free_starter(mem, caller) Void_t* mem; const Void_t *caller;
    then the hooks are reset to 0.  */
 
 #define MALLOC_STATE_MAGIC   0x444c4541l
-#define MALLOC_STATE_VERSION (0*0x100l + 3l) /* major*0x100 + minor */
+#define MALLOC_STATE_VERSION (0*0x100l + 4l) /* major*0x100 + minor */
 
 struct malloc_save_state {
   long          magic;
@@ -521,6 +521,10 @@ struct malloc_save_state {
   unsigned long mmapped_mem;
   unsigned long max_mmapped_mem;
   int           using_malloc_checking;
+  unsigned long max_fast;
+  unsigned long arena_test;
+  unsigned long arena_max;
+  unsigned long narenas;
 };
 
 Void_t*
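Note that the four new members are appended at the end of struct malloc_save_state, so the offset of every pre-existing field is unchanged and a version-3 dump can still be read through the same struct. A compile-time way to state that invariant (illustrative toy layouts, not the real struct):

#include <stddef.h>

/* Trimmed stand-ins for the old (v3) and new (v4) layouts.  */
struct save_state_v3 { long magic; long version;
                       int using_malloc_checking; };
struct save_state_v4 { long magic; long version;
                       int using_malloc_checking;
                       unsigned long max_fast;  unsigned long arena_test;
                       unsigned long arena_max; unsigned long narenas; };

/* Appending fields must not move the shared prefix.  */
_Static_assert (offsetof (struct save_state_v3, using_malloc_checking)
                == offsetof (struct save_state_v4, using_malloc_checking),
                "v4 must extend v3 without moving existing fields");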
@@ -568,6 +572,12 @@ public_gET_STATe(void)
   ms->mmapped_mem = mp_.mmapped_mem;
   ms->max_mmapped_mem = mp_.max_mmapped_mem;
   ms->using_malloc_checking = using_malloc_checking;
+  ms->max_fast = get_max_fast();
+#ifdef PER_THREAD
+  ms->arena_test = mp_.arena_test;
+  ms->arena_max = mp_.arena_max;
+  ms->narenas = narenas;
+#endif
   (void)mutex_unlock(&main_arena.mutex);
   return (Void_t*)ms;
 }
@@ -587,7 +597,10 @@ public_sET_STATe(Void_t* msptr)
   (void)mutex_lock(&main_arena.mutex);
   /* There are no fastchunks.  */
   clear_fastchunks(&main_arena);
-  set_max_fast(DEFAULT_MXFAST);
+  if (ms->version >= 4)
+    set_max_fast(ms->max_fast);
+  else
+    set_max_fast(64);	/* 64 used to be the value we always used.  */
   for (i=0; i<NFASTBINS; ++i)
     fastbin (&main_arena, i) = 0;
   for (i=0; i<BINMAPSIZE; ++i)
@@ -663,6 +676,13 @@ public_sET_STATe(Void_t* msptr)
       using_malloc_checking = 0;
     }
   }
+  if (ms->version >= 4) {
+#ifdef PER_THREAD
+    mp_.arena_test = ms->arena_test;
+    mp_.arena_max = ms->arena_max;
+    narenas = ms->narenas;
+#endif
+  }
   check_malloc_state(&main_arena);
 
   (void)mutex_unlock(&main_arena.mutex);
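The new fields only pay off across a save/restore cycle. A minimal usage sketch of the public pair, malloc_get_state and malloc_set_state, the exported names behind public_gET_STATe/public_sET_STATe above (glibc of this era; error handling abbreviated):

#include <malloc.h>
#include <stdlib.h>

int main (void)
{
  /* Snapshot; since state version 4 this also records max_fast and,
     under PER_THREAD, the arena_test/arena_max/narenas parameters.  */
  void *state = malloc_get_state ();
  if (state == NULL)
    return 1;

  free (malloc (128));                 /* arbitrary allocator activity */

  /* Restore.  A pre-version-4 dump instead falls back to the
     historical max_fast of 64, per the hunk above.  */
  if (malloc_set_state (state) != 0)
    return 1;

  free (state);
  return 0;
}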
diff --git a/malloc/malloc.c b/malloc/malloc.c
index bb7ea36c80..48a73799e2 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -1586,7 +1586,8 @@ static void     _int_free(mstate, mchunkptr, int);
 #else
 static void     _int_free(mstate, mchunkptr);
 #endif
-static Void_t*  _int_realloc(mstate, mchunkptr, INTERNAL_SIZE_T);
+static Void_t*  _int_realloc(mstate, mchunkptr, INTERNAL_SIZE_T,
+			     INTERNAL_SIZE_T);
 static Void_t*  _int_memalign(mstate, size_t, size_t);
 static Void_t*  _int_valloc(mstate, size_t);
 static Void_t*  _int_pvalloc(mstate, size_t);
@@ -3778,7 +3779,7 @@ public_rEALLOc(Void_t* oldmem, size_t bytes)
   tsd_setspecific(arena_key, (Void_t *)ar_ptr);
 #endif
 
-  newp = _int_realloc(ar_ptr, oldp, nb);
+  newp = _int_realloc(ar_ptr, oldp, oldsize, nb);
 
   (void)mutex_unlock(&ar_ptr->mutex);
   assert(!newp || chunk_is_mmapped(mem2chunk(newp)) ||
@@ -5102,7 +5103,8 @@ static void malloc_consolidate(av) mstate av;
 */
 
 Void_t*
-_int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T nb)
+_int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
+	     INTERNAL_SIZE_T nb)
 {
   mchunkptr        newp;            /* chunk to return */
   INTERNAL_SIZE_T  newsize;         /* its size */
@@ -5123,28 +5125,25 @@ _int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T nb)
 
   const char *errstr = NULL;
 
-  /* Simple tests for old block integrity.  */
-  if (__builtin_expect (misaligned_chunk (oldp), 0))
-    {
-      errstr = "realloc(): invalid pointer";
-    errout:
-      malloc_printerr (check_action, errstr, chunk2mem(oldp));
-      return NULL;
-    }
-
   /* oldmem size */
-  const INTERNAL_SIZE_T oldsize = chunksize(oldp);
-
   if (__builtin_expect (oldp->size <= 2 * SIZE_SZ, 0)
       || __builtin_expect (oldsize >= av->system_mem, 0))
     {
       errstr = "realloc(): invalid old size";
-      goto errout;
+    errout:
+      malloc_printerr (check_action, errstr, chunk2mem(oldp));
+      return NULL;
     }
 
   check_inuse_chunk(av, oldp);
 
-  if (!chunk_is_mmapped(oldp)) {
+  /* All callers already filter out mmap'ed chunks.  */
+#if 0
+  if (!chunk_is_mmapped(oldp))
+#else
+  assert (!chunk_is_mmapped(oldp));
+#endif
+  {
 
     next = chunk_at_offset(oldp, oldsize);
     INTERNAL_SIZE_T nextsize = chunksize(next);
@@ -5271,6 +5270,7 @@ _int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T nb)
     return chunk2mem(newp);
   }
 
+#if 0
   /*
     Handle mmap cases
   */
@@ -5339,6 +5339,7 @@ _int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T nb)
     return 0;
 #endif
   }
+#endif
 }
 
 /*
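For reference, the ms->version >= 4 tests in hooks.c work because MALLOC_STATE_VERSION encodes major*0x100 + minor and the major number is still 0, so the whole value compares like the minor number. A standalone illustration (not glibc source):

#include <stdio.h>

#define MALLOC_STATE_VERSION (0*0x100l + 4l)  /* major*0x100 + minor */

int main (void)
{
  long v3 = 0*0x100l + 3l;                    /* dumps from before this patch */
  printf ("v3 dump carries new fields: %s\n", v3 >= 4 ? "yes" : "no");
  printf ("v4 dump carries new fields: %s\n",
          MALLOC_STATE_VERSION >= 4 ? "yes" : "no");
  return 0;
}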