about summary refs log tree commit diff
diff options
context:
space:
mode:
authorFlorian Weimer <fweimer@redhat.com>2016-04-13 14:07:58 -0500
committerPaul E. Murphy <murphyp@linux.vnet.ibm.com>2016-04-13 14:07:58 -0500
commitc252c193e2583e4506b141d052df29a0987ac290 (patch)
tree9b9d6a42bdec1e2822d627bc4d6c95b634fdbdba
parentf664b663118642490b8776dcf4f30524a646dcbc (diff)
downloadglibc-c252c193e2583e4506b141d052df29a0987ac290.tar.gz
glibc-c252c193e2583e4506b141d052df29a0987ac290.tar.xz
glibc-c252c193e2583e4506b141d052df29a0987ac290.zip
malloc: Prevent arena free_list from turning cyclic [BZ #19048]
	[BZ #19048]
	* malloc/malloc.c (struct malloc_state): Update comment.  Add
	attached_threads member.
	(main_arena): Initialize attached_threads.
	* malloc/arena.c (list_lock): Update comment.
	(ptmalloc_lock_all, ptmalloc_unlock_all): Likewise.
	(ptmalloc_unlock_all2): Reinitialize arena reference counts.
	(detach_arena): New function.
	(_int_new_arena): Initialize arena reference count and detach
	replaced arena.
	(get_free_list, reused_arena): Update reference count and detach
	replaced arena.
	(arena_thread_freeres): Update arena reference count and only put
	unreferenced arenas on the free list.

(cherry picked from commit a62719ba90e2fa1728890ae7dc8df9e32a622e7b)
-rw-r--r--ChangeLog17
-rw-r--r--NEWS13
-rw-r--r--malloc/arena.c71
-rw-r--r--malloc/malloc.c11
4 files changed, 104 insertions, 8 deletions
diff --git a/ChangeLog b/ChangeLog
index f7664af614..7032239782 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,20 @@
+2016-04-13  Florian Weimer  <fweimer@redhat.com>
+
+	[BZ #19048]
+	* malloc/malloc.c (struct malloc_state): Update comment.  Add
+	attached_threads member.
+	(main_arena): Initialize attached_threads.
+	* malloc/arena.c (list_lock): Update comment.
+	(ptmalloc_lock_all, ptmalloc_unlock_all): Likewise.
+	(ptmalloc_unlock_all2): Reinitialize arena reference counts.
+	(detach_arena): New function.
+	(_int_new_arena): Initialize arena reference count and detach
+	replaced arena.
+	(get_free_list, reused_arena): Update reference count and detach
+	replaced arena.
+	(arena_thread_freeres): Update arena reference count and only put
+	unreferenced arenas on the free list.
+
 2016-04-12  Paul E. Murphy  <murphyp@linux.vnet.ibm.com>
 
 	[BZ #19853]
diff --git a/NEWS b/NEWS
index 2a43aeca29..7d6a7d689b 100644
--- a/NEWS
+++ b/NEWS
@@ -24,8 +24,8 @@ Version 2.22.1
 * The following bugs are resolved with this release:
 
   17905, 18420, 18421, 18480, 18589, 18743, 18778, 18781, 18787, 18796,
-  18870, 18887, 18921, 18928, 18969, 18985, 19003, 19018, 19058, 19174,
-  19178, 19590, 19682, 19791, 19822, 19853, 19879.
+  18870, 18887, 18921, 18928, 18969, 18985, 19003, 19018, 19048, 19058,
+  19174, 19178, 19590, 19682, 19791, 19822, 19853, 19879.
 
 * The getnetbyname implementation in nss_dns had a potentially unbounded
   alloca call (in the form of a call to strdupa), leading to a stack
@@ -34,6 +34,15 @@ Version 2.22.1
 
 * The LD_POINTER_GUARD environment variable can no longer be used to
   disable the pointer guard feature.  It is always enabled.
+
+* A defect in the malloc implementation, present since glibc 2.15 (2012) or
+  glibc 2.10 via --enable-experimental-malloc (2009), could result in the
+  unnecessary serialization of memory allocation requests across threads.
+  The defect is now corrected.  Users should see a substantial increase in
+  the concurrent throughput of allocation requests for applications which
+  trigger this bug.  Affected applications typically create and
+  destroy threads frequently.  (Bug 19048 was reported and analyzed by
+  Ericsson.)
 
 Version 2.22
 
diff --git a/malloc/arena.c b/malloc/arena.c
index 21ecc5a137..ca5b8a2b45 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -233,7 +233,10 @@ ptmalloc_lock_all (void)
   save_free_hook = __free_hook;
   __malloc_hook = malloc_atfork;
   __free_hook = free_atfork;
-  /* Only the current thread may perform malloc/free calls now. */
+  /* Only the current thread may perform malloc/free calls now.
+     save_arena will be reattached to the current thread, in
+     ptmalloc_unlock_all, so save_arena->attached_threads is not
+     updated.  */
   tsd_getspecific (arena_key, save_arena);
   tsd_setspecific (arena_key, ATFORK_ARENA_PTR);
 out:
@@ -251,6 +254,9 @@ ptmalloc_unlock_all (void)
   if (--atfork_recursive_cntr != 0)
     return;
 
+  /* Replace ATFORK_ARENA_PTR with save_arena.
+     save_arena->attached_threads was not changed in ptmalloc_lock_all
+     and is still correct.  */
   tsd_setspecific (arena_key, save_arena);
   __malloc_hook = save_malloc_hook;
   __free_hook = save_free_hook;
@@ -282,12 +288,19 @@ ptmalloc_unlock_all2 (void)
   tsd_setspecific (arena_key, save_arena);
   __malloc_hook = save_malloc_hook;
   __free_hook = save_free_hook;
+
+  /* Push all arenas to the free list, except save_arena, which is
+     attached to the current thread.  */
+  if (save_arena != NULL)
+    ((mstate) save_arena)->attached_threads = 1;
   free_list = NULL;
   for (ar_ptr = &main_arena;; )
     {
       mutex_init (&ar_ptr->mutex);
       if (ar_ptr != save_arena)
         {
+	  /* This arena is no longer attached to any thread.  */
+	  ar_ptr->attached_threads = 0;
           ar_ptr->next_free = free_list;
           free_list = ar_ptr;
         }
@@ -721,6 +734,22 @@ heap_trim (heap_info *heap, size_t pad)
 
 /* Create a new arena with initial size "size".  */
 
+/* If REPLACED_ARENA is not NULL, detach it from this thread.  Must be
+   called while list_lock is held.  */
+static void
+detach_arena (mstate replaced_arena)
+{
+  if (replaced_arena != NULL)
+    {
+      assert (replaced_arena->attached_threads > 0);
+      /* The current implementation only detaches from main_arena in
+	 case of allocation failure.  This means that it is likely not
+	 beneficial to put the arena on free_list even if the
+	 reference count reaches zero.  */
+      --replaced_arena->attached_threads;
+    }
+}
+
 static mstate
 _int_new_arena (size_t size)
 {
@@ -742,6 +771,7 @@ _int_new_arena (size_t size)
     }
   a = h->ar_ptr = (mstate) (h + 1);
   malloc_init_state (a);
+  a->attached_threads = 1;
   /*a->next = NULL;*/
   a->system_mem = a->max_system_mem = h->size;
   arena_mem += h->size;
@@ -755,12 +785,16 @@ _int_new_arena (size_t size)
   set_head (top (a), (((char *) h + h->size) - ptr) | PREV_INUSE);
 
   LIBC_PROBE (memory_arena_new, 2, a, size);
+  mstate replaced_arena;
+  tsd_getspecific (arena_key, replaced_arena);
   tsd_setspecific (arena_key, (void *) a);
   mutex_init (&a->mutex);
   (void) mutex_lock (&a->mutex);
 
   (void) mutex_lock (&list_lock);
 
+  detach_arena (replaced_arena);
+
   /* Add the new arena to the global list.  */
   a->next = main_arena.next;
   atomic_write_barrier ();
@@ -775,13 +809,26 @@ _int_new_arena (size_t size)
 static mstate
 get_free_list (void)
 {
+  mstate replaced_arena;
   mstate result = free_list;
+
+  tsd_getspecific (arena_key, replaced_arena);
+
   if (result != NULL)
     {
       (void) mutex_lock (&list_lock);
       result = free_list;
       if (result != NULL)
-        free_list = result->next_free;
+	{
+	  free_list = result->next_free;
+
+	  /* Arenas on the free list are not attached to any thread.  */
+	  assert (result->attached_threads == 0);
+	  /* But the arena will now be attached to this thread.  */
+	  result->attached_threads = 1;
+
+	  detach_arena (replaced_arena);
+	}
       (void) mutex_unlock (&list_lock);
 
       if (result != NULL)
@@ -840,6 +887,16 @@ reused_arena (mstate avoid_arena)
   (void) mutex_lock (&result->mutex);
 
 out:
+  {
+    mstate replaced_arena;
+
+    tsd_getspecific (arena_key, replaced_arena);
+    (void) mutex_lock (&list_lock);
+    detach_arena (replaced_arena);
+    ++result->attached_threads;
+    (void) mutex_unlock (&list_lock);
+  }
+
   LIBC_PROBE (memory_arena_reuse, 2, result, avoid_arena);
   tsd_setspecific (arena_key, (void *) result);
   next_to_use = result->next;
@@ -933,8 +990,14 @@ arena_thread_freeres (void)
   if (a != NULL)
     {
       (void) mutex_lock (&list_lock);
-      a->next_free = free_list;
-      free_list = a;
+      /* If this was the last attached thread for this arena, put the
+	 arena on the free list.  */
+      assert (a->attached_threads > 0);
+      if (--a->attached_threads == 0)
+	{
+	  a->next_free = free_list;
+	  free_list = a;
+	}
       (void) mutex_unlock (&list_lock);
     }
 }
diff --git a/malloc/malloc.c b/malloc/malloc.c
index 452f036387..037d4ff19b 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -1709,9 +1709,15 @@ struct malloc_state
   /* Linked list */
   struct malloc_state *next;
 
-  /* Linked list for free arenas.  */
+  /* Linked list for free arenas.  Access to this field is serialized
+     by list_lock in arena.c.  */
   struct malloc_state *next_free;
 
+  /* Number of threads attached to this arena.  0 if the arena is on
+     the free list.  Access to this field is serialized by list_lock
+     in arena.c.  */
+  INTERNAL_SIZE_T attached_threads;
+
   /* Memory allocated from the system in this arena.  */
   INTERNAL_SIZE_T system_mem;
   INTERNAL_SIZE_T max_system_mem;
@@ -1755,7 +1761,8 @@ struct malloc_par
 static struct malloc_state main_arena =
 {
   .mutex = MUTEX_INITIALIZER,
-  .next = &main_arena
+  .next = &main_arena,
+  .attached_threads = 1
 };
 
 /* There is only one instance of the malloc parameters.  */