This is the mail archive of the libc-alpha@sourceware.org mailing list for the glibc project.



[PATCH] Unify malloc locking.


Hi, as another refactoring, this patch makes all allocation logic pass
through one place, the function _mid_malloc.  Decisions such as which
arena to use and retrying with a different arena on failure are made
there.  The arena that was used is reported back through an output
argument.
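
For illustration, the caller side then looks roughly like this (a
minimal sketch; the arena actually chosen comes back through the
pointer argument):

  mstate av;
  void *p;

  /* _mid_malloc looks up or creates an arena, locks it, allocates
     (retrying with a different arena on failure) and unlocks it
     before returning; av is set to the arena that was used.  */
  p = _mid_malloc (&av, bytes);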

I merged the arena_lookup and arena_lock macros into it, since they
were only used in that one place; writing them out explicitly will make
future modifications easier.
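
Spelled out inline (as in the patch below), the lookup-and-lock
sequence becomes roughly:

  void *vptr = NULL;
  *ar_ptr = (mstate) tsd_getspecific (arena_key, vptr);
  if (*ar_ptr)
    /* Reuse the arena already attached to this thread.  */
    (void) mutex_lock (&((*ar_ptr)->mutex));
  else
    /* No arena yet; arena_get2 picks one and returns it locked.  */
    *ar_ptr = arena_get2 (*ar_ptr, bytes, NULL);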

Realloc still needs its own custom locking, which is an additional
case we need to consider.
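
That is because realloc must lock the arena that already owns the
chunk rather than the thread's preferred arena, along these lines
(a simplified sketch of the existing __libc_realloc locking):

  oldp = mem2chunk (oldmem);
  ar_ptr = arena_for_chunk (oldp);
  (void) mutex_lock (&ar_ptr->mutex);
  newp = _int_realloc (ar_ptr, oldp, oldsize, nb);
  (void) mutex_unlock (&ar_ptr->mutex);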

I made a minimal change to keep the hooks working; I plan to add
checksum-based checks, which would be stronger than what we have now.

Comments?

	* malloc/arena.c (arena_lookup, arena_lock): Delete.
	* malloc/malloc.c (_mid_malloc): New function.
	(__libc_malloc): Move functionality to _mid_malloc.
	(_int_memalign, _mid_memalign): Likewise.
	* malloc/hooks.c (malloc_check, realloc_check): Call _mid_malloc
	instead of _int_malloc.
	(memalign_check): Update _int_memalign call.



---
 malloc/arena.c  | 11 -------
 malloc/hooks.c  | 12 +++++---
 malloc/malloc.c | 94 ++++++++++++++++++++++++++-------------------------------
 3 files changed, 51 insertions(+), 66 deletions(-)

diff --git a/malloc/arena.c b/malloc/arena.c
index 60ae9a4..8042274 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -93,17 +93,6 @@ int __malloc_initialized = -1;
       arena_lock (ptr, size);						      \
   } while (0)
 
-#define arena_lookup(ptr) do { \
-      void *vptr = NULL;						      \
-      ptr = (mstate) tsd_getspecific (arena_key, vptr);			      \
-  } while (0)
-
-#define arena_lock(ptr, size) do {					      \
-      if (ptr)								      \
-        (void) mutex_lock (&ptr->mutex);				      \
-      else								      \
-        ptr = arena_get2 (ptr, (size), NULL);				      \
-  } while (0)
 
 /* find the heap and corresponding arena for a given ptr */
 
diff --git a/malloc/hooks.c b/malloc/hooks.c
index 00ee6be..e98b9c7 100644
--- a/malloc/hooks.c
+++ b/malloc/hooks.c
@@ -275,9 +275,11 @@ malloc_check (size_t sz, const void *caller)
       return NULL;
     }
 
+  mstate av;
   (void) mutex_lock (&main_arena.mutex);
-  victim = (top_check () >= 0) ? _int_malloc (&main_arena, sz + 1) : NULL;
+  int check = (top_check () >= 0);
   (void) mutex_unlock (&main_arena.mutex);
+  victim = check ? _mid_malloc (&av, sz + 1) : NULL;
   return mem2mem_check (victim, sz);
 }
 
@@ -355,9 +357,10 @@ realloc_check (void *oldmem, size_t bytes, const void *caller)
           newmem = oldmem; /* do nothing */
         else
           {
+            mstate ar_ptr;
             /* Must alloc, copy, free. */
             if (top_check () >= 0)
-              newmem = _int_malloc (&main_arena, bytes + 1);
+              newmem = _mid_malloc (&ar_ptr, bytes + 1);
             if (newmem)
               {
                 memcpy (newmem, oldmem, oldsize - 2 * SIZE_SZ);
@@ -423,9 +426,10 @@ memalign_check (size_t alignment, size_t bytes, const void *caller)
     }
 
   (void) mutex_lock (&main_arena.mutex);
-  mem = (top_check () >= 0) ? _int_memalign (&main_arena, alignment, bytes + 1) :
-        NULL;
+  int check = (top_check () >= 0);
   (void) mutex_unlock (&main_arena.mutex);
+  mem = check ? _int_memalign (alignment, bytes + 1) :
+        NULL;
   return mem2mem_check (mem, bytes);
 }
 
diff --git a/malloc/malloc.c b/malloc/malloc.c
index 74ad92d..d1aa626 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -1045,11 +1045,12 @@ typedef struct malloc_chunk* mchunkptr;
 
 /* Internal routines.  */
 
+static void * _mid_malloc (mstate *ar_ptr, size_t bytes);
 static void*  _int_malloc(mstate, size_t);
 static void     _int_free(mstate, mchunkptr, int);
 static void*  _int_realloc(mstate, mchunkptr, INTERNAL_SIZE_T,
 			   INTERNAL_SIZE_T);
-static void*  _int_memalign(mstate, size_t, size_t);
+static void*  _int_memalign(size_t, size_t);
 static void*  _mid_memalign(size_t, size_t, void *);
 
 static void malloc_printerr(int action, const char *str, void *ptr);
@@ -2861,42 +2862,55 @@ mremap_chunk (mchunkptr p, size_t new_size)
 }
 #endif /* HAVE_MREMAP */
 
-/*------------------------ Public wrappers. --------------------------------*/
 
-void *
-__libc_malloc (size_t bytes)
+
+static void *
+_mid_malloc (mstate *ar_ptr, size_t bytes)
 {
-  mstate ar_ptr;
+  void *vptr = NULL;
   void *victim;
+  *ar_ptr = (mstate) tsd_getspecific (arena_key, vptr);
 
-  void *(*hook) (size_t, const void *)
-    = atomic_forced_read (__malloc_hook);
-  if (__builtin_expect (hook != NULL, 0))
-    return (*hook)(bytes, RETURN_ADDRESS (0));
-
-  arena_lookup (ar_ptr);
+  if (*ar_ptr)
+    (void) mutex_lock (&((*ar_ptr)->mutex));
+  else
+    *ar_ptr = arena_get2 ((*ar_ptr), (bytes), NULL);
 
-  arena_lock (ar_ptr, bytes);
-  if (!ar_ptr)
+  if (!*ar_ptr)
     return 0;
 
-  victim = _int_malloc (ar_ptr, bytes);
+  victim = _int_malloc (*ar_ptr, bytes);
   if (!victim)
     {
       LIBC_PROBE (memory_malloc_retry, 1, bytes);
-      ar_ptr = arena_get_retry (ar_ptr, bytes);
-      if (__builtin_expect (ar_ptr != NULL, 1))
-        {
-          victim = _int_malloc (ar_ptr, bytes);
-          (void) mutex_unlock (&ar_ptr->mutex);
-        }
+      *ar_ptr = arena_get_retry (*ar_ptr, bytes);
+      if (__builtin_expect (*ar_ptr != NULL, 1))
+        victim = _int_malloc (*ar_ptr, bytes);
     }
-  else
-    (void) mutex_unlock (&ar_ptr->mutex);
-  assert (!victim || chunk_is_mmapped (mem2chunk (victim)) ||
-          ar_ptr == arena_for_chunk (mem2chunk (victim)));
+
+  if (*ar_ptr)
+    (void) mutex_unlock (&((*ar_ptr)->mutex));
+
   return victim;
 }
+
+
+/*------------------------ Public wrappers. --------------------------------*/
+
+void *
+__libc_malloc (size_t bytes)
+{
+  mstate av;
+
+  void *(*hook) (size_t, const void *)
+    = atomic_forced_read (__malloc_hook);
+
+  if (__builtin_expect (hook != NULL, 0))
+    return (*hook)(bytes, RETURN_ADDRESS (0));
+
+  return _mid_malloc (&av, bytes);
+}
+
 libc_hidden_def (__libc_malloc)
 
 void
@@ -3040,9 +3054,6 @@ __libc_memalign (size_t alignment, size_t bytes)
 static void *
 _mid_memalign (size_t alignment, size_t bytes, void *address)
 {
-  mstate ar_ptr;
-  void *p;
-
   void *(*hook) (size_t, size_t, const void *) =
     atomic_forced_read (__memalign_hook);
   if (__builtin_expect (hook != NULL, 0))
@@ -3081,26 +3092,7 @@ _mid_memalign (size_t alignment, size_t bytes, void *address)
       alignment = a;
     }
 
-  arena_get (ar_ptr, bytes + alignment + MINSIZE);
-  if (!ar_ptr)
-    return 0;
-
-  p = _int_memalign (ar_ptr, alignment, bytes);
-  if (!p)
-    {
-      LIBC_PROBE (memory_memalign_retry, 2, bytes, alignment);
-      ar_ptr = arena_get_retry (ar_ptr, bytes);
-      if (__builtin_expect (ar_ptr != NULL, 1))
-        {
-          p = _int_memalign (ar_ptr, alignment, bytes);
-          (void) mutex_unlock (&ar_ptr->mutex);
-        }
-    }
-  else
-    (void) mutex_unlock (&ar_ptr->mutex);
-  assert (!p || chunk_is_mmapped (mem2chunk (p)) ||
-          ar_ptr == arena_for_chunk (mem2chunk (p)));
-  return p;
+  return _int_memalign (alignment, bytes);
 }
 /* For ISO C11.  */
 weak_alias (__libc_memalign, aligned_alloc)
@@ -4245,7 +4237,7 @@ _int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
  */
 
 static void *
-_int_memalign (mstate av, size_t alignment, size_t bytes)
+_int_memalign (size_t alignment, size_t bytes)
 {
   INTERNAL_SIZE_T nb;             /* padded  request size */
   char *m;                        /* memory returned by malloc call */
@@ -4257,7 +4249,7 @@ _int_memalign (mstate av, size_t alignment, size_t bytes)
   mchunkptr remainder;            /* spare room at end to split off */
   unsigned long remainder_size;   /* its size */
   INTERNAL_SIZE_T size;
-
+  mstate av;
 
 
   checked_request2size (bytes, nb);
@@ -4270,7 +4262,7 @@ _int_memalign (mstate av, size_t alignment, size_t bytes)
 
   /* Call malloc with worst case padding to hit alignment. */
 
-  m = (char *) (_int_malloc (av, nb + alignment + MINSIZE));
+  m = (char *) (_mid_malloc (&av, nb + alignment + MINSIZE));
 
   if (m == 0)
     return 0;           /* propagate failure */
@@ -4330,7 +4322,7 @@ _int_memalign (mstate av, size_t alignment, size_t bytes)
         }
     }
 
-  check_inuse_chunk (av, p);
+  check_inuse_chunk (av, p);
   return chunk2mem (p);
 }
 
-- 
1.8.4.rc3

