
[PATCH 2/2] Consolidate valloc/pvalloc code.


Hi,

As valloc/pvalloc are legacy functionality, they should occupy a minimal
amount of code.  Here we redirect valloc/pvalloc calls to memalign, as
sketched below.
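
In effect both entry points become thin wrappers.  As a sketch, this is
what the two functions reduce to after this patch (hook dispatch and the
overflow check omitted; the *_equiv names are illustrative only):

  void *
  valloc_equiv (size_t bytes)
  {
    size_t pagesz = GLRO(dl_pagesize);
    return __libc_memalign (pagesz, bytes);
  }

  void *
  pvalloc_equiv (size_t bytes)
  {
    size_t pagesz = GLRO(dl_pagesize);
    size_t page_mask = pagesz - 1;
    /* pvalloc additionally rounds the request up to a whole page.  */
    return __libc_memalign (pagesz, (bytes + page_mask) & ~page_mask);
  }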

This allows us to simplify _int_memalign, as several checks are already
done in __libc_memalign.
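
For reference, here is the alignment normalization that now happens once
in __libc_memalign, written as a standalone helper (the name is
illustrative; the concrete MALLOC_ALIGNMENT value is platform-dependent):

  /* Round a non-power-of-two alignment up to the next power of two,
     starting from MALLOC_ALIGNMENT * 2, as in the hunk below.  */
  static size_t
  normalize_alignment (size_t alignment)
  {
    if ((alignment & (alignment - 1)) != 0)
      {
        size_t a = MALLOC_ALIGNMENT * 2;
        while (a < alignment)
          a <<= 1;
        alignment = a;
      }
    return alignment;
  }

With MALLOC_ALIGNMENT == 16, for example, a memalign call with alignment
24 has the alignment rounded up to 32.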

We could go further with the refactoring, as the hook call is almost
duplicated; the catch is that we would need to define a new function and
pass the caller address as an argument, roughly as sketched below.
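
For illustration, such a helper could look roughly like this
(memalign_hook_call is a hypothetical name, not part of this patch); the
caller address must be threaded through so the hook still sees
RETURN_ADDRESS (0) of the public entry point:

  /* Hypothetical sketch only.  Returns the hook result, or NULL when
     no hook is installed.  */
  static void *
  memalign_hook_call (size_t alignment, size_t bytes, const void *caller)
  {
    void *(*hook) (size_t, size_t, const void *) =
      force_reg (__memalign_hook);
    if (__builtin_expect (hook != NULL, 0))
      return (*hook)(alignment, bytes, caller);
    return NULL;
  }

One wrinkle: a NULL return is then ambiguous between "no hook installed"
and a hook that failed, so the real refactoring would need a slightly
different signature.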

Comments?

	* malloc/malloc.c (__libc_valloc, __libc_pvalloc): Call __libc_memalign.
	(__libc_memalign): Add rounding of alignment.
	(_int_memalign): Remove redundant checks.
	(_int_valloc, _int_pvalloc): Delete.
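
One note on the new pvalloc overflow check: the size is rounded first,
and "bytes > rounded_bytes" relies on unsigned wraparound.  When
bytes + page_mask overflows SIZE_MAX, rounded_bytes wraps to a value
below bytes, so exactly the requests that cannot be rounded up to a
whole page are rejected.  With a 4096-byte page and
bytes == SIZE_MAX - 100, for instance, bytes + 4095 wraps around and
rounded_bytes becomes 0, so the request fails with ENOMEM.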

diff --git a/malloc/malloc.c b/malloc/malloc.c
index 897c43a..e7fbc2b 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -1054,8 +1054,6 @@ static void     _int_free(mstate, mchunkptr, int);
 static void*  _int_realloc(mstate, mchunkptr, INTERNAL_SIZE_T,
 			   INTERNAL_SIZE_T);
 static void*  _int_memalign(mstate, size_t, size_t);
-static void*  _int_valloc(mstate, size_t);
-static void*  _int_pvalloc(mstate, size_t);
 static void malloc_printerr(int action, const char *str, void *ptr);
 
 static void* internal_function mem2mem_check(void *p, size_t sz);
@@ -3005,6 +3003,9 @@ __libc_memalign(size_t alignment, size_t bytes)
   mstate ar_ptr;
   void *p;
 
+  if(__malloc_initialized < 0)
+    ptmalloc_init ();
+
   void *(*hook) (size_t, size_t, const void *) =
     force_reg (__memalign_hook);
   if (__builtin_expect (hook != NULL, 0))
@@ -3024,6 +3025,13 @@ __libc_memalign(size_t alignment, size_t bytes)
       return 0;
     }
 
+  /* Make sure alignment is power of 2 (in case MINSIZE is not).  */
+  if ((alignment & (alignment - 1)) != 0) {
+    size_t a = MALLOC_ALIGNMENT * 2;
+    while ((unsigned long)a < (unsigned long)alignment) a <<= 1;
+    alignment = a;
+  }
+
   /* Check for overflow.  */
   if (bytes > SIZE_MAX - alignment - MINSIZE)
     {
@@ -3034,6 +3042,9 @@ __libc_memalign(size_t alignment, size_t bytes)
   arena_get(ar_ptr, bytes + alignment + MINSIZE);
   if(!ar_ptr)
     return 0;
+
+  if (have_fastchunks(ar_ptr) && alignment >= GLRO(dl_pagesize))
+    malloc_consolidate(ar_ptr);
   p = _int_memalign(ar_ptr, alignment, bytes);
   if(!p) {
     LIBC_PROBE (memory_memalign_retry, 2, bytes, alignment);
@@ -3055,51 +3066,22 @@ libc_hidden_def (__libc_memalign)
 void*
 __libc_valloc(size_t bytes)
 {
-  mstate ar_ptr;
-  void *p;
-
   if(__malloc_initialized < 0)
     ptmalloc_init ();
 
   size_t pagesz = GLRO(dl_pagesize);
 
-  /* Check for overflow.  */
-  if (bytes > SIZE_MAX - pagesz - MINSIZE)
-    {
-      __set_errno (ENOMEM);
-      return 0;
-    }
-
   void *(*hook) (size_t, size_t, const void *) =
     force_reg (__memalign_hook);
   if (__builtin_expect (hook != NULL, 0))
     return (*hook)(pagesz, bytes, RETURN_ADDRESS (0));
 
-  arena_get(ar_ptr, bytes + pagesz + MINSIZE);
-  if(!ar_ptr)
-    return 0;
-  p = _int_valloc(ar_ptr, bytes);
-  if(!p) {
-    LIBC_PROBE (memory_valloc_retry, 1, bytes);
-    ar_ptr = arena_get_retry (ar_ptr, bytes);
-    if (__builtin_expect(ar_ptr != NULL, 1)) {
-      p = _int_memalign(ar_ptr, pagesz, bytes);
-      (void)mutex_unlock(&ar_ptr->mutex);
-    }
-  } else
-    (void)mutex_unlock (&ar_ptr->mutex);
-  assert(!p || chunk_is_mmapped(mem2chunk(p)) ||
-	 ar_ptr == arena_for_chunk(mem2chunk(p)));
-
-  return p;
+  return __libc_memalign(pagesz, bytes);
 }
 
 void*
 __libc_pvalloc(size_t bytes)
 {
-  mstate ar_ptr;
-  void *p;
-
   if(__malloc_initialized < 0)
     ptmalloc_init ();
 
@@ -3108,7 +3090,7 @@ __libc_pvalloc(size_t bytes)
   size_t rounded_bytes = (bytes + page_mask) & ~(page_mask);
 
   /* Check for overflow.  */
-  if (bytes > SIZE_MAX - 2*pagesz - MINSIZE)
+  if (bytes > rounded_bytes)
     {
       __set_errno (ENOMEM);
       return 0;
@@ -3119,21 +3101,7 @@ __libc_pvalloc(size_t bytes)
   if (__builtin_expect (hook != NULL, 0))
     return (*hook)(pagesz, rounded_bytes, RETURN_ADDRESS (0));
 
-  arena_get(ar_ptr, bytes + 2*pagesz + MINSIZE);
-  p = _int_pvalloc(ar_ptr, bytes);
-  if(!p) {
-    LIBC_PROBE (memory_pvalloc_retry, 1, bytes);
-    ar_ptr = arena_get_retry (ar_ptr, bytes + 2*pagesz + MINSIZE);
-    if (__builtin_expect(ar_ptr != NULL, 1)) {
-      p = _int_memalign(ar_ptr, pagesz, rounded_bytes);
-      (void)mutex_unlock(&ar_ptr->mutex);
-    }
-  } else
-    (void)mutex_unlock(&ar_ptr->mutex);
-  assert(!p || chunk_is_mmapped(mem2chunk(p)) ||
-	 ar_ptr == arena_for_chunk(mem2chunk(p)));
-
-  return p;
+  return __libc_memalign(pagesz, rounded_bytes);
 }
 
 void*
@@ -4318,20 +4286,5 @@ _int_memalign(mstate av, size_t alignment, size_t bytes)
   unsigned long   remainder_size; /* its size */
   INTERNAL_SIZE_T size;
 
-  /* If need less alignment than we give anyway, just relay to malloc */
-
-  if (alignment <= MALLOC_ALIGNMENT) return _int_malloc(av, bytes);
-
-  /* Otherwise, ensure that it is at least a minimum chunk size */
-
-  if (alignment <  MINSIZE) alignment = MINSIZE;
-
-  /* Make sure alignment is power of 2 (in case MINSIZE is not).  */
-  if ((alignment & (alignment - 1)) != 0) {
-    size_t a = MALLOC_ALIGNMENT * 2;
-    while ((unsigned long)a < (unsigned long)alignment) a <<= 1;
-    alignment = a;
-  }
-
   checked_request2size(bytes, nb);
 
@@ -4406,35 +4359,6 @@ _int_memalign(mstate av, size_t alignment, size_t bytes)
 
 
 /*
-  ------------------------------ valloc ------------------------------
-*/
-
-static void*
-_int_valloc(mstate av, size_t bytes)
-{
-  /* Ensure initialization/consolidation */
-  if (have_fastchunks(av)) malloc_consolidate(av);
-  return _int_memalign(av, GLRO(dl_pagesize), bytes);
-}
-
-/*
-  ------------------------------ pvalloc ------------------------------
-*/
-
-
-static void*
-_int_pvalloc(mstate av, size_t bytes)
-{
-  size_t pagesz;
-
-  /* Ensure initialization/consolidation */
-  if (have_fastchunks(av)) malloc_consolidate(av);
-  pagesz = GLRO(dl_pagesize);
-  return _int_memalign(av, pagesz, (bytes + pagesz - 1) & ~(pagesz - 1));
-}
-
-
-/*
   ------------------------------ malloc_trim ------------------------------
 */
 

