This is the mail archive of the libc-alpha@sourceware.org mailing list for the glibc project.



[PATCH] malloc: remove mstate typedef


From: Jörn Engel <joern@purestorage.com>

The mstate typedef hides the fact that arena handles are pointers to
struct malloc_state.  Spell the type out at each declaration and cast
to improve readability.

JIRA: PURE-27597
---
 tpc/malloc2.13/arena.ch |  42 ++++++++++----------
 tpc/malloc2.13/malloc.c | 103 ++++++++++++++++++++++++------------------------
 2 files changed, 72 insertions(+), 73 deletions(-)
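
The rename is mechanical and changes no behavior; as a minimal sketch
of the pattern applied throughout both files, using the names from the
patch below:

	/* Before: the typedef obscures that ar_ptr is a pointer.  */
	typedef struct malloc_state *mstate;
	mstate ar_ptr;

	/* After: the pointer type is explicit at the use site.  */
	struct malloc_state *ar_ptr;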

diff --git a/tpc/malloc2.13/arena.ch b/tpc/malloc2.13/arena.ch
index b8e7c611c42c..0aaccb914d92 100644
--- a/tpc/malloc2.13/arena.ch
+++ b/tpc/malloc2.13/arena.ch
@@ -57,7 +57,7 @@
    USE_ARENAS. */
 
 typedef struct _heap_info {
-  mstate ar_ptr; /* Arena for this heap. */
+  struct malloc_state * ar_ptr; /* Arena for this heap. */
   struct _heap_info *prev; /* Previous heap. */
   size_t size;   /* Current size in bytes. */
   size_t mprotect_size;	/* Size in bytes that has been mprotected
@@ -80,7 +80,7 @@ static tsd_key_t arena_key;
 static mutex_t list_lock;
 #ifdef PER_THREAD
 static size_t narenas;
-static mstate free_list;
+static struct malloc_state * free_list;
 #endif
 
 #if THREAD_STATS
@@ -115,7 +115,7 @@ static int __malloc_initialized = -1;
 
 #define arena_lookup(ptr) do { \
   Void_t *vptr = NULL; \
-  ptr = (mstate)tsd_getspecific(arena_key, vptr); \
+  ptr = (struct malloc_state *)tsd_getspecific(arena_key, vptr); \
 } while(0)
 
 #ifdef PER_THREAD
@@ -224,7 +224,7 @@ static void
 free_atfork(Void_t* mem, const Void_t *caller)
 {
   Void_t *vptr = NULL;
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
   mchunkptr p;                          /* chunk corresponding to mem */
 
   if (mem == 0)                              /* free(0) has no effect */
@@ -268,7 +268,7 @@ static unsigned int atfork_recursive_cntr;
 static void
 ptmalloc_lock_all (void)
 {
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
 
   if(__malloc_initialized < 1)
     return;
@@ -303,7 +303,7 @@ ptmalloc_lock_all (void)
 static void
 ptmalloc_unlock_all (void)
 {
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
 
   if(__malloc_initialized < 1)
     return;
@@ -330,7 +330,7 @@ ptmalloc_unlock_all (void)
 static void
 ptmalloc_unlock_all2 (void)
 {
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
 
   if(__malloc_initialized < 1)
     return;
@@ -649,7 +649,7 @@ dump_heap(heap_info *heap)
   mchunkptr p;
 
   fprintf(stderr, "Heap %p, size %10lx:\n", heap, (long)heap->size);
-  ptr = (heap->ar_ptr != (mstate)(heap+1)) ?
+  ptr = (heap->ar_ptr != (struct malloc_state *)(heap+1)) ?
     (char*)(heap + 1) : (char*)(heap + 1) + sizeof(struct malloc_state);
   p = (mchunkptr)(((unsigned long)ptr + MALLOC_ALIGN_MASK) &
 		  ~MALLOC_ALIGN_MASK);
@@ -812,7 +812,7 @@ static int
 internal_function
 heap_trim(heap_info *heap, size_t pad)
 {
-  mstate ar_ptr = heap->ar_ptr;
+  struct malloc_state * ar_ptr = heap->ar_ptr;
   unsigned long pagesz = mp_.pagesize;
   mchunkptr top_chunk = top(ar_ptr), p, bck, fwd;
   heap_info *prev_heap;
@@ -863,10 +863,10 @@ heap_trim(heap_info *heap, size_t pad)
 
 /* Create a new arena with initial size "size".  */
 
-static mstate
+static struct malloc_state *
 _int_new_arena(size_t size)
 {
-  mstate a;
+  struct malloc_state * a;
   heap_info *h;
   char *ptr;
   unsigned long misalign;
@@ -881,7 +881,7 @@ _int_new_arena(size_t size)
     if(!h)
       return 0;
   }
-  a = h->ar_ptr = (mstate)(h+1);
+  a = h->ar_ptr = (struct malloc_state *)(h+1);
   malloc_init_state(a);
   /*a->next = NULL;*/
   a->system_mem = a->max_system_mem = h->size;
@@ -926,10 +926,10 @@ _int_new_arena(size_t size)
 
 
 #ifdef PER_THREAD
-static mstate
+static struct malloc_state *
 get_free_list (void)
 {
-  mstate result = free_list;
+  struct malloc_state * result = free_list;
   if (result != NULL)
     {
       (void)mutex_lock(&list_lock);
@@ -950,7 +950,7 @@ get_free_list (void)
 }
 
 
-static mstate
+static struct malloc_state *
 reused_arena (void)
 {
   if (narenas <= mp_.arena_test)
@@ -977,8 +977,8 @@ reused_arena (void)
   if (narenas < narenas_limit)
     return NULL;
 
-  mstate result;
-  static mstate next_to_use;
+  struct malloc_state * result;
+  static struct malloc_state * next_to_use;
   if (next_to_use == NULL)
     next_to_use = &main_arena;
 
@@ -1004,11 +1004,11 @@ reused_arena (void)
 }
 #endif
 
-static mstate
+static struct malloc_state *
 internal_function
-arena_get2(mstate a_tsd, size_t size)
+arena_get2(struct malloc_state * a_tsd, size_t size)
 {
-  mstate a;
+  struct malloc_state * a;
 
 #ifdef PER_THREAD
   if ((a = get_free_list ()) == NULL
@@ -1069,7 +1069,7 @@ static void __attribute__ ((section ("__libc_thread_freeres_fn")))
 arena_thread_freeres (void)
 {
   Void_t *vptr = NULL;
-  mstate a = tsd_getspecific(arena_key, vptr);
+  struct malloc_state * a = tsd_getspecific(arena_key, vptr);
   tsd_setspecific(arena_key, NULL);
 
   if (a != NULL)
diff --git a/tpc/malloc2.13/malloc.c b/tpc/malloc2.13/malloc.c
index b17b17bba4d4..6b75c9a6beb0 100644
--- a/tpc/malloc2.13/malloc.c
+++ b/tpc/malloc2.13/malloc.c
@@ -1484,7 +1484,6 @@ int      dlposix_memalign(void **, size_t, size_t);
 #include <dlmalloc.h>
 
 struct malloc_state;
-typedef struct malloc_state *mstate;
 
 #define __malloc_ptr_t void *
 
@@ -1528,27 +1527,27 @@ struct mallinfo2 {
 /* Internal routines.  */
 
 
-static Void_t*  _int_malloc(mstate, size_t);
+static Void_t*  _int_malloc(struct malloc_state *, size_t);
 #ifdef ATOMIC_FASTBINS
-static void     _int_free(mstate, mchunkptr, int);
+static void     _int_free(struct malloc_state *, mchunkptr, int);
 #else
-static void     _int_free(mstate, mchunkptr);
+static void     _int_free(struct malloc_state *, mchunkptr);
 #endif
-static Void_t*  _int_realloc(mstate, mchunkptr, INTERNAL_SIZE_T,
+static Void_t*  _int_realloc(struct malloc_state *, mchunkptr, INTERNAL_SIZE_T,
 			     INTERNAL_SIZE_T);
-static Void_t*  _int_memalign(mstate, size_t, size_t);
-static Void_t*  _int_valloc(mstate, size_t);
-static Void_t*  _int_pvalloc(mstate, size_t);
+static Void_t*  _int_memalign(struct malloc_state *, size_t, size_t);
+static Void_t*  _int_valloc(struct malloc_state *, size_t);
+static Void_t*  _int_pvalloc(struct malloc_state *, size_t);
 /*static Void_t*  cALLOc(size_t, size_t);*/
 #ifndef _LIBC
-static Void_t** _int_icalloc(mstate, size_t, size_t, Void_t**);
-static Void_t** _int_icomalloc(mstate, size_t, size_t*, Void_t**);
+static Void_t** _int_icalloc(struct malloc_state *, size_t, size_t, Void_t**);
+static Void_t** _int_icomalloc(struct malloc_state *, size_t, size_t*, Void_t**);
 #endif
-static int      mTRIm(mstate, size_t);
+static int      mTRIm(struct malloc_state *, size_t);
 static size_t   mUSABLe(Void_t*);
 static void     mSTATs(void);
 static int      mALLOPt(int, int);
-static struct mallinfo2 mALLINFo(mstate);
+static struct mallinfo2 mALLINFo(struct malloc_state *);
 static void malloc_printerr(int action, const char *str, void *ptr);
 
 static Void_t* internal_function mem2mem_check(Void_t *p, size_t sz);
@@ -2363,7 +2362,7 @@ static INTERNAL_SIZE_T global_max_fast;
   optimization at all. (Inlining it in malloc_consolidate is fine though.)
 */
 
-static void malloc_init_state(mstate av)
+static void malloc_init_state(struct malloc_state * av)
 {
   int     i;
   mbinptr bin;
@@ -2389,11 +2388,11 @@ static void malloc_init_state(mstate av)
    Other internal utilities operating on mstates
 */
 
-static Void_t*  sYSMALLOc(INTERNAL_SIZE_T, mstate);
-static int      sYSTRIm(size_t, mstate);
-static void     malloc_consolidate(mstate);
+static Void_t*  sYSMALLOc(INTERNAL_SIZE_T, struct malloc_state *);
+static int      sYSTRIm(size_t, struct malloc_state *);
+static void     malloc_consolidate(struct malloc_state *);
 #ifndef _LIBC
-static Void_t** iALLOc(mstate, size_t, size_t*, int, Void_t**);
+static Void_t** iALLOc(struct malloc_state *, size_t, size_t*, int, Void_t**);
 #endif
 
 
@@ -2485,7 +2484,7 @@ static int perturb_byte;
   Properties of all chunks
 */
 
-static void do_check_chunk(mstate av, mchunkptr p)
+static void do_check_chunk(struct malloc_state * av, mchunkptr p)
 {
   unsigned long sz = chunksize(p);
   /* min and max possible addresses assuming contiguous allocation */
@@ -2530,7 +2529,7 @@ static void do_check_chunk(mstate av, mchunkptr p)
   Properties of free chunks
 */
 
-static void do_check_free_chunk(mstate av, mchunkptr p)
+static void do_check_free_chunk(struct malloc_state * av, mchunkptr p)
 {
   INTERNAL_SIZE_T sz = p->size & ~(PREV_INUSE|NON_MAIN_ARENA);
   mchunkptr next = chunk_at_offset(p, sz);
@@ -2564,7 +2563,7 @@ static void do_check_free_chunk(mstate av, mchunkptr p)
   Properties of inuse chunks
 */
 
-static void do_check_inuse_chunk(mstate av, mchunkptr p)
+static void do_check_inuse_chunk(struct malloc_state * av, mchunkptr p)
 {
   mchunkptr next;
 
@@ -2601,7 +2600,7 @@ static void do_check_inuse_chunk(mstate av, mchunkptr p)
   Properties of chunks recycled from fastbins
 */
 
-static void do_check_remalloced_chunk(mstate av, mchunkptr p, INTERNAL_SIZE_T s)
+static void do_check_remalloced_chunk(struct malloc_state * av, mchunkptr p, INTERNAL_SIZE_T s)
 {
   INTERNAL_SIZE_T sz = p->size & ~(PREV_INUSE|NON_MAIN_ARENA);
 
@@ -2629,7 +2628,7 @@ static void do_check_remalloced_chunk(mstate av, mchunkptr p, INTERNAL_SIZE_T s)
   Properties of nonrecycled chunks at the point they are malloced
 */
 
-static void do_check_malloced_chunk(mstate av, mchunkptr p, INTERNAL_SIZE_T s)
+static void do_check_malloced_chunk(struct malloc_state * av, mchunkptr p, INTERNAL_SIZE_T s)
 {
   /* same as recycled case ... */
   do_check_remalloced_chunk(av, p, s);
@@ -2659,7 +2658,7 @@ static void do_check_malloced_chunk(mstate av, mchunkptr p, INTERNAL_SIZE_T s)
   display chunk addresses, sizes, bins, and other instrumentation.
 */
 
-static void do_check_malloc_state(mstate av)
+static void do_check_malloc_state(struct malloc_state * av)
 {
   int i;
   mchunkptr p;
@@ -2826,7 +2825,7 @@ static void do_check_malloc_state(mstate av)
   be extended or replaced.
 */
 
-static Void_t* sYSMALLOc(INTERNAL_SIZE_T nb, mstate av)
+static Void_t* sYSMALLOc(INTERNAL_SIZE_T nb, struct malloc_state * av)
 {
   mchunkptr       old_top;        /* incoming value of av->top */
   INTERNAL_SIZE_T old_size;       /* its size */
@@ -3310,7 +3309,7 @@ static Void_t* sYSMALLOc(INTERNAL_SIZE_T nb, mstate av)
   returns 1 if it actually released any memory, else 0.
 */
 
-static int sYSTRIm(size_t pad, mstate av)
+static int sYSTRIm(size_t pad, struct malloc_state * av)
 {
   long  top_size;        /* Amount of top-most memory */
   long  extra;           /* Amount to release */
@@ -3462,7 +3461,7 @@ mremap_chunk(mchunkptr p, size_t new_size)
 Void_t*
 public_mALLOc(size_t bytes)
 {
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
   Void_t *victim;
 
   __malloc_ptr_t (*hook) (size_t, __const __malloc_ptr_t)
@@ -3539,7 +3538,7 @@ libc_hidden_def(public_mALLOc)
 void
 public_fREe(Void_t* mem)
 {
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
   mchunkptr p;                          /* chunk corresponding to mem */
 
   void (*hook) (__malloc_ptr_t, __const __malloc_ptr_t)
@@ -3595,7 +3594,7 @@ libc_hidden_def (public_fREe)
 Void_t*
 public_rEALLOc(Void_t* oldmem, size_t bytes)
 {
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
   INTERNAL_SIZE_T    nb;      /* padded request size */
 
   Void_t* newp;             /* chunk to return */
@@ -3708,7 +3707,7 @@ libc_hidden_def (public_rEALLOc)
 Void_t*
 public_mEMALIGn(size_t alignment, size_t bytes)
 {
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
   Void_t *p;
 
   __malloc_ptr_t (*hook) __MALLOC_PMT ((size_t, size_t,
@@ -3738,7 +3737,7 @@ public_mEMALIGn(size_t alignment, size_t bytes)
     } else {
 #if USE_ARENAS
       /* ... or sbrk() has failed and there is still a chance to mmap() */
-      mstate prev = ar_ptr->next ? ar_ptr : 0;
+      struct malloc_state * prev = ar_ptr->next ? ar_ptr : 0;
       (void)mutex_unlock(&ar_ptr->mutex);
       ar_ptr = arena_get2(prev, bytes);
       if(ar_ptr) {
@@ -3760,7 +3759,7 @@ libc_hidden_def (public_mEMALIGn)
 Void_t*
 public_vALLOc(size_t bytes)
 {
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
   Void_t *p;
 
   if(__malloc_initialized < 0)
@@ -3806,7 +3805,7 @@ public_vALLOc(size_t bytes)
 Void_t*
 public_pVALLOc(size_t bytes)
 {
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
   Void_t *p;
 
   if(__malloc_initialized < 0)
@@ -3853,7 +3852,7 @@ public_pVALLOc(size_t bytes)
 Void_t*
 public_cALLOc(size_t n, size_t elem_size)
 {
-  mstate av;
+  struct malloc_state * av;
   mchunkptr oldtop, p;
   INTERNAL_SIZE_T bytes, sz, csz, oldtopsize;
   Void_t* mem;
@@ -3997,7 +3996,7 @@ public_cALLOc(size_t n, size_t elem_size)
 Void_t**
 public_iCALLOc(size_t n, size_t elem_size, Void_t** chunks)
 {
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
   Void_t** m;
 
   arena_get(ar_ptr, n*elem_size);
@@ -4012,7 +4011,7 @@ public_iCALLOc(size_t n, size_t elem_size, Void_t** chunks)
 Void_t**
 public_iCOMALLOc(size_t n, size_t sizes[], Void_t** chunks)
 {
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
   Void_t** m;
 
   arena_get(ar_ptr, 0);
@@ -4040,7 +4039,7 @@ public_mTRIm(size_t s)
   if(__malloc_initialized < 0)
     ptmalloc_init ();
 
-  mstate ar_ptr = &main_arena;
+  struct malloc_state * ar_ptr = &main_arena;
   do
     {
       (void) mutex_lock (&ar_ptr->mutex);
@@ -4105,7 +4104,7 @@ public_mALLOPt(int p, int v)
 */
 
 static Void_t*
-_int_malloc(mstate av, size_t bytes)
+_int_malloc(struct malloc_state * av, size_t bytes)
 {
   INTERNAL_SIZE_T nb;               /* normalized request size */
   unsigned int    idx;              /* associated bin index */
@@ -4618,9 +4617,9 @@ _int_malloc(mstate av, size_t bytes)
 
 static void
 #ifdef ATOMIC_FASTBINS
-_int_free(mstate av, mchunkptr p, int have_lock)
+_int_free(struct malloc_state * av, mchunkptr p, int have_lock)
 #else
-_int_free(mstate av, mchunkptr p)
+_int_free(struct malloc_state * av, mchunkptr p)
 #endif
 {
   INTERNAL_SIZE_T size;        /* its size */
@@ -4945,7 +4944,7 @@ _int_free(mstate av, mchunkptr p)
   initialization code.
 */
 
-static void malloc_consolidate(mstate av)
+static void malloc_consolidate(struct malloc_state * av)
 {
   mfastbinptr*    fb;                 /* current fastbin being consolidated */
   mfastbinptr*    maxfb;              /* last fastbin (for loop control) */
@@ -5063,7 +5062,7 @@ static void malloc_consolidate(mstate av)
 */
 
 Void_t*
-_int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
+_int_realloc(struct malloc_state * av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
 	     INTERNAL_SIZE_T nb)
 {
   mchunkptr        newp;            /* chunk to return */
@@ -5307,7 +5306,7 @@ _int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
 */
 
 static Void_t*
-_int_memalign(mstate av, size_t alignment, size_t bytes)
+_int_memalign(struct malloc_state * av, size_t alignment, size_t bytes)
 {
   INTERNAL_SIZE_T nb;             /* padded  request size */
   char*           m;              /* memory returned by malloc call */
@@ -5478,7 +5477,7 @@ Void_t* cALLOc(size_t n_elements, size_t elem_size)
 */
 
 Void_t**
-_int_icalloc(mstate av, size_t n_elements, size_t elem_size, Void_t* chunks[])
+_int_icalloc(struct malloc_state * av, size_t n_elements, size_t elem_size, Void_t* chunks[])
 {
   size_t sz = elem_size; /* serves as 1-element array */
   /* opts arg of 3 means all elements are same size, and should be cleared */
@@ -5490,7 +5489,7 @@ _int_icalloc(mstate av, size_t n_elements, size_t elem_size, Void_t* chunks[])
 */
 
 Void_t**
-_int_icomalloc(mstate av, size_t n_elements, size_t sizes[], Void_t* chunks[])
+_int_icomalloc(struct malloc_state * av, size_t n_elements, size_t sizes[], Void_t* chunks[])
 {
   return iALLOc(av, n_elements, sizes, 0, chunks);
 }
@@ -5508,7 +5507,7 @@ _int_icomalloc(mstate av, size_t n_elements, size_t sizes[], Void_t* chunks[])
 
 
 static Void_t**
-iALLOc(mstate av, size_t n_elements, size_t* sizes, int opts, Void_t* chunks[])
+iALLOc(struct malloc_state * av, size_t n_elements, size_t* sizes, int opts, Void_t* chunks[])
 {
   INTERNAL_SIZE_T element_size;   /* chunksize of each element, if all same */
   INTERNAL_SIZE_T contents_size;  /* total size of elements */
@@ -5629,7 +5628,7 @@ iALLOc(mstate av, size_t n_elements, size_t* sizes, int opts, Void_t* chunks[])
 */
 
 static Void_t*
-_int_valloc(mstate av, size_t bytes)
+_int_valloc(struct malloc_state * av, size_t bytes)
 {
   /* Ensure initialization/consolidation */
   if (have_fastchunks(av)) malloc_consolidate(av);
@@ -5642,7 +5641,7 @@ _int_valloc(mstate av, size_t bytes)
 
 
 static Void_t*
-_int_pvalloc(mstate av, size_t bytes)
+_int_pvalloc(struct malloc_state * av, size_t bytes)
 {
   size_t pagesz;
 
@@ -5657,7 +5656,7 @@ _int_pvalloc(mstate av, size_t bytes)
   ------------------------------ malloc_trim ------------------------------
 */
 
-static int mTRIm(mstate av, size_t pad)
+static int mTRIm(struct malloc_state * av, size_t pad)
 {
   /* Ensure initialization/consolidation */
   malloc_consolidate (av);
@@ -5733,7 +5732,7 @@ size_t mUSABLe(Void_t* mem)
   ------------------------------ mallinfo ------------------------------
 */
 
-struct mallinfo2 mALLINFo(mstate av)
+struct mallinfo2 mALLINFo(struct malloc_state * av)
 {
   struct mallinfo2 mi;
   size_t i;
@@ -5795,7 +5794,7 @@ struct mallinfo2 mALLINFo(mstate av)
 void mSTATs()
 {
   int i;
-  mstate ar_ptr;
+  struct malloc_state * ar_ptr;
   struct mallinfo2 mi;
   unsigned long in_use_b = mp_.mmapped_mem, system_b = in_use_b;
 #if THREAD_STATS
@@ -5866,7 +5865,7 @@ void mSTATs()
 
 int mALLOPt(int param_number, int value)
 {
-  mstate av = &main_arena;
+  struct malloc_state * av = &main_arena;
   int res = 1;
 
   if(__malloc_initialized < 0)
@@ -6173,7 +6172,7 @@ dlmalloc_info (int options, FILE *fp)
   fputs ("<malloc version=\"1\">\n", fp);
 
   /* Iterate over all arenas currently in use.  */
-  mstate ar_ptr = &main_arena;
+  struct malloc_state * ar_ptr = &main_arena;
   do {
     fprintf (fp, "<heap nr=\"%d\">\n<sizes>\n", n++);
 
-- 
2.7.0.rc3

