diff --git a/malloc/arena.c b/malloc/arena.c
index 9d49f93..9c13daa 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -113,7 +113,7 @@ int __malloc_initialized = -1;
   } while(0)
 
 # define arena_lock(ptr, size) do { \
-      if(ptr) \
+      if(__glibc_likely (ptr)) \
         (void)mutex_lock(&ptr->mutex); \
       else \
         ptr = arena_get2(ptr, (size), NULL); \
diff --git a/malloc/malloc.c b/malloc/malloc.c
index b1668b5..374eb09 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -1849,14 +1849,14 @@ static int check_action = DEFAULT_CHECK_ACTION;
 
 static int perturb_byte;
 
-static inline void
+static inline __attribute__ ((always_inline)) void
 alloc_perturb (char *p, size_t n)
 {
   if (__glibc_unlikely (perturb_byte))
     memset (p, perturb_byte ^ 0xff, n);
 }
 
-static inline void
+static inline __attribute__ ((always_inline)) void
 free_perturb (char *p, size_t n)
 {
   if (__glibc_unlikely (perturb_byte))
@@ -2835,10 +2835,10 @@ __libc_malloc(size_t bytes)
 
   arena_lookup(ar_ptr);
   arena_lock(ar_ptr, bytes);
-  if(!ar_ptr)
+  if(__glibc_unlikely (!ar_ptr))
     return 0;
   victim = _int_malloc(ar_ptr, bytes);
-  if(!victim) {
+  if(__glibc_unlikely (!victim)) {
     LIBC_PROBE (memory_malloc_retry, 1, bytes);
     ar_ptr = arena_get_retry(ar_ptr, bytes);
     if (__builtin_expect(ar_ptr != NULL, 1)) {
@@ -2866,12 +2866,12 @@ __libc_free(void* mem)
       return;
   }
 
-  if (mem == 0)                              /* free(0) has no effect */
+  if (__glibc_unlikely (mem == 0))           /* free(0) has no effect */
     return;
 
   p = mem2chunk(mem);
 
-  if (chunk_is_mmapped(p))                       /* release mmapped memory. */
+  if (__glibc_unlikely (chunk_is_mmapped(p)))    /* release mmapped memory. */
   {
     /* see if the dynamic brk/mmap threshold needs adjusting */
     if (!mp_.no_dyn_threshold
@@ -3251,19 +3251,21 @@ _int_malloc(mstate av, size_t bytes)
     can try it without checking, which saves some time on this fast path.
   */
 
-  if ((unsigned long)(nb) <= (unsigned long)(get_max_fast ())) {
+  if (__glibc_likely ((unsigned long)(nb) <= (unsigned long)(get_max_fast ())))
+    {
     idx = fastbin_index(nb);
     mfastbinptr* fb = &fastbin (av, idx);
     mchunkptr pp = *fb;
     do
       {
        victim = pp;
-       if (victim == NULL)
+       if (__glibc_unlikely (victim == NULL))
          break;
       }
-    while ((pp = catomic_compare_and_exchange_val_acq (fb, victim->fd, victim))
-          != victim);
-    if (victim != 0) {
+    while (__glibc_unlikely (
+          (pp = catomic_compare_and_exchange_val_acq (fb, victim->fd, victim))
+          != victim));
+    if (__glibc_likely (victim != 0)) {
       if (__builtin_expect (fastbin_index (chunksize (victim)) != idx, 0))
        {
          errstr = "malloc(): memory corruption (fast)";
@@ -3741,7 +3743,7 @@ _int_free(mstate av, mchunkptr p, int have_lock)
     and used quickly in malloc.
   */
 
-  if ((unsigned long)(size) <= (unsigned long)(get_max_fast ())
+  if (__glibc_likely ((unsigned long)(size) <= (unsigned long)(get_max_fast ()))
 
 #if TRIM_FASTBINS
       /*
@@ -3795,13 +3797,14 @@ _int_free(mstate av, mchunkptr p, int have_lock)
            errstr = "double free or corruption (fasttop)";
            goto errout;
          }
-       if (old != NULL)
+       if (__glibc_likely (old != NULL))
          old_idx = fastbin_index(chunksize(old));
        p->fd = fd = old;
       }
-    while ((old = catomic_compare_and_exchange_val_rel (fb, p, fd)) != fd);
+    while (__glibc_unlikely (
+          (old = catomic_compare_and_exchange_val_rel (fb, p, fd)) != fd));
 
-    if (fd != NULL && __builtin_expect (old_idx != idx, 0))
+    if (__glibc_likely (fd != NULL) && __builtin_expect (old_idx != idx, 0))
       {
        errstr = "invalid fastbin entry (free)";
        goto errout;
-- 
1.7.10.4
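
Note for readers unfamiliar with these macros: __glibc_likely and
__glibc_unlikely are thin wrappers around GCC's __builtin_expect,
defined in misc/sys/cdefs.h along these lines (degrading to a plain
condition on compilers without the builtin):

  #if __GNUC__ >= 3
  # define __glibc_unlikely(cond) __builtin_expect ((cond), 0)
  # define __glibc_likely(cond)   __builtin_expect ((cond), 1)
  #else
  # define __glibc_unlikely(cond) (cond)
  # define __glibc_likely(cond)   (cond)
  #endif

The standalone sketch below is illustrative only and is not part of the
patch; xmalloc is a hypothetical wrapper, not a glibc function. It shows
the same idea the patch applies to malloc's fast paths: mark the failure
branch as cold so the compiler keeps the common case on the straight-line
path.

  #include <stdio.h>
  #include <stdlib.h>

  /* Local stand-in for the glibc-internal hint macro.  */
  #define unlikely(cond) __builtin_expect ((cond), 0)

  /* Hypothetical allocation wrapper: allocation failure is the cold
     path, so the hint lets GCC lay out the success path as the
     fall-through.  */
  static void *
  xmalloc (size_t n)
  {
    void *p = malloc (n);
    if (unlikely (p == NULL))
      {
        fputs ("xmalloc: out of memory\n", stderr);
        abort ();
      }
    return p;
  }

  int
  main (void)
  {
    char *buf = xmalloc (64);
    free (buf);
    return 0;
  }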