10 #include "pthread_impl.h"
/* When building position-independent code with GCC, force the small
 * static lock/unlock helpers below to be truly inlined (avoids call
 * overhead on the hot allocation path).
 * NOTE(review): the matching #endif is not visible in this excerpt. */
12 #if defined(__GNUC__) && defined(__PIC__)
13 #define inline inline __attribute__((always_inline))
/* Syscall-layer mremap(2) wrapper; used below to grow/shrink/move the
 * mapping backing an mmapped chunk during realloc. */
void *__mremap(void *, size_t, size_t, int, ...);
/* Chunk geometry: every chunk size is a multiple of SIZE_ALIGN, and each
 * chunk carries an OVERHEAD-byte header (prev-size and size words). */
39 #define SIZE_ALIGN (4*sizeof(size_t))
40 #define SIZE_MASK (-SIZE_ALIGN)
41 #define OVERHEAD (2*sizeof(size_t))
/* Requests of at least this many bytes are served directly by mmap. */
42 #define MMAP_THRESHOLD (0x1c00*SIZE_ALIGN)
/* Free spans at least this large are candidates for return to the OS. */
44 #define RECLAIM 163840
/* The low bit of csize/psize is the C_INUSE flag; `& -2` masks it off to
 * recover the true size, allowing walks between adjacent chunks. */
46 #define CHUNK_SIZE(c) ((c)->csize & -2)
47 #define CHUNK_PSIZE(c) ((c)->psize & -2)
48 #define PREV_CHUNK(c) ((struct chunk *)((char *)(c) - CHUNK_PSIZE(c)))
49 #define NEXT_CHUNK(c) ((struct chunk *)((char *)(c) + CHUNK_SIZE(c)))
/* Convert between a chunk header and the user-visible data pointer. */
50 #define MEM_TO_CHUNK(p) (struct chunk *)((char *)(p) - OVERHEAD)
51 #define CHUNK_TO_MEM(c) (void *)((char *)(c) + OVERHEAD)
/* Each bin's head/tail pair doubles as a sentinel chunk. */
52 #define BIN_TO_CHUNK(i) (MEM_TO_CHUNK(&mal.bins[i].head))
54 #define C_INUSE ((size_t)1)
/* mmapped chunks are identified by a CLEAR in-use bit in csize. */
56 #define IS_MMAPPED(c) !((c)->csize & (C_INUSE))
61 static inline void lock(
volatile int *lk)
63 if (libc.threads_minus_1)
64 while(a_swap(lk, 1)) __wait(lk, lk+1, 1, 1);
/* Release a heap/bin lock and wake one waiter if lk[1] records any.
 * NOTE(review): this excerpt is missing the store that actually clears
 * the lock word lk[0] (and its single-threaded guard) — only the wake
 * path is visible; confirm against the full source. */
67 static inline void unlock(
volatile int *lk)
71 if (lk[1]) __wake(lk, 1, 1);
/* Fragment of lock_bin(i): after acquiring bin i's lock, an empty bin
 * (null head) is presumably initialized to its sentinel — the
 * initialization lines are missing from this excerpt. */
78 if (!
mal.bins[
i].head)
/* Interior of first_set(x): index of the lowest set bit computed via
 * de Bruijn multiplication tables.
 * NOTE(review): the enclosing function header, the tables' closing
 * "};" lines, and the setup of `y` (low word of x with fallback to the
 * high word) are missing from this excerpt — confirm against the full
 * source. */
92 static const char debruijn64[64] = {
93 0, 1, 2, 53, 3, 7, 54, 27, 4, 38, 41, 8, 34, 55, 48, 28,
94 62, 5, 39, 46, 44, 42, 22, 9, 24, 35, 59, 56, 49, 18, 29, 11,
95 63, 52, 6, 26, 37, 40, 33, 47, 61, 45, 43, 21, 23, 58, 17, 10,
96 51, 25, 36, 32, 60, 20, 57, 16, 50, 31, 19, 15, 30, 14, 13, 12
98 static const char debruijn32[32] = {
99 0, 1, 23, 2, 29, 24, 19, 3, 30, 27, 25, 11, 20, 8, 4, 13,
100 31, 22, 28, 18, 26, 10, 7, 12, 21, 17, 9, 6, 16, 5, 15, 14
/* Targets with 32-bit long: scan in two 32-bit halves. */
102 if (
sizeof(
long) < 8) {
/* Bit lies in the high word; +32 rebases the 32-bit table result. */
106 return 32 + debruijn32[(y&-y)*0x076be629 >> 27];
/* Bit lies in the low word. */
108 return debruijn32[(y&-y)*0x076be629 >> 27];
/* 64-bit path: (x&-x) isolates the lowest set bit; multiplying by the
 * de Bruijn constant and taking the top 6 bits indexes the table. */
110 return debruijn64[(
x&-
x)*0x022fdd63cc95386dull >> 58];
/* Fragment of bin_tab[]: maps scaled chunk sizes to bin indices for the
 * mid-range; progressively coarser buckets (runs of 2, 4, then 8 equal
 * entries). The table declaration and first rows are missing from this
 * excerpt. */
115 32,33,34,35,36,36,37,37,38,38,39,39,
116 40,40,40,40,41,41,41,41,42,42,42,42,43,43,43,43,
117 44,44,44,44,44,44,44,44,45,45,45,45,45,45,45,45,
118 46,46,46,46,46,46,46,46,47,47,47,47,47,47,47,47,
/* bin_index(x) fragment: sizes up to 32 (in SIZE_ALIGN units) map 1:1
 * to bins 0..32; sizes beyond 0x1c00 saturate at the last bin (63).
 * The table-lookup middle range is missing from this excerpt. */
124 if (
x <= 32)
return x;
126 if (
x > 0x1c00)
return 63;
/* bin_index_up(x) fragment: same 1:1 fast path for small sizes; the
 * rounding-up table lookup is missing from this excerpt. */
133 if (
x <= 32)
return x;
/* __dump_heap(x): debugging aid. Presumably walks every chunk printing
 * its address, size and flags, then prints each bin and cross-checks it
 * against mal.binmap. NOTE(review): the chunk-walk loop, several braces,
 * and the argument list of the first fprintf are missing from this
 * excerpt. */
140 void __dump_heap(
int x)
/* Per-chunk report line (arguments not visible in this excerpt). */
145 fprintf(stderr,
"base %p size %zu (%d) flags %d/%d\n",
/* Consistency check: every non-empty bin must have its binmap bit set,
 * and no empty bin may have one. */
149 for (
i=0;
i<64;
i++) {
151 fprintf(stderr,
"bin %d: %p\n",
i,
mal.bins[
i].head);
152 if (!(
mal.binmap & 1ULL<<
i))
153 fprintf(stderr,
"missing from binmap!\n");
154 }
else if (
mal.binmap & 1ULL<<
i)
155 fprintf(stderr,
"binmap wrongly contains %d!\n",
i);
/* Lock word + waiter count pair — presumably serializes heap expansion;
 * its use is not visible in this excerpt (TODO confirm). */
164 static int heap_lock[2];
/* Fragment of unbin(c, i): remove chunk c from bin i's circular doubly
 * linked list. If c is the only real entry (its prev and next both point
 * at the sentinel, hence are equal), atomically clear bin i's bit in the
 * binmap first. */
226 if (
c->prev ==
c->next)
227 a_and_64(&
mal.binmap, ~(1ULL<<
i));
/* Standard doubly-linked-list unlink. */
228 c->prev->next =
c->next;
229 c->next->prev =
c->prev;
/* pretrim(self, n, i, j) fragments: refuse to pre-trim when the source
 * bin index j is below 40 (remainder too small to be worth splitting),
 * and for the largest sizes only pre-trim when the remainder stays in
 * bin 63. Surrounding context is missing from this excerpt. */
278 if (j < 40)
return 0;
280 if (j != 63)
return 0;
/* Cut point: the split chunk begins n bytes into self. The second,
 * identical computation presumably belongs to trim() — the enclosing
 * lines are missing from this excerpt. */
289 split = (
void *)((
char *)
self +
n);
310 split = (
void *)((
char *)
self +
n);
/* mmap failure check (MAP_FAILED == (void *)-1): caller sees NULL/OOM. */
331 if (base == (
void *)-1)
return 0;
/* malloc bin-scan fragment: take the head chunk of the selected bin j. */
354 c =
mal.bins[j].head;
/* __malloc0(n) fragment: round n up to whole size_t words, then clear
 * only the words that are not already zero — fresh mmapped memory is
 * zero-filled, so the test avoids dirtying untouched pages. */
374 n = (
n +
sizeof *z - 1)/
sizeof *z;
375 for (z=
p;
n;
n--, z++)
if (*z) *z=0;
/* realloc fragments, mmapped-chunk path: for an mmapped chunk, psize
 * stores the offset (`extra`) from the mapping base to the chunk header.
 * NOTE(review): several lines — including the copy into `new` and the
 * error return after __mremap — are missing from this excerpt. */
394 size_t extra =
self->psize;
395 char *base = (
char *)
self - extra;
396 size_t oldlen = n0 + extra;
397 size_t newlen =
n + extra;
/* Corruption check: the offset must have the low (C_INUSE) bit clear. */
399 if (extra & 1) a_crash();
/* Shrinking below a page: prefer a fresh malloc'd chunk (the copy into
 * `new` is not visible in this excerpt). */
400 if (newlen < PAGE_SIZE && (
new =
malloc(
n))) {
/* Round the new mapping length up to a whole number of pages. */
405 newlen = (newlen + PAGE_SIZE-1) & -PAGE_SIZE;
406 if (oldlen == newlen)
return p;
/* Resize the mapping in place, or let the kernel move it. */
407 base =
__mremap(base, oldlen, newlen, MREMAP_MAYMOVE);
408 if (base == (
void *)-1)
/* Recompute the chunk header from the (possibly moved) base. */
410 self = (
void *)(base + extra);
411 self->csize = newlen - extra;
/* Non-mmapped path: the next chunk's psize must mirror our csize, else
 * the heap is corrupt. */
418 if (
next->
psize != self->csize) a_crash();
/* free() fragments: recover the mmap base/offset exactly as realloc
 * does, validate headers, then bin the (merged) chunk: publish its bin
 * bit in binmap, clear C_INUSE, and link it at the tail of bin i's
 * circular list. NOTE(review): the forward/backward merge loops, the
 * unlocking, and the munmap/madvise of large spans are missing from
 * this excerpt. */
455 size_t final_size, new_size,
size;
/* mmapped chunk: psize is the offset from the mapping base. */
462 size_t extra =
self->psize;
463 char *base = (
char *)
self - extra;
/* Corruption check: offset must have the C_INUSE bit clear. */
466 if (extra & 1) a_crash();
/* Header/footer cross-check against the physically next chunk. */
475 if (
next->
psize != self->csize) a_crash();
/* Keep the chunk marked in-use until it is fully linked. */
479 self->csize = final_size |
C_INUSE;
/* Atomically mark bin i occupied if it was empty. */
507 if (!(
mal.binmap & 1ULL<<
i))
508 a_or_64(&
mal.binmap, 1ULL<<
i);
/* Now clear the in-use bit: the chunk is visibly free. */
510 self->csize = final_size;
/* Tail-insert into bin i's circular doubly linked list. */
515 self->prev =
mal.bins[
i].tail;
516 self->next->prev =
self;
517 self->prev->next =
self;
/*
 * NOTE(review): everything below appears to be extraction residue — an
 * editor/indexer symbol list, not part of this translation unit (note the
 * unrelated Windows-style CONTAINING_RECORD/MEMBLOCK line and the bare
 * prototypes with no semicolons or bodies). Preserved verbatim inside
 * this comment pending confirmation that it can be removed:
 *
 * memcpy(mem, inblock.get(), min(CONTAINING_RECORD(inblock.get(), MEMBLOCK, data) ->size, size))
 * void * realloc(void *ptr, size_t size)
 * void * malloc(size_t size)
 * _W64 unsigned int uintptr_t
 * static struct chunk * expand_heap(size_t n)
 * int __munmap(void *, size_t)
 * void * __mremap(void *, size_t, size_t, int,...)
 * static void unlock(volatile int *lk)
 * static void unlock_bin(int i)
 * static int pretrim(struct chunk *self, size_t n, int i, int j)
 * static int alloc_rev(struct chunk *c)
 * static int bin_index(size_t x)
 * static int bin_index_up(size_t x)
 * void * __expand_heap(size_t *)
 * static int first_set(uint64_t x)
 * static void lock(volatile int *lk)
 * static void trim(struct chunk *self, size_t n)
 * static void unbin(struct chunk *c, int i)
 * static int alloc_fwd(struct chunk *c)
 * void * __malloc0(size_t n)
 * static int adjust_size(size_t *n)
 * int __madvise(void *, size_t, int)
 * void * __mmap(void *, size_t, int, int, int, off_t)
 * static const unsigned char bin_tab[60]
 * static void lock_bin(int i)
 * volatile int free_lock[2]
 */