#ifdef JEMALLOC_H_TYPES

#define LARGE_MINCLASS (ZU(1) << LG_LARGE_MINCLASS)
#define LG_RUN_MAXREGS (LG_PAGE - LG_TINY_MIN)
#define RUN_MAXREGS (1U << LG_RUN_MAXREGS)
#define REDZONE_MINSIZE 16
#define LG_DIRTY_MULT_DEFAULT 3
#define PURGE_DEFAULT purge_mode_ratio
#define DECAY_TIME_DEFAULT 10
#define DECAY_NTICKS_PER_UPDATE 1000
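/*
 * Illustrative note (not part of the original header): with
 * LG_DIRTY_MULT_DEFAULT == 3, ratio-based purging aims to keep dirty pages
 * below roughly nactive >> 3 (one eighth of active memory), e.g.
 *
 *	size_t dirty_max = nactive >> LG_DIRTY_MULT_DEFAULT;
 *
 * and with DECAY_TIME_DEFAULT == 10 the decay-based purger smooths unused
 * dirty pages out over about ten seconds, re-evaluating its backlog once
 * per DECAY_NTICKS_PER_UPDATE allocator ticks.
 */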
typedef struct arena_runs_dirty_link_s arena_runs_dirty_link_t;
typedef struct arena_avail_links_s arena_avail_links_t;
typedef struct arena_run_s arena_run_t;
typedef struct arena_chunk_map_bits_s arena_chunk_map_bits_t;
typedef struct arena_chunk_map_misc_s arena_chunk_map_misc_t;
typedef struct arena_chunk_s arena_chunk_t;
typedef struct arena_bin_info_s arena_bin_info_t;
typedef struct arena_decay_s arena_decay_t;
typedef struct arena_bin_s arena_bin_t;
typedef struct arena_s arena_t;
typedef struct arena_tdata_s arena_tdata_t;
#ifdef JEMALLOC_H_STRUCTS

#ifdef JEMALLOC_ARENA_STRUCTS_A
	bitmap_t bitmap[BITMAP_GROUPS_MAX];

struct arena_chunk_map_bits_s {
#define CHUNK_MAP_ALLOCATED ((size_t)0x01U)
#define CHUNK_MAP_LARGE ((size_t)0x02U)
#define CHUNK_MAP_STATE_MASK ((size_t)0x3U)
#define CHUNK_MAP_DECOMMITTED ((size_t)0x04U)
#define CHUNK_MAP_UNZEROED ((size_t)0x08U)
#define CHUNK_MAP_DIRTY ((size_t)0x10U)
#define CHUNK_MAP_FLAGS_MASK ((size_t)0x1cU)
#define CHUNK_MAP_BININD_SHIFT 5
#define BININD_INVALID ((size_t)0xffU)
#define CHUNK_MAP_BININD_MASK (BININD_INVALID << CHUNK_MAP_BININD_SHIFT)
#define CHUNK_MAP_BININD_INVALID CHUNK_MAP_BININD_MASK
#define CHUNK_MAP_RUNIND_SHIFT (CHUNK_MAP_BININD_SHIFT + 8)
#define CHUNK_MAP_SIZE_SHIFT (CHUNK_MAP_RUNIND_SHIFT - LG_PAGE)
#define CHUNK_MAP_SIZE_MASK \
    (~(CHUNK_MAP_BININD_MASK | CHUNK_MAP_FLAGS_MASK | CHUNK_MAP_STATE_MASK))
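/*
 * Illustrative sketch (not part of the original header) of how a map bits
 * word decomposes under the masks above, for a hypothetical value `bits`
 * read for some page:
 *
 *	size_t state = bits & CHUNK_MAP_STATE_MASK;	allocated / large
 *	size_t flags = bits & CHUNK_MAP_FLAGS_MASK;	decommitted / unzeroed / dirty
 *	szind_t binind = (bits & CHUNK_MAP_BININD_MASK) >> CHUNK_MAP_BININD_SHIFT;
 *	size_t size_bits = bits & CHUNK_MAP_SIZE_MASK;	encoded run size (or index)
 */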
struct arena_runs_dirty_link_s {
	qr(arena_runs_dirty_link_t) rd_link;

struct arena_chunk_map_misc_s {
	phn(arena_chunk_map_misc_t) ph_link;
	arena_runs_dirty_link_t rd;
	prof_tctx_t *prof_tctx;

typedef ph(arena_chunk_map_misc_t) arena_run_heap_t;
#ifdef JEMALLOC_ARENA_STRUCTS_B

struct arena_chunk_s {
	arena_chunk_map_bits_t map_bits[1];

struct arena_bin_info_s {
	bitmap_info_t bitmap_info;

struct arena_decay_s {
	size_t backlog[SMOOTHSTEP_NSTEPS];

	arena_run_heap_t runs;
	malloc_bin_stats_t stats;

	ql_head(extent_node_t) achunks;
	size_t extent_sn_next;
	arena_chunk_t *spare;
	arena_runs_dirty_link_t runs_dirty;
	extent_node_t chunks_cache;
	malloc_mutex_t huge_mtx;
	extent_tree_t chunks_szsnad_cached;
	extent_tree_t chunks_ad_cached;
	extent_tree_t chunks_szsnad_retained;
	extent_tree_t chunks_ad_retained;
	malloc_mutex_t chunks_mtx;
	ql_head(extent_node_t) node_cache;
	malloc_mutex_t node_cache_mtx;
	arena_bin_t bins[JM_NBINS];
	arena_run_heap_t runs_avail[NPSIZES];

struct arena_tdata_s {
	ticker_t decay_ticker;

#ifdef JEMALLOC_H_EXTERNS
static const size_t large_pad =
#ifdef JEMALLOC_CACHE_OBLIVIOUS
    PAGE
#else
    0
#endif
    ;
    size_t alignment, size_t *sn, bool *zero);
    size_t usize, size_t sn);
    void *chunk, size_t oldsize, size_t usize);
    void *chunk, size_t oldsize, size_t usize, size_t sn);
    void *chunk, size_t oldsize, size_t usize, bool *zero);
typedef void (arena_dalloc_junk_small_t)(void *, arena_bin_info_t *);
void	*arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
    size_t alignment, bool zero, tcache_t *tcache);
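/*
 * Illustrative usage sketch (not part of the original header; `tsdn`,
 * `arena`, and `my_tcache` are assumed to exist in the caller):
 *
 *	void *p = arena_palloc(tsdn, arena, 4096, 64, true, my_tcache);
 *
 * requests a zeroed allocation of usize 4096 with 64-byte alignment from
 * `arena`, routed through the thread cache when one is supplied.
 */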
    arena_chunk_t *chunk, void *ptr, arena_chunk_map_bits_t *bitselm);
    void *ptr, size_t pageind, arena_chunk_map_bits_t *bitselm);
    void *ptr, size_t pageind);
typedef void (arena_dalloc_junk_large_t)(void *, size_t);
    arena_chunk_t *chunk, void *ptr);
typedef void (arena_ralloc_junk_large_t)(void *, size_t, size_t);
    size_t size, size_t extra, bool zero);
void	*arena_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize,
    size_t size, size_t alignment, bool zero, tcache_t *tcache);
    ssize_t *decay_time, size_t *nactive, size_t *ndirty);
    size_t *nactive, size_t *ndirty, arena_stats_t *astats,
    malloc_bin_stats_t *bstats, malloc_large_stats_t *lstats,
    malloc_huge_stats_t *hstats);
#ifdef JEMALLOC_H_INLINES

#ifndef JEMALLOC_ENABLE_INLINE
    const arena_chunk_t *chunk, size_t pageind);
    const arena_chunk_t *chunk, size_t pageind);
    const void *old_ptr, prof_tctx_t *old_tctx);
    bool zero, tcache_t *tcache, bool slow_path);
size_t	arena_salloc(tsdn_t *tsdn, const void *ptr, bool demote);
void	arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
    bool slow_path);
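/*
 * Illustrative pairing of the two inlines declared just above (sketch
 * only; `tsdn`, `ptr`, and `tcache` are assumed to exist):
 *
 *	size_t usize = arena_salloc(tsdn, ptr, false);
 *	arena_dalloc(tsdn, ptr, tcache, true);
 *
 * The `demote` argument controls whether profiling-promoted small objects
 * report their original small size; `slow_path` tells the inline it was
 * reached via the allocator's slow path (e.g. when optional junk filling
 * is enabled).
 */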
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
# ifdef JEMALLOC_ARENA_INLINE_A
	arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(miscelm);

	arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(miscelm);

	arena_chunk_map_misc_t *miscelm = (arena_chunk_map_misc_t
	arena_chunk_map_misc_t *miscelm = (arena_chunk_map_misc_t
#if CHUNK_MAP_SIZE_SHIFT > 0
	size = (mapbits & CHUNK_MAP_SIZE_MASK) >> CHUNK_MAP_SIZE_SHIFT;
#elif CHUNK_MAP_SIZE_SHIFT == 0
	size = mapbits & CHUNK_MAP_SIZE_MASK;
#else
	size = (mapbits & CHUNK_MAP_SIZE_MASK) << -CHUNK_MAP_SIZE_SHIFT;
#endif
	assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) == 0);

	assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) ==
	    (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED));

	assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) ==
	    CHUNK_MAP_ALLOCATED);
	return (mapbits >> CHUNK_MAP_RUNIND_SHIFT);

	binind = (mapbits & CHUNK_MAP_BININD_MASK) >> CHUNK_MAP_BININD_SHIFT;
	assert(binind < JM_NBINS || binind == BININD_INVALID);

	assert((mapbits & CHUNK_MAP_DECOMMITTED) == 0 || (mapbits &
	    (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
	return (mapbits & CHUNK_MAP_DIRTY);

	assert((mapbits & CHUNK_MAP_DECOMMITTED) == 0 || (mapbits &
	    (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
	return (mapbits & CHUNK_MAP_UNZEROED);

	assert((mapbits & CHUNK_MAP_DECOMMITTED) == 0 || (mapbits &
	    (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
	return (mapbits & CHUNK_MAP_DECOMMITTED);

	return (mapbits & CHUNK_MAP_LARGE);

	return (mapbits & CHUNK_MAP_ALLOCATED);
#if CHUNK_MAP_SIZE_SHIFT > 0
	mapbits = size << CHUNK_MAP_SIZE_SHIFT;
#elif CHUNK_MAP_SIZE_SHIFT == 0
	mapbits = size;
#else
	mapbits = size >> -CHUNK_MAP_SIZE_SHIFT;
#endif
	assert((mapbits & ~CHUNK_MAP_SIZE_MASK) == 0);
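/*
 * Worked example (assuming LG_PAGE == 12, so CHUNK_MAP_SIZE_SHIFT ==
 * CHUNK_MAP_RUNIND_SHIFT - LG_PAGE == 13 - 12 == 1): a page-aligned run
 * size of 0x4000 encodes as 0x4000 << 1 == 0x8000, leaving bits 0..12
 * free for the state, flag, and binind fields, and
 * arena_mapbits_size_decode() reverses the shift to recover 0x4000.
 */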
	    (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
	    CHUNK_MAP_BININD_INVALID | flags);

	assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) == 0);
	    (mapbits & ~CHUNK_MAP_SIZE_MASK));

	    (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
	    CHUNK_MAP_BININD_INVALID | flags | CHUNK_MAP_LARGE |
	    CHUNK_MAP_ALLOCATED);

	assert(binind <= BININD_INVALID);
	    (binind << CHUNK_MAP_BININD_SHIFT));

	assert(binind < BININD_INVALID);
	    (binind << CHUNK_MAP_BININD_SHIFT) | flags | CHUNK_MAP_ALLOCATED);
	return (atomic_read_z(&arena->stats.metadata_allocated));

	arena->prof_accumbytes += accumbytes;

	binind = (mapbits & CHUNK_MAP_BININD_MASK) >> CHUNK_MAP_BININD_SHIFT;
		arena_chunk_t *chunk;
		size_t actual_mapbits;
		const arena_run_t *run;
		szind_t run_binind, actual_binind;
		arena_bin_info_t *bin_info;
		const arena_chunk_map_misc_t *miscelm;

		assert(binind != BININD_INVALID);
		assert(binind < JM_NBINS);
		chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
		assert(mapbits == actual_mapbits);
		run = &miscelm->run;
		run_binind = run->binind;
		bin = &arena->bins[run_binind];
		actual_binind = (szind_t)(bin - arena->bins);
		assert(run_binind == actual_binind);
		    (uintptr_t)bin_info->reg0_offset)) % bin_info->reg_interval
# ifdef JEMALLOC_ARENA_INLINE_B
	assert(binind < JM_NBINS);

	size_t diff, interval, shift, regind;
	    bin_info->reg0_offset);

	interval = bin_info->reg_interval;
	if (interval == 1) {
#define SIZE_INV_SHIFT ((sizeof(size_t) << 3) - LG_RUN_MAXREGS)
#define SIZE_INV(s) (((ZU(1) << SIZE_INV_SHIFT) / (s)) + 1)
		static const size_t interval_invs[] = {
		    SIZE_INV(3),
		    SIZE_INV(4), SIZE_INV(5), SIZE_INV(6), SIZE_INV(7),
		    SIZE_INV(8), SIZE_INV(9), SIZE_INV(10), SIZE_INV(11),
		    SIZE_INV(12), SIZE_INV(13), SIZE_INV(14), SIZE_INV(15),
		    SIZE_INV(16), SIZE_INV(17), SIZE_INV(18), SIZE_INV(19),
		    SIZE_INV(20), SIZE_INV(21), SIZE_INV(22), SIZE_INV(23),
		    SIZE_INV(24), SIZE_INV(25), SIZE_INV(26), SIZE_INV(27),
		    SIZE_INV(28), SIZE_INV(29), SIZE_INV(30), SIZE_INV(31)
		};

		if (likely(interval <= ((sizeof(interval_invs) /
		    sizeof(size_t)) + 2))) {
			regind = (diff * interval_invs[interval - 3]) >>
			    SIZE_INV_SHIFT;
		} else
			regind = diff / interval;
#undef SIZE_INV
#undef SIZE_INV_SHIFT

	assert(diff == regind * interval);
	assert(regind < bin_info->nregs);
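/*
 * Worked example of the multiply-by-inverse division above (assuming
 * 64-bit size_t, LG_PAGE == 12 and LG_TINY_MIN == 3, so LG_RUN_MAXREGS ==
 * 9 and SIZE_INV_SHIFT == 64 - 9 == 55): for interval == 3,
 * interval_invs[0] == ((ZU(1) << 55) / 3) + 1, and for any diff below
 * RUN_MAXREGS * 3,
 *
 *	regind = (diff * interval_invs[0]) >> 55
 *
 * equals diff / 3 exactly, replacing an integer division with a multiply
 * and a shift on the hot deallocation path.
 */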
	arena_chunk_t *chunk;

	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
		if (unlikely((mapbits & CHUNK_MAP_ALLOCATED) == 0))
		if (likely((mapbits & CHUNK_MAP_LARGE) == 0))
			arena_chunk_map_misc_t *elm =
			ret = atomic_read_p(&elm->prof_tctx_pun);
	arena_chunk_t *chunk;

	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
			arena_chunk_map_misc_t *elm;

    const void *old_ptr, prof_tctx_t *old_tctx)

	if (unlikely(usize > SMALL_MAXCLASS || (ptr == old_ptr &&
		arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
			arena_chunk_map_misc_t *elm;

	ticker_t *decay_ticker;
    tcache_t *tcache, bool slow_path)

			    tcache, size, ind, zero, slow_path));
			    tcache, size, ind, zero, slow_path));
	arena_chunk_t *chunk;

	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);

arena_salloc(tsdn_t *tsdn, const void *ptr, bool demote)
	arena_chunk_t *chunk;

	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
			    pageind+((ret+large_pad)>>LG_PAGE)-1));
arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path)
	arena_chunk_t *chunk;
	size_t pageind, mapbits;

	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
		if (likely((mapbits & CHUNK_MAP_LARGE) == 0)) {
				    size - large_pad, slow_path);

	arena_chunk_t *chunk;

	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
		    pageind) - large_pad;
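/*
 * Descriptive note (illustrative, not a definitive restatement): the
 * deallocation inlines above read the page's map bits for `ptr`; when
 * CHUNK_MAP_LARGE is clear the region is returned to its small bin
 * (through the thread cache when one is available), otherwise the large
 * run size is read from the map bits, large_pad is subtracted, and the
 * memory is freed through the large path.
 */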