#ifndef JEMALLOC_INTERNAL_ARENA_INLINES_A_H
#define JEMALLOC_INTERNAL_ARENA_INLINES_A_H
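
/*
 * Arena inline functions, part A: small hot-path helpers that other
 * internal headers depend on.
 */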

static inline unsigned
arena_ind_get(const arena_t *arena) {
	return base_ind_get(arena->base);
}
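
/*
 * Bookkeeping for internal (metadata) allocation attributed to the
 * arena.  The counter is purely statistical, so relaxed atomic
 * ordering suffices.
 */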
static inline void
arena_internal_add(arena_t *arena, size_t size) {
	atomic_fetch_add_zu(&arena->stats.internal, size, ATOMIC_RELAXED);
}

static inline void
arena_internal_sub(arena_t *arena, size_t size) {
	atomic_fetch_sub_zu(&arena->stats.internal, size, ATOMIC_RELAXED);
}

static inline size_t
arena_internal_get(arena_t *arena) {
	return atomic_load_zu(&arena->stats.internal, ATOMIC_RELAXED);
}
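
/*
 * Accumulate accumbytes toward the profiling interval.  Returns true
 * if the interval has been reached, in which case the caller is
 * expected to trigger an interval-based profile dump.
 */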
static inline bool
arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes) {
	cassert(config_prof);

	if (likely(prof_interval == 0 || !prof_active_get_unlocked())) {
		return false;
	}

	return prof_accum_add(tsdn, &arena->prof_accum, accumbytes);
}
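
/*
 * Rebind the calling thread (and its tcache) to the arena assigned to
 * the given CPU, if the thread has migrated since its arena
 * association was last updated.
 */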
static inline void
percpu_arena_update(tsd_t *tsd, unsigned cpu) {
	assert(have_percpu_arena);
	arena_t *oldarena = tsd_arena_get(tsd);
	assert(oldarena != NULL);
	unsigned oldind = arena_ind_get(oldarena);

	if (oldind != cpu) {
		unsigned newind = cpu;
		arena_t *newarena = arena_get(tsd_tsdn(tsd), newind, true);
		assert(newarena != NULL);

		/* Set new arena/tcache associations. */
		arena_migrate(tsd, oldind, newind);
		tcache_t *tcache = tcache_get(tsd);
		if (tcache != NULL) {
			tcache_arena_reassociate(tsd_tsdn(tsd), tcache,
			    newarena);
		}
	}
}

#endif /* JEMALLOC_INTERNAL_ARENA_INLINES_A_H */