#define JEMALLOC_CTL_C_
#include "jemalloc/internal/jemalloc_internal.h"
/******************************************************************************/
/* Data. */

/*
 * ctl_mtx protects the following:
 * NOTE(review): the original member list was lost in extraction; from the
 * code below it covers at least ctl_initialized, ctl_epoch, and ctl_stats
 * (see ctl_init()/ctl_refresh()/epoch_ctl(), which lock it around accesses).
 */
static malloc_mutex_t ctl_mtx;
/* Set once ctl_init() has successfully built ctl_stats (lazy init). */
static bool ctl_initialized;
/* Monotonic counter bumped via the "epoch" mallctl to refresh stats. */
static uint64_t ctl_epoch;
/* Per-arena stats array plus one trailing summary element. */
static ctl_stats_t ctl_stats;
16 /******************************************************************************/
17 /* Helpers for named and indexed nodes. */
19 JEMALLOC_INLINE_C const ctl_named_node_t *
20 ctl_named_node(const ctl_node_t *node)
23 return ((node->named) ? (const ctl_named_node_t *)node : NULL);
26 JEMALLOC_INLINE_C const ctl_named_node_t *
27 ctl_named_children(const ctl_named_node_t *node, size_t index)
29 const ctl_named_node_t *children = ctl_named_node(node->children);
31 return (children ? &children[index] : NULL);
34 JEMALLOC_INLINE_C const ctl_indexed_node_t *
35 ctl_indexed_node(const ctl_node_t *node)
38 return (!node->named ? (const ctl_indexed_node_t *)node : NULL);
/******************************************************************************/
/* Function prototypes for non-inline static functions. */

/* Declare the static *_ctl() handler for the mallctl leaf named n. */
#define CTL_PROTO(n) \
static int n##_ctl(const size_t *mib, size_t miblen, void *oldp, \
size_t *oldlenp, void *newp, size_t newlen);

/* Declare the static *_index() child-lookup function for indexed node n. */
#define INDEX_PROTO(n) \
static const ctl_named_node_t *n##_index(const size_t *mib, \
size_t miblen, size_t i);
52 static bool ctl_arena_init(ctl_arena_stats_t *astats);
53 static void ctl_arena_clear(ctl_arena_stats_t *astats);
54 static void ctl_arena_stats_amerge(ctl_arena_stats_t *cstats,
56 static void ctl_arena_stats_smerge(ctl_arena_stats_t *sstats,
57 ctl_arena_stats_t *astats);
58 static void ctl_arena_refresh(arena_t *arena, unsigned i);
59 static bool ctl_grow(void);
60 static void ctl_refresh(void);
61 static bool ctl_init(void);
62 static int ctl_lookup(const char *name, ctl_node_t const **nodesp,
63 size_t *mibp, size_t *depthp);
67 CTL_PROTO(thread_tcache_enabled)
68 CTL_PROTO(thread_tcache_flush)
69 CTL_PROTO(thread_prof_name)
70 CTL_PROTO(thread_prof_active)
71 CTL_PROTO(thread_arena)
72 CTL_PROTO(thread_allocated)
73 CTL_PROTO(thread_allocatedp)
74 CTL_PROTO(thread_deallocated)
75 CTL_PROTO(thread_deallocatedp)
76 CTL_PROTO(config_cache_oblivious)
77 CTL_PROTO(config_debug)
78 CTL_PROTO(config_fill)
79 CTL_PROTO(config_lazy_lock)
80 CTL_PROTO(config_malloc_conf)
81 CTL_PROTO(config_munmap)
82 CTL_PROTO(config_prof)
83 CTL_PROTO(config_prof_libgcc)
84 CTL_PROTO(config_prof_libunwind)
85 CTL_PROTO(config_stats)
86 CTL_PROTO(config_tcache)
88 CTL_PROTO(config_utrace)
89 CTL_PROTO(config_valgrind)
90 CTL_PROTO(config_xmalloc)
93 CTL_PROTO(opt_lg_chunk)
94 CTL_PROTO(opt_narenas)
96 CTL_PROTO(opt_lg_dirty_mult)
97 CTL_PROTO(opt_decay_time)
98 CTL_PROTO(opt_stats_print)
101 CTL_PROTO(opt_quarantine)
102 CTL_PROTO(opt_redzone)
103 CTL_PROTO(opt_utrace)
104 CTL_PROTO(opt_xmalloc)
105 CTL_PROTO(opt_tcache)
106 CTL_PROTO(opt_lg_tcache_max)
108 CTL_PROTO(opt_prof_prefix)
109 CTL_PROTO(opt_prof_active)
110 CTL_PROTO(opt_prof_thread_active_init)
111 CTL_PROTO(opt_lg_prof_sample)
112 CTL_PROTO(opt_lg_prof_interval)
113 CTL_PROTO(opt_prof_gdump)
114 CTL_PROTO(opt_prof_final)
115 CTL_PROTO(opt_prof_leak)
116 CTL_PROTO(opt_prof_accum)
117 CTL_PROTO(tcache_create)
118 CTL_PROTO(tcache_flush)
119 CTL_PROTO(tcache_destroy)
120 static void arena_i_purge(unsigned arena_ind, bool all);
121 CTL_PROTO(arena_i_purge)
122 CTL_PROTO(arena_i_decay)
123 CTL_PROTO(arena_i_dss)
124 CTL_PROTO(arena_i_lg_dirty_mult)
125 CTL_PROTO(arena_i_decay_time)
126 CTL_PROTO(arena_i_chunk_hooks)
128 CTL_PROTO(arenas_bin_i_size)
129 CTL_PROTO(arenas_bin_i_nregs)
130 CTL_PROTO(arenas_bin_i_run_size)
131 INDEX_PROTO(arenas_bin_i)
132 CTL_PROTO(arenas_lrun_i_size)
133 INDEX_PROTO(arenas_lrun_i)
134 CTL_PROTO(arenas_hchunk_i_size)
135 INDEX_PROTO(arenas_hchunk_i)
136 CTL_PROTO(arenas_narenas)
137 CTL_PROTO(arenas_initialized)
138 CTL_PROTO(arenas_lg_dirty_mult)
139 CTL_PROTO(arenas_decay_time)
140 CTL_PROTO(arenas_quantum)
141 CTL_PROTO(arenas_page)
142 CTL_PROTO(arenas_tcache_max)
143 CTL_PROTO(arenas_nbins)
144 CTL_PROTO(arenas_nhbins)
145 CTL_PROTO(arenas_nlruns)
146 CTL_PROTO(arenas_nhchunks)
147 CTL_PROTO(arenas_extend)
148 CTL_PROTO(prof_thread_active_init)
149 CTL_PROTO(prof_active)
151 CTL_PROTO(prof_gdump)
152 CTL_PROTO(prof_reset)
153 CTL_PROTO(prof_interval)
154 CTL_PROTO(lg_prof_sample)
155 CTL_PROTO(stats_arenas_i_small_allocated)
156 CTL_PROTO(stats_arenas_i_small_nmalloc)
157 CTL_PROTO(stats_arenas_i_small_ndalloc)
158 CTL_PROTO(stats_arenas_i_small_nrequests)
159 CTL_PROTO(stats_arenas_i_large_allocated)
160 CTL_PROTO(stats_arenas_i_large_nmalloc)
161 CTL_PROTO(stats_arenas_i_large_ndalloc)
162 CTL_PROTO(stats_arenas_i_large_nrequests)
163 CTL_PROTO(stats_arenas_i_huge_allocated)
164 CTL_PROTO(stats_arenas_i_huge_nmalloc)
165 CTL_PROTO(stats_arenas_i_huge_ndalloc)
166 CTL_PROTO(stats_arenas_i_huge_nrequests)
167 CTL_PROTO(stats_arenas_i_bins_j_nmalloc)
168 CTL_PROTO(stats_arenas_i_bins_j_ndalloc)
169 CTL_PROTO(stats_arenas_i_bins_j_nrequests)
170 CTL_PROTO(stats_arenas_i_bins_j_curregs)
171 CTL_PROTO(stats_arenas_i_bins_j_nfills)
172 CTL_PROTO(stats_arenas_i_bins_j_nflushes)
173 CTL_PROTO(stats_arenas_i_bins_j_nruns)
174 CTL_PROTO(stats_arenas_i_bins_j_nreruns)
175 CTL_PROTO(stats_arenas_i_bins_j_curruns)
176 INDEX_PROTO(stats_arenas_i_bins_j)
177 CTL_PROTO(stats_arenas_i_lruns_j_nmalloc)
178 CTL_PROTO(stats_arenas_i_lruns_j_ndalloc)
179 CTL_PROTO(stats_arenas_i_lruns_j_nrequests)
180 CTL_PROTO(stats_arenas_i_lruns_j_curruns)
181 INDEX_PROTO(stats_arenas_i_lruns_j)
182 CTL_PROTO(stats_arenas_i_hchunks_j_nmalloc)
183 CTL_PROTO(stats_arenas_i_hchunks_j_ndalloc)
184 CTL_PROTO(stats_arenas_i_hchunks_j_nrequests)
185 CTL_PROTO(stats_arenas_i_hchunks_j_curhchunks)
186 INDEX_PROTO(stats_arenas_i_hchunks_j)
187 CTL_PROTO(stats_arenas_i_nthreads)
188 CTL_PROTO(stats_arenas_i_dss)
189 CTL_PROTO(stats_arenas_i_lg_dirty_mult)
190 CTL_PROTO(stats_arenas_i_decay_time)
191 CTL_PROTO(stats_arenas_i_pactive)
192 CTL_PROTO(stats_arenas_i_pdirty)
193 CTL_PROTO(stats_arenas_i_mapped)
194 CTL_PROTO(stats_arenas_i_npurge)
195 CTL_PROTO(stats_arenas_i_nmadvise)
196 CTL_PROTO(stats_arenas_i_purged)
197 CTL_PROTO(stats_arenas_i_metadata_mapped)
198 CTL_PROTO(stats_arenas_i_metadata_allocated)
199 INDEX_PROTO(stats_arenas_i)
200 CTL_PROTO(stats_cactive)
201 CTL_PROTO(stats_allocated)
202 CTL_PROTO(stats_active)
203 CTL_PROTO(stats_metadata)
204 CTL_PROTO(stats_resident)
205 CTL_PROTO(stats_mapped)
/******************************************************************************/
/* mallctl tree. */

/* Maximum tree depth. */
#define CTL_MAX_DEPTH 6

/* Node-header initializer: {named = true}, followed by the node's name. */
#define NAME(n) {true}, n
/*
 * Interior-node tail initializer: child count, child array, and no ctl
 * handler.  NOTE(review): the trailing NULL field was lost in extraction;
 * restored to match the three-field tail produced by CTL() below.
 */
#define CHILD(t, c) \
sizeof(c##_node) / sizeof(ctl_##t##_node_t), \
(ctl_node_t *)c##_node, \
NULL

/* Leaf-node tail initializer: no children, plus the ctl handler. */
#define CTL(c) 0, NULL, c##_ctl

/*
 * Only handles internal indexed nodes, since there are currently no external
 * ones.
 */
#define INDEX(i) {false}, i##_index
226 static const ctl_named_node_t thread_tcache_node[] = {
227 {NAME("enabled"), CTL(thread_tcache_enabled)},
228 {NAME("flush"), CTL(thread_tcache_flush)}
231 static const ctl_named_node_t thread_prof_node[] = {
232 {NAME("name"), CTL(thread_prof_name)},
233 {NAME("active"), CTL(thread_prof_active)}
236 static const ctl_named_node_t thread_node[] = {
237 {NAME("arena"), CTL(thread_arena)},
238 {NAME("allocated"), CTL(thread_allocated)},
239 {NAME("allocatedp"), CTL(thread_allocatedp)},
240 {NAME("deallocated"), CTL(thread_deallocated)},
241 {NAME("deallocatedp"), CTL(thread_deallocatedp)},
242 {NAME("tcache"), CHILD(named, thread_tcache)},
243 {NAME("prof"), CHILD(named, thread_prof)}
246 static const ctl_named_node_t config_node[] = {
247 {NAME("cache_oblivious"), CTL(config_cache_oblivious)},
248 {NAME("debug"), CTL(config_debug)},
249 {NAME("fill"), CTL(config_fill)},
250 {NAME("lazy_lock"), CTL(config_lazy_lock)},
251 {NAME("malloc_conf"), CTL(config_malloc_conf)},
252 {NAME("munmap"), CTL(config_munmap)},
253 {NAME("prof"), CTL(config_prof)},
254 {NAME("prof_libgcc"), CTL(config_prof_libgcc)},
255 {NAME("prof_libunwind"), CTL(config_prof_libunwind)},
256 {NAME("stats"), CTL(config_stats)},
257 {NAME("tcache"), CTL(config_tcache)},
258 {NAME("tls"), CTL(config_tls)},
259 {NAME("utrace"), CTL(config_utrace)},
260 {NAME("valgrind"), CTL(config_valgrind)},
261 {NAME("xmalloc"), CTL(config_xmalloc)}
264 static const ctl_named_node_t opt_node[] = {
265 {NAME("abort"), CTL(opt_abort)},
266 {NAME("dss"), CTL(opt_dss)},
267 {NAME("lg_chunk"), CTL(opt_lg_chunk)},
268 {NAME("narenas"), CTL(opt_narenas)},
269 {NAME("purge"), CTL(opt_purge)},
270 {NAME("lg_dirty_mult"), CTL(opt_lg_dirty_mult)},
271 {NAME("decay_time"), CTL(opt_decay_time)},
272 {NAME("stats_print"), CTL(opt_stats_print)},
273 {NAME("junk"), CTL(opt_junk)},
274 {NAME("zero"), CTL(opt_zero)},
275 {NAME("quarantine"), CTL(opt_quarantine)},
276 {NAME("redzone"), CTL(opt_redzone)},
277 {NAME("utrace"), CTL(opt_utrace)},
278 {NAME("xmalloc"), CTL(opt_xmalloc)},
279 {NAME("tcache"), CTL(opt_tcache)},
280 {NAME("lg_tcache_max"), CTL(opt_lg_tcache_max)},
281 {NAME("prof"), CTL(opt_prof)},
282 {NAME("prof_prefix"), CTL(opt_prof_prefix)},
283 {NAME("prof_active"), CTL(opt_prof_active)},
284 {NAME("prof_thread_active_init"), CTL(opt_prof_thread_active_init)},
285 {NAME("lg_prof_sample"), CTL(opt_lg_prof_sample)},
286 {NAME("lg_prof_interval"), CTL(opt_lg_prof_interval)},
287 {NAME("prof_gdump"), CTL(opt_prof_gdump)},
288 {NAME("prof_final"), CTL(opt_prof_final)},
289 {NAME("prof_leak"), CTL(opt_prof_leak)},
290 {NAME("prof_accum"), CTL(opt_prof_accum)}
293 static const ctl_named_node_t tcache_node[] = {
294 {NAME("create"), CTL(tcache_create)},
295 {NAME("flush"), CTL(tcache_flush)},
296 {NAME("destroy"), CTL(tcache_destroy)}
299 static const ctl_named_node_t arena_i_node[] = {
300 {NAME("purge"), CTL(arena_i_purge)},
301 {NAME("decay"), CTL(arena_i_decay)},
302 {NAME("dss"), CTL(arena_i_dss)},
303 {NAME("lg_dirty_mult"), CTL(arena_i_lg_dirty_mult)},
304 {NAME("decay_time"), CTL(arena_i_decay_time)},
305 {NAME("chunk_hooks"), CTL(arena_i_chunk_hooks)}
307 static const ctl_named_node_t super_arena_i_node[] = {
308 {NAME(""), CHILD(named, arena_i)}
311 static const ctl_indexed_node_t arena_node[] = {
315 static const ctl_named_node_t arenas_bin_i_node[] = {
316 {NAME("size"), CTL(arenas_bin_i_size)},
317 {NAME("nregs"), CTL(arenas_bin_i_nregs)},
318 {NAME("run_size"), CTL(arenas_bin_i_run_size)}
320 static const ctl_named_node_t super_arenas_bin_i_node[] = {
321 {NAME(""), CHILD(named, arenas_bin_i)}
324 static const ctl_indexed_node_t arenas_bin_node[] = {
325 {INDEX(arenas_bin_i)}
328 static const ctl_named_node_t arenas_lrun_i_node[] = {
329 {NAME("size"), CTL(arenas_lrun_i_size)}
331 static const ctl_named_node_t super_arenas_lrun_i_node[] = {
332 {NAME(""), CHILD(named, arenas_lrun_i)}
335 static const ctl_indexed_node_t arenas_lrun_node[] = {
336 {INDEX(arenas_lrun_i)}
339 static const ctl_named_node_t arenas_hchunk_i_node[] = {
340 {NAME("size"), CTL(arenas_hchunk_i_size)}
342 static const ctl_named_node_t super_arenas_hchunk_i_node[] = {
343 {NAME(""), CHILD(named, arenas_hchunk_i)}
346 static const ctl_indexed_node_t arenas_hchunk_node[] = {
347 {INDEX(arenas_hchunk_i)}
350 static const ctl_named_node_t arenas_node[] = {
351 {NAME("narenas"), CTL(arenas_narenas)},
352 {NAME("initialized"), CTL(arenas_initialized)},
353 {NAME("lg_dirty_mult"), CTL(arenas_lg_dirty_mult)},
354 {NAME("decay_time"), CTL(arenas_decay_time)},
355 {NAME("quantum"), CTL(arenas_quantum)},
356 {NAME("page"), CTL(arenas_page)},
357 {NAME("tcache_max"), CTL(arenas_tcache_max)},
358 {NAME("nbins"), CTL(arenas_nbins)},
359 {NAME("nhbins"), CTL(arenas_nhbins)},
360 {NAME("bin"), CHILD(indexed, arenas_bin)},
361 {NAME("nlruns"), CTL(arenas_nlruns)},
362 {NAME("lrun"), CHILD(indexed, arenas_lrun)},
363 {NAME("nhchunks"), CTL(arenas_nhchunks)},
364 {NAME("hchunk"), CHILD(indexed, arenas_hchunk)},
365 {NAME("extend"), CTL(arenas_extend)}
368 static const ctl_named_node_t prof_node[] = {
369 {NAME("thread_active_init"), CTL(prof_thread_active_init)},
370 {NAME("active"), CTL(prof_active)},
371 {NAME("dump"), CTL(prof_dump)},
372 {NAME("gdump"), CTL(prof_gdump)},
373 {NAME("reset"), CTL(prof_reset)},
374 {NAME("interval"), CTL(prof_interval)},
375 {NAME("lg_sample"), CTL(lg_prof_sample)}
378 static const ctl_named_node_t stats_arenas_i_metadata_node[] = {
379 {NAME("mapped"), CTL(stats_arenas_i_metadata_mapped)},
380 {NAME("allocated"), CTL(stats_arenas_i_metadata_allocated)}
383 static const ctl_named_node_t stats_arenas_i_small_node[] = {
384 {NAME("allocated"), CTL(stats_arenas_i_small_allocated)},
385 {NAME("nmalloc"), CTL(stats_arenas_i_small_nmalloc)},
386 {NAME("ndalloc"), CTL(stats_arenas_i_small_ndalloc)},
387 {NAME("nrequests"), CTL(stats_arenas_i_small_nrequests)}
390 static const ctl_named_node_t stats_arenas_i_large_node[] = {
391 {NAME("allocated"), CTL(stats_arenas_i_large_allocated)},
392 {NAME("nmalloc"), CTL(stats_arenas_i_large_nmalloc)},
393 {NAME("ndalloc"), CTL(stats_arenas_i_large_ndalloc)},
394 {NAME("nrequests"), CTL(stats_arenas_i_large_nrequests)}
397 static const ctl_named_node_t stats_arenas_i_huge_node[] = {
398 {NAME("allocated"), CTL(stats_arenas_i_huge_allocated)},
399 {NAME("nmalloc"), CTL(stats_arenas_i_huge_nmalloc)},
400 {NAME("ndalloc"), CTL(stats_arenas_i_huge_ndalloc)},
401 {NAME("nrequests"), CTL(stats_arenas_i_huge_nrequests)}
404 static const ctl_named_node_t stats_arenas_i_bins_j_node[] = {
405 {NAME("nmalloc"), CTL(stats_arenas_i_bins_j_nmalloc)},
406 {NAME("ndalloc"), CTL(stats_arenas_i_bins_j_ndalloc)},
407 {NAME("nrequests"), CTL(stats_arenas_i_bins_j_nrequests)},
408 {NAME("curregs"), CTL(stats_arenas_i_bins_j_curregs)},
409 {NAME("nfills"), CTL(stats_arenas_i_bins_j_nfills)},
410 {NAME("nflushes"), CTL(stats_arenas_i_bins_j_nflushes)},
411 {NAME("nruns"), CTL(stats_arenas_i_bins_j_nruns)},
412 {NAME("nreruns"), CTL(stats_arenas_i_bins_j_nreruns)},
413 {NAME("curruns"), CTL(stats_arenas_i_bins_j_curruns)}
415 static const ctl_named_node_t super_stats_arenas_i_bins_j_node[] = {
416 {NAME(""), CHILD(named, stats_arenas_i_bins_j)}
419 static const ctl_indexed_node_t stats_arenas_i_bins_node[] = {
420 {INDEX(stats_arenas_i_bins_j)}
423 static const ctl_named_node_t stats_arenas_i_lruns_j_node[] = {
424 {NAME("nmalloc"), CTL(stats_arenas_i_lruns_j_nmalloc)},
425 {NAME("ndalloc"), CTL(stats_arenas_i_lruns_j_ndalloc)},
426 {NAME("nrequests"), CTL(stats_arenas_i_lruns_j_nrequests)},
427 {NAME("curruns"), CTL(stats_arenas_i_lruns_j_curruns)}
429 static const ctl_named_node_t super_stats_arenas_i_lruns_j_node[] = {
430 {NAME(""), CHILD(named, stats_arenas_i_lruns_j)}
433 static const ctl_indexed_node_t stats_arenas_i_lruns_node[] = {
434 {INDEX(stats_arenas_i_lruns_j)}
437 static const ctl_named_node_t stats_arenas_i_hchunks_j_node[] = {
438 {NAME("nmalloc"), CTL(stats_arenas_i_hchunks_j_nmalloc)},
439 {NAME("ndalloc"), CTL(stats_arenas_i_hchunks_j_ndalloc)},
440 {NAME("nrequests"), CTL(stats_arenas_i_hchunks_j_nrequests)},
441 {NAME("curhchunks"), CTL(stats_arenas_i_hchunks_j_curhchunks)}
443 static const ctl_named_node_t super_stats_arenas_i_hchunks_j_node[] = {
444 {NAME(""), CHILD(named, stats_arenas_i_hchunks_j)}
447 static const ctl_indexed_node_t stats_arenas_i_hchunks_node[] = {
448 {INDEX(stats_arenas_i_hchunks_j)}
451 static const ctl_named_node_t stats_arenas_i_node[] = {
452 {NAME("nthreads"), CTL(stats_arenas_i_nthreads)},
453 {NAME("dss"), CTL(stats_arenas_i_dss)},
454 {NAME("lg_dirty_mult"), CTL(stats_arenas_i_lg_dirty_mult)},
455 {NAME("decay_time"), CTL(stats_arenas_i_decay_time)},
456 {NAME("pactive"), CTL(stats_arenas_i_pactive)},
457 {NAME("pdirty"), CTL(stats_arenas_i_pdirty)},
458 {NAME("mapped"), CTL(stats_arenas_i_mapped)},
459 {NAME("npurge"), CTL(stats_arenas_i_npurge)},
460 {NAME("nmadvise"), CTL(stats_arenas_i_nmadvise)},
461 {NAME("purged"), CTL(stats_arenas_i_purged)},
462 {NAME("metadata"), CHILD(named, stats_arenas_i_metadata)},
463 {NAME("small"), CHILD(named, stats_arenas_i_small)},
464 {NAME("large"), CHILD(named, stats_arenas_i_large)},
465 {NAME("huge"), CHILD(named, stats_arenas_i_huge)},
466 {NAME("bins"), CHILD(indexed, stats_arenas_i_bins)},
467 {NAME("lruns"), CHILD(indexed, stats_arenas_i_lruns)},
468 {NAME("hchunks"), CHILD(indexed, stats_arenas_i_hchunks)}
470 static const ctl_named_node_t super_stats_arenas_i_node[] = {
471 {NAME(""), CHILD(named, stats_arenas_i)}
474 static const ctl_indexed_node_t stats_arenas_node[] = {
475 {INDEX(stats_arenas_i)}
478 static const ctl_named_node_t stats_node[] = {
479 {NAME("cactive"), CTL(stats_cactive)},
480 {NAME("allocated"), CTL(stats_allocated)},
481 {NAME("active"), CTL(stats_active)},
482 {NAME("metadata"), CTL(stats_metadata)},
483 {NAME("resident"), CTL(stats_resident)},
484 {NAME("mapped"), CTL(stats_mapped)},
485 {NAME("arenas"), CHILD(indexed, stats_arenas)}
488 static const ctl_named_node_t root_node[] = {
489 {NAME("version"), CTL(version)},
490 {NAME("epoch"), CTL(epoch)},
491 {NAME("thread"), CHILD(named, thread)},
492 {NAME("config"), CHILD(named, config)},
493 {NAME("opt"), CHILD(named, opt)},
494 {NAME("tcache"), CHILD(named, tcache)},
495 {NAME("arena"), CHILD(indexed, arena)},
496 {NAME("arenas"), CHILD(named, arenas)},
497 {NAME("prof"), CHILD(named, prof)},
498 {NAME("stats"), CHILD(named, stats)}
500 static const ctl_named_node_t super_root_node[] = {
501 {NAME(""), CHILD(named, root)}
509 /******************************************************************************/
512 ctl_arena_init(ctl_arena_stats_t *astats)
515 if (astats->lstats == NULL) {
516 astats->lstats = (malloc_large_stats_t *)a0malloc(nlclasses *
517 sizeof(malloc_large_stats_t));
518 if (astats->lstats == NULL)
522 if (astats->hstats == NULL) {
523 astats->hstats = (malloc_huge_stats_t *)a0malloc(nhclasses *
524 sizeof(malloc_huge_stats_t));
525 if (astats->hstats == NULL)
/*
 * ctl_arena_clear(): reset astats to pristine values before a merge pass.
 * NOTE(review): extraction gaps in this span — the return-type line, body
 * braces, and apparently the pactive/pdirty resets are missing; code tokens
 * left untouched.
 */
ctl_arena_clear(ctl_arena_stats_t *astats)
astats->nthreads = 0;
/* dss_prec_limit presumably selects the "N/A" sentinel name — confirm. */
astats->dss = dss_prec_names[dss_prec_limit];
astats->lg_dirty_mult = -1;
astats->decay_time = -1;
memset(&astats->astats, 0, sizeof(arena_stats_t));
astats->allocated_small = 0;
astats->nmalloc_small = 0;
astats->ndalloc_small = 0;
astats->nrequests_small = 0;
memset(astats->bstats, 0, NBINS * sizeof(malloc_bin_stats_t));
memset(astats->lstats, 0, nlclasses *
sizeof(malloc_large_stats_t));
memset(astats->hstats, 0, nhclasses *
sizeof(malloc_huge_stats_t));

/*
 * ctl_arena_stats_amerge(): merge arena's current statistics into cstats
 * via arena_stats_merge(), then derive the small-class aggregate totals
 * from the per-bin stats.
 * NOTE(review): extraction gaps — return-type line, braces, and the
 * per-region size multiplicand of allocated_small are missing.
 */
ctl_arena_stats_amerge(ctl_arena_stats_t *cstats, arena_t *arena)
arena_stats_merge(arena, &cstats->nthreads, &cstats->dss,
&cstats->lg_dirty_mult, &cstats->decay_time,
&cstats->pactive, &cstats->pdirty, &cstats->astats,
cstats->bstats, cstats->lstats, cstats->hstats);
for (i = 0; i < NBINS; i++) {
cstats->allocated_small += cstats->bstats[i].curregs *
/* NOTE(review): multiplicand line (region size for bin i) missing here. */
cstats->nmalloc_small += cstats->bstats[i].nmalloc;
cstats->ndalloc_small += cstats->bstats[i].ndalloc;
cstats->nrequests_small += cstats->bstats[i].nrequests;
/* Basic-stats-only path (full stats unavailable?) — guard line missing. */
arena_basic_stats_merge(arena, &cstats->nthreads, &cstats->dss,
&cstats->lg_dirty_mult, &cstats->decay_time,
&cstats->pactive, &cstats->pdirty);
/*
 * ctl_arena_stats_smerge(): accumulate astats into sstats (the all-arenas
 * summary element stored at index ctl_stats.narenas).
 * NOTE(review): extraction gaps — return-type line, braces, and the
 * nrequests_huge / curhchunks-adjacent lines are missing; code untouched.
 */
ctl_arena_stats_smerge(ctl_arena_stats_t *sstats, ctl_arena_stats_t *astats)
sstats->nthreads += astats->nthreads;
sstats->pactive += astats->pactive;
sstats->pdirty += astats->pdirty;
sstats->astats.mapped += astats->astats.mapped;
sstats->astats.npurge += astats->astats.npurge;
sstats->astats.nmadvise += astats->astats.nmadvise;
sstats->astats.purged += astats->astats.purged;
sstats->astats.metadata_mapped +=
astats->astats.metadata_mapped;
sstats->astats.metadata_allocated +=
astats->astats.metadata_allocated;
sstats->allocated_small += astats->allocated_small;
sstats->nmalloc_small += astats->nmalloc_small;
sstats->ndalloc_small += astats->ndalloc_small;
sstats->nrequests_small += astats->nrequests_small;
sstats->astats.allocated_large +=
astats->astats.allocated_large;
sstats->astats.nmalloc_large += astats->astats.nmalloc_large;
sstats->astats.ndalloc_large += astats->astats.ndalloc_large;
sstats->astats.nrequests_large +=
astats->astats.nrequests_large;
sstats->astats.allocated_huge += astats->astats.allocated_huge;
sstats->astats.nmalloc_huge += astats->astats.nmalloc_huge;
sstats->astats.ndalloc_huge += astats->astats.ndalloc_huge;
for (i = 0; i < NBINS; i++) {
sstats->bstats[i].nmalloc += astats->bstats[i].nmalloc;
sstats->bstats[i].ndalloc += astats->bstats[i].ndalloc;
sstats->bstats[i].nrequests +=
astats->bstats[i].nrequests;
sstats->bstats[i].curregs += astats->bstats[i].curregs;
sstats->bstats[i].nfills +=
astats->bstats[i].nfills;
sstats->bstats[i].nflushes +=
astats->bstats[i].nflushes;
sstats->bstats[i].nruns += astats->bstats[i].nruns;
sstats->bstats[i].reruns += astats->bstats[i].reruns;
sstats->bstats[i].curruns += astats->bstats[i].curruns;
for (i = 0; i < nlclasses; i++) {
sstats->lstats[i].nmalloc += astats->lstats[i].nmalloc;
sstats->lstats[i].ndalloc += astats->lstats[i].ndalloc;
sstats->lstats[i].nrequests +=
astats->lstats[i].nrequests;
sstats->lstats[i].curruns += astats->lstats[i].curruns;
for (i = 0; i < nhclasses; i++) {
sstats->hstats[i].nmalloc += astats->hstats[i].nmalloc;
sstats->hstats[i].ndalloc += astats->hstats[i].ndalloc;
sstats->hstats[i].curhchunks +=
astats->hstats[i].curhchunks;

/*
 * ctl_arena_refresh(): re-collect stats for arena i into
 * ctl_stats.arenas[i] and fold them into the summary slot.
 */
ctl_arena_refresh(arena_t *arena, unsigned i)
ctl_arena_stats_t *astats = &ctl_stats.arenas[i];
ctl_arena_stats_t *sstats = &ctl_stats.arenas[ctl_stats.narenas];
ctl_arena_clear(astats);
ctl_arena_stats_amerge(astats, arena);
/* Merge into sum stats as well. */
ctl_arena_stats_smerge(sstats, astats);
/*
 * ctl_grow() (fragment): create a new arena and grow ctl_stats.arenas by
 * one slot, keeping the all-arenas summary element in the last position.
 * NOTE(review): extraction gaps — signature, braces, and error-return
 * statements are missing in this span; code tokens left untouched.
 */
ctl_arena_stats_t *astats;
/* Initialize new arena. */
if (arena_init(ctl_stats.narenas) == NULL)
/* Allocate extended arena stats. */
astats = (ctl_arena_stats_t *)a0malloc((ctl_stats.narenas + 2) *
sizeof(ctl_arena_stats_t));
/* Initialize the new astats element. */
memcpy(astats, ctl_stats.arenas, (ctl_stats.narenas + 1) *
sizeof(ctl_arena_stats_t));
memset(&astats[ctl_stats.narenas + 1], 0, sizeof(ctl_arena_stats_t));
if (ctl_arena_init(&astats[ctl_stats.narenas + 1])) {
/* Swap merged stats to their new location. */
ctl_arena_stats_t tstats;
memcpy(&tstats, &astats[ctl_stats.narenas],
sizeof(ctl_arena_stats_t));
memcpy(&astats[ctl_stats.narenas],
&astats[ctl_stats.narenas + 1], sizeof(ctl_arena_stats_t));
memcpy(&astats[ctl_stats.narenas + 1], &tstats,
sizeof(ctl_arena_stats_t));
a0dalloc(ctl_stats.arenas);
ctl_stats.arenas = astats;

/*
 * ctl_refresh() (fragment): re-merge all per-arena stats and recompute the
 * global allocated/metadata/resident/mapped totals from the summary slot
 * plus base-allocator stats.
 */
VARIABLE_ARRAY(arena_t *, tarenas, ctl_stats.narenas);
/*
 * Clear sum stats, since they will be merged into by
 * ctl_arena_refresh().
 */
ctl_arena_clear(&ctl_stats.arenas[ctl_stats.narenas]);
for (i = 0; i < ctl_stats.narenas; i++)
tarenas[i] = arena_get(i, false);
for (i = 0; i < ctl_stats.narenas; i++) {
bool initialized = (tarenas[i] != NULL);
ctl_stats.arenas[i].initialized = initialized;
/* NOTE(review): likely guarded by "if (initialized)" — line missing. */
ctl_arena_refresh(tarenas[i], i);
size_t base_allocated, base_resident, base_mapped;
base_stats_get(&base_allocated, &base_resident, &base_mapped);
ctl_stats.allocated =
ctl_stats.arenas[ctl_stats.narenas].allocated_small +
ctl_stats.arenas[ctl_stats.narenas].astats.allocated_large +
ctl_stats.arenas[ctl_stats.narenas].astats.allocated_huge;
/* NOTE(review): the "ctl_stats.active =" lvalue line appears missing. */
(ctl_stats.arenas[ctl_stats.narenas].pactive << LG_PAGE);
ctl_stats.metadata = base_allocated +
ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
ctl_stats.arenas[ctl_stats.narenas].astats
ctl_stats.resident = base_resident +
ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
((ctl_stats.arenas[ctl_stats.narenas].pactive +
ctl_stats.arenas[ctl_stats.narenas].pdirty) << LG_PAGE);
ctl_stats.mapped = base_mapped +
ctl_stats.arenas[ctl_stats.narenas].astats.mapped;

/*
 * ctl_init() (fragment): one-time lazy initialization of ctl_stats under
 * ctl_mtx.  Allocates narenas + 1 stats elements; the extra element holds
 * summed stats across all arenas.
 */
malloc_mutex_lock(&ctl_mtx);
if (!ctl_initialized) {
/*
 * Allocate space for one extra arena stats element, which
 * contains summed stats across all arenas.
 */
ctl_stats.narenas = narenas_total_get();
ctl_stats.arenas = (ctl_arena_stats_t *)a0malloc(
(ctl_stats.narenas + 1) * sizeof(ctl_arena_stats_t));
if (ctl_stats.arenas == NULL) {
memset(ctl_stats.arenas, 0, (ctl_stats.narenas + 1) *
sizeof(ctl_arena_stats_t));
/*
 * Initialize all stats structures, regardless of whether they
 * ever get used. Lazy initialization would allow errors to
 * cause inconsistent state to be viewable by the application.
 */
for (i = 0; i <= ctl_stats.narenas; i++) {
if (ctl_arena_init(&ctl_stats.arenas[i])) {
/* Unwind partially-initialized elements on failure. */
for (j = 0; j < i; j++) {
ctl_stats.arenas[j].lstats);
ctl_stats.arenas[j].hstats);
a0dalloc(ctl_stats.arenas);
ctl_stats.arenas = NULL;
ctl_stats.arenas[ctl_stats.narenas].initialized = true;
ctl_initialized = true;
malloc_mutex_unlock(&ctl_mtx);
/*
 * ctl_lookup() (fragment): translate a dotted name into a MIB path,
 * walking from super_root_node.  Named children are matched by string
 * comparison; indexed children by parsing the element as an integer.
 * NOTE(review): extraction gaps — return-type line, braces, several
 * statements, and error paths are missing in this span.
 */
ctl_lookup(const char *name, ctl_node_t const **nodesp, size_t *mibp,
const char *elm, *tdot, *dot;
const ctl_named_node_t *node;
/* Equivalent to strchrnul(). */
dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot : strchr(elm, '\0');
elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
node = super_root_node;
for (i = 0; i < *depthp; i++) {
assert(node->nchildren > 0);
if (ctl_named_node(node->children) != NULL) {
const ctl_named_node_t *pnode = node;
/* Children are named. */
for (j = 0; j < node->nchildren; j++) {
const ctl_named_node_t *child =
ctl_named_children(node, j);
if (strlen(child->name) == elen &&
strncmp(elm, child->name, elen) == 0) {
(const ctl_node_t *)node;
const ctl_indexed_node_t *inode;
/* Children are indexed. */
index = malloc_strtoumax(elm, NULL, 10);
if (index == UINTMAX_MAX || index > SIZE_T_MAX) {
inode = ctl_indexed_node(node->children);
node = inode->index(mibp, *depthp, (size_t)index);
nodesp[i] = (const ctl_node_t *)node;
mibp[i] = (size_t)index;
if (node->ctl != NULL) {
/*
 * The name contains more elements than are
 * in this path through the tree.
 */
/* Complete lookup successful. */
/* No more elements. */
dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot :
elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);

/*
 * ctl_byname() (fragment): resolve name to a MIB, then invoke the leaf's
 * ctl handler with the old/new buffers.
 */
ctl_byname(const char *name, void *oldp, size_t *oldlenp, void *newp,
ctl_node_t const *nodes[CTL_MAX_DEPTH];
size_t mib[CTL_MAX_DEPTH];
const ctl_named_node_t *node;
if (!ctl_initialized && ctl_init()) {
depth = CTL_MAX_DEPTH;
ret = ctl_lookup(name, nodes, mib, &depth);
node = ctl_named_node(nodes[depth-1]);
if (node != NULL && node->ctl)
ret = node->ctl(mib, depth, oldp, oldlenp, newp, newlen);
/* The name refers to a partial path through the ctl tree. */

/* ctl_nametomib() (fragment): name -> MIB translation, no handler call. */
ctl_nametomib(const char *name, size_t *mibp, size_t *miblenp)
if (!ctl_initialized && ctl_init()) {
ret = ctl_lookup(name, NULL, mibp, miblenp);

/*
 * ctl_bymib() (fragment): walk the tree by numeric MIB and invoke the
 * resulting leaf's ctl handler.
 */
ctl_bymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
const ctl_named_node_t *node;
if (!ctl_initialized && ctl_init()) {
/* Iterate down the tree. */
node = super_root_node;
for (i = 0; i < miblen; i++) {
assert(node->nchildren > 0);
if (ctl_named_node(node->children) != NULL) {
/* Children are named. */
if (node->nchildren <= (unsigned)mib[i]) {
node = ctl_named_children(node, mib[i]);
const ctl_indexed_node_t *inode;
/* Indexed element. */
inode = ctl_indexed_node(node->children);
node = inode->index(mib, miblen, mib[i]);
/* Call the ctl function. */
if (node && node->ctl)
ret = node->ctl(mib, miblen, oldp, oldlenp, newp, newlen);

/* ctl_boot() (fragment): initialize ctl_mtx; start uninitialized. */
if (malloc_mutex_init(&ctl_mtx))
ctl_initialized = false;

/* Fork hooks: carry ctl_mtx safely across fork(). */
malloc_mutex_prefork(&ctl_mtx);
ctl_postfork_parent(void)
malloc_mutex_postfork_parent(&ctl_mtx);
ctl_postfork_child(void)
malloc_mutex_postfork_child(&ctl_mtx);
/******************************************************************************/
/* *_ctl() functions. */

/*
 * NOTE(review): the macro bodies in this section have extraction gaps
 * (missing "ret = ..." assignments and "} while (0)" / closing lines);
 * code tokens are left untouched below.
 */

/* Reject requests that supply a new value (read-only leaf). */
#define READONLY() do { \
if (newp != NULL || newlen != 0) { \
goto label_return; \
/* Reject requests that supply an old-value buffer (write-only leaf). */
#define WRITEONLY() do { \
if (oldp != NULL || oldlenp != NULL) { \
goto label_return; \
/* Reject simultaneous read and write on leaves that allow only one. */
#define READ_XOR_WRITE() do { \
if ((oldp != NULL && oldlenp != NULL) && (newp != NULL || \
goto label_return; \
/* Copy v out to the caller's buffer, honoring *oldlenp. */
#define READ(v, t) do { \
if (oldp != NULL && oldlenp != NULL) { \
if (*oldlenp != sizeof(t)) { \
size_t copylen = (sizeof(t) <= *oldlenp) \
? sizeof(t) : *oldlenp; \
memcpy(oldp, (void *)&(v), copylen); \
goto label_return; \
/* Copy the caller-supplied new value into v, requiring an exact size. */
#define WRITE(v, t) do { \
if (newp != NULL) { \
if (newlen != sizeof(t)) { \
goto label_return; \
/*
 * There's a lot of code duplication in the following macros due to limitations
 * in how nested cpp macros are expanded.
 */
#define CTL_RO_CLGEN(c, l, n, v, t) \
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
void *newp, size_t newlen) \
malloc_mutex_lock(&ctl_mtx); \
malloc_mutex_unlock(&ctl_mtx); \
#define CTL_RO_CGEN(c, n, v, t) \
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
void *newp, size_t newlen) \
malloc_mutex_lock(&ctl_mtx); \
malloc_mutex_unlock(&ctl_mtx); \
#define CTL_RO_GEN(n, v, t) \
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
void *newp, size_t newlen) \
malloc_mutex_lock(&ctl_mtx); \
malloc_mutex_unlock(&ctl_mtx); \
/*
 * ctl_mtx is not acquired, under the assumption that no pertinent data will
 * mutate during the call.
 */
#define CTL_RO_NL_CGEN(c, n, v, t) \
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
void *newp, size_t newlen) \
#define CTL_RO_NL_GEN(n, v, t) \
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
void *newp, size_t newlen) \
#define CTL_TSD_RO_NL_CGEN(c, n, m, t) \
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
void *newp, size_t newlen) \
tsd = tsd_fetch(); \
oldval = (m(tsd)); \
#define CTL_RO_CONFIG_GEN(n, t) \
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
void *newp, size_t newlen) \
/******************************************************************************/
1229 CTL_RO_NL_GEN(version, JEMALLOC_VERSION, const char *)
1232 epoch_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
1233 void *newp, size_t newlen)
1236 UNUSED uint64_t newval;
1238 malloc_mutex_lock(&ctl_mtx);
1239 WRITE(newval, uint64_t);
1242 READ(ctl_epoch, uint64_t);
1246 malloc_mutex_unlock(&ctl_mtx);
1250 /******************************************************************************/
1252 CTL_RO_CONFIG_GEN(config_cache_oblivious, bool)
1253 CTL_RO_CONFIG_GEN(config_debug, bool)
1254 CTL_RO_CONFIG_GEN(config_fill, bool)
1255 CTL_RO_CONFIG_GEN(config_lazy_lock, bool)
1256 CTL_RO_CONFIG_GEN(config_malloc_conf, const char *)
1257 CTL_RO_CONFIG_GEN(config_munmap, bool)
1258 CTL_RO_CONFIG_GEN(config_prof, bool)
1259 CTL_RO_CONFIG_GEN(config_prof_libgcc, bool)
1260 CTL_RO_CONFIG_GEN(config_prof_libunwind, bool)
1261 CTL_RO_CONFIG_GEN(config_stats, bool)
1262 CTL_RO_CONFIG_GEN(config_tcache, bool)
1263 CTL_RO_CONFIG_GEN(config_tls, bool)
1264 CTL_RO_CONFIG_GEN(config_utrace, bool)
1265 CTL_RO_CONFIG_GEN(config_valgrind, bool)
1266 CTL_RO_CONFIG_GEN(config_xmalloc, bool)
1268 /******************************************************************************/
/*
 * mallctl("opt.*"): read-only views of the run-time option values fixed at
 * initialization.  The CGEN variants are additionally gated on the build
 * option named in their first argument (e.g. opt.junk only exists when
 * config_fill is enabled).
 */
CTL_RO_NL_GEN(opt_abort, opt_abort, bool)
CTL_RO_NL_GEN(opt_dss, opt_dss, const char *)
CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
CTL_RO_NL_GEN(opt_narenas, opt_narenas, unsigned)
/* opt_purge is an enum index; expose its human-readable name. */
CTL_RO_NL_GEN(opt_purge, purge_mode_names[opt_purge], const char *)
CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
CTL_RO_NL_GEN(opt_decay_time, opt_decay_time, ssize_t)
CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, const char *)
CTL_RO_NL_CGEN(config_fill, opt_quarantine, opt_quarantine, size_t)
CTL_RO_NL_CGEN(config_fill, opt_redzone, opt_redzone, bool)
CTL_RO_NL_CGEN(config_fill, opt_zero, opt_zero, bool)
CTL_RO_NL_CGEN(config_utrace, opt_utrace, opt_utrace, bool)
CTL_RO_NL_CGEN(config_xmalloc, opt_xmalloc, opt_xmalloc, bool)
CTL_RO_NL_CGEN(config_tcache, opt_tcache, opt_tcache, bool)
CTL_RO_NL_CGEN(config_tcache, opt_lg_tcache_max, opt_lg_tcache_max, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof, opt_prof, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_prefix, opt_prof_prefix, const char *)
CTL_RO_NL_CGEN(config_prof, opt_prof_active, opt_prof_active, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_thread_active_init,
opt_prof_thread_active_init, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_sample, opt_lg_prof_sample, size_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_accum, opt_prof_accum, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_gdump, opt_prof_gdump, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_final, opt_prof_final, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_leak, opt_prof_leak, bool)
1298 /******************************************************************************/
/*
 * mallctl("thread.arena"): read the calling thread's current arena index
 * and/or migrate the thread to a different arena.  Serialized under ctl_mtx.
 * NOTE(review): body is fragmentary here — declarations, error paths, and
 * the return path are on elided lines.
 */
thread_arena_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
unsigned newind, oldind;
/* Resolve (and if necessary choose) the thread's current arena. */
oldarena = arena_choose(tsd, NULL);
if (oldarena == NULL)
malloc_mutex_lock(&ctl_mtx);
/* Default new index to the current one; WRITE overwrites it if newp set. */
newind = oldind = oldarena->ind;
WRITE(newind, unsigned);
READ(oldind, unsigned);
if (newind != oldind) {
/* Validate against the number of arenas snapshotted in ctl_stats. */
if (newind >= ctl_stats.narenas) {
/* New arena index is out of range. */
/* Initialize arena if necessary. */
newarena = arena_get(newind, true);
if (newarena == NULL) {
/* Set new arena/tcache associations. */
arena_migrate(tsd, oldind, newind);
/* Keep the thread cache pointing at the new arena, when tcache is built. */
if (config_tcache) {
tcache_t *tcache = tsd_tcache_get(tsd);
if (tcache != NULL) {
tcache_arena_reassociate(tcache, oldarena,
malloc_mutex_unlock(&ctl_mtx);
/*
 * mallctl("thread.allocated"/"thread.deallocated") and their *p variants:
 * per-thread byte counters from tsd (the *p forms expose a pointer to the
 * counter so it can be sampled without further mallctl calls).  Result-type
 * arguments of the first three generators are on elided lines.
 */
CTL_TSD_RO_NL_CGEN(config_stats, thread_allocated, tsd_thread_allocated_get,
CTL_TSD_RO_NL_CGEN(config_stats, thread_allocatedp, tsd_thread_allocatedp_get,
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocated, tsd_thread_deallocated_get,
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocatedp,
tsd_thread_deallocatedp_get, uint64_t *)
/*
 * mallctl("thread.tcache.enabled"): read and/or toggle the calling thread's
 * tcache.  NOTE(review): fragmentary — config gating, error handling, and
 * the READ of oldval are on elided lines.
 */
thread_tcache_enabled_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
oldval = tcache_enabled_get();
/* New value, when provided, must be exactly a bool. */
if (newlen != sizeof(bool)) {
tcache_enabled_set(*(bool *)newp);
/*
 * mallctl("thread.tcache.flush"): flush the calling thread's tcache (body
 * entirely on elided lines).
 */
thread_tcache_flush_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
/*
 * mallctl("thread.prof.name"): get/set the calling thread's profiling name.
 * NOTE(review): fragmentary — ret propagation and return path elided.
 */
thread_prof_name_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
if (newlen != sizeof(const char *)) {
/* prof_thread_name_set copies the name; propagate its error code. */
if ((ret = prof_thread_name_set(tsd, *(const char **)newp)) !=
const char *oldname = prof_thread_name_get();
READ(oldname, const char *);
/*
 * mallctl("thread.prof.active"): read and/or toggle profiling sampling for
 * the calling thread.  NOTE(review): fragmentary view.
 */
thread_prof_active_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
oldval = prof_thread_active_get();
if (newlen != sizeof(bool)) {
if (prof_thread_active_set(*(bool *)newp)) {
1465 /******************************************************************************/
/*
 * mallctl("tcache.create"): create an explicit (manually managed) tcache and
 * return its index via oldp.  Serialized under ctl_mtx.  NOTE(review):
 * fragmentary — error paths and return elided.
 */
tcache_create_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
unsigned tcache_ind;
malloc_mutex_lock(&ctl_mtx);
if (tcaches_create(tsd, &tcache_ind)) {
READ(tcache_ind, unsigned);
malloc_mutex_unlock(&ctl_mtx);
/*
 * mallctl("tcache.flush"): flush the explicit tcache whose index is written
 * via newp.  UINT_MAX is the "no index written" sentinel and is rejected.
 */
tcache_flush_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
unsigned tcache_ind;
tcache_ind = UINT_MAX;
WRITE(tcache_ind, unsigned);
if (tcache_ind == UINT_MAX) {
tcaches_flush(tsd, tcache_ind);
/*
 * mallctl("tcache.destroy"): destroy the explicit tcache whose index is
 * written via newp; same UINT_MAX sentinel handling as tcache.flush.
 */
tcache_destroy_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
unsigned tcache_ind;
tcache_ind = UINT_MAX;
WRITE(tcache_ind, unsigned);
if (tcache_ind == UINT_MAX) {
tcaches_destroy(tsd, tcache_ind);
1548 /******************************************************************************/
/*
 * Purge dirty pages for one arena, or for all arenas when arena_ind equals
 * the current arena count (the "arena.<narenas>" pseudo-index).  `all`
 * selects full purge vs. decay-driven purge.  ctl_mtx is held only long
 * enough to snapshot narenas / arena pointers.  NOTE(review): fragmentary —
 * some declarations and braces are on elided lines.
 */
arena_i_purge(unsigned arena_ind, bool all)
malloc_mutex_lock(&ctl_mtx);
unsigned narenas = ctl_stats.narenas;
if (arena_ind == narenas) {
/* Snapshot every arena pointer so the mutex can be dropped early. */
VARIABLE_ARRAY(arena_t *, tarenas, narenas);
for (i = 0; i < narenas; i++)
tarenas[i] = arena_get(i, false);
* No further need to hold ctl_mtx, since narenas and
* tarenas contain everything needed below.
malloc_mutex_unlock(&ctl_mtx);
/* NULL entries are arenas that were never initialized; skip them. */
for (i = 0; i < narenas; i++) {
if (tarenas[i] != NULL)
arena_purge(tarenas[i], all);
assert(arena_ind < narenas);
tarena = arena_get(arena_ind, false);
/* No further need to hold ctl_mtx. */
malloc_mutex_unlock(&ctl_mtx);
arena_purge(tarena, all);
/*
 * mallctl("arena.<i>.purge"): full purge of arena mib[1] (or all arenas).
 */
arena_i_purge_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
arena_i_purge((unsigned)mib[1], true);
/*
 * mallctl("arena.<i>.decay"): decay-based purge of arena mib[1]; differs
 * from purge only in the `all` flag passed to arena_i_purge().
 */
arena_i_decay_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
arena_i_purge((unsigned)mib[1], false);
/*
 * mallctl("arena.<i>.dss"): get/set the dss (sbrk) precedence for arena
 * mib[1], or the chunk-level default when mib[1] is the "all arenas"
 * pseudo-index.  The setting is written/read as one of the dss_prec_names[]
 * strings.  dss_prec_limit doubles as the "no new value supplied" sentinel.
 * Serialized under ctl_mtx.  NOTE(review): fragmentary — error paths and
 * several braces are on elided lines.
 */
arena_i_dss_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
const char *dss = NULL;
unsigned arena_ind = (unsigned)mib[1];
dss_prec_t dss_prec_old = dss_prec_limit;
dss_prec_t dss_prec = dss_prec_limit;
malloc_mutex_lock(&ctl_mtx);
WRITE(dss, const char *);
/* Translate the incoming name into a dss_prec_t value. */
for (i = 0; i < dss_prec_limit; i++) {
if (strcmp(dss_prec_names[i], dss) == 0) {
if (arena_ind < ctl_stats.narenas) {
/* Per-arena setting. */
arena_t *arena = arena_get(arena_ind, false);
if (arena == NULL || (dss_prec != dss_prec_limit &&
arena_dss_prec_set(arena, dss_prec))) {
dss_prec_old = arena_dss_prec_get(arena);
/* Process-wide (chunk allocator) default. */
if (dss_prec != dss_prec_limit &&
chunk_dss_prec_set(dss_prec)) {
dss_prec_old = chunk_dss_prec_get();
/* Report the previous setting back as a string. */
dss = dss_prec_names[dss_prec_old];
READ(dss, const char *);
malloc_mutex_unlock(&ctl_mtx);
/*
 * mallctl("arena.<i>.lg_dirty_mult"): get/set arena mib[1]'s dirty-page
 * purging multiplier.  NOTE(review): fragmentary — error paths elided.
 */
arena_i_lg_dirty_mult_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
unsigned arena_ind = (unsigned)mib[1];
arena = arena_get(arena_ind, false);
if (arena == NULL) {
if (oldp != NULL && oldlenp != NULL) {
/*
 * NOTE(review): oldval is declared size_t but READ as ssize_t (the
 * value is logically signed).  Same width, so no behavior change,
 * but consider declaring it ssize_t — verify against upstream.
 */
size_t oldval = arena_lg_dirty_mult_get(arena);
READ(oldval, ssize_t);
if (newlen != sizeof(ssize_t)) {
if (arena_lg_dirty_mult_set(arena, *(ssize_t *)newp)) {
/*
 * mallctl("arena.<i>.decay_time"): get/set arena mib[1]'s decay time.
 * Mirrors arena_i_lg_dirty_mult_ctl, including the same size_t/ssize_t
 * declaration quirk noted above — verify against upstream.
 */
arena_i_decay_time_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
unsigned arena_ind = (unsigned)mib[1];
arena = arena_get(arena_ind, false);
if (arena == NULL) {
if (oldp != NULL && oldlenp != NULL) {
size_t oldval = arena_decay_time_get(arena);
READ(oldval, ssize_t);
if (newlen != sizeof(ssize_t)) {
if (arena_decay_time_set(arena, *(ssize_t *)newp)) {
/*
 * mallctl("arena.<i>.chunk_hooks"): get and/or replace arena mib[1]'s chunk
 * management hook table.  Write-then-read semantics: when a new table is
 * supplied, the old one is returned.  Serialized under ctl_mtx.
 * NOTE(review): fragmentary — the write/read branch structure and error
 * paths are partly on elided lines.
 */
arena_i_chunk_hooks_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
unsigned arena_ind = (unsigned)mib[1];
malloc_mutex_lock(&ctl_mtx);
if (arena_ind < narenas_total_get() && (arena =
arena_get(arena_ind, false)) != NULL) {
/* Replace hooks: chunk_hooks_set returns the previous table. */
chunk_hooks_t old_chunk_hooks, new_chunk_hooks;
WRITE(new_chunk_hooks, chunk_hooks_t);
old_chunk_hooks = chunk_hooks_set(arena,
READ(old_chunk_hooks, chunk_hooks_t);
/* Read-only path: report the current hook table. */
chunk_hooks_t old_chunk_hooks = chunk_hooks_get(arena);
READ(old_chunk_hooks, chunk_hooks_t);
malloc_mutex_unlock(&ctl_mtx);
/*
 * Index handler for "arena.<i>": validates i under ctl_mtx.  Note the bound
 * is `>` (not `>=`) because i == narenas is the valid "all arenas"
 * pseudo-index (see arena_i_purge).
 */
static const ctl_named_node_t *
arena_i_index(const size_t *mib, size_t miblen, size_t i)
const ctl_named_node_t * ret;
malloc_mutex_lock(&ctl_mtx);
if (i > ctl_stats.narenas) {
ret = super_arena_i_node;
malloc_mutex_unlock(&ctl_mtx);
1793 /******************************************************************************/
/*
 * mallctl("arenas.narenas"): report the current arena count, under ctl_mtx.
 * NOTE(review): fragmentary — error handling and return path elided.
 */
arenas_narenas_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
malloc_mutex_lock(&ctl_mtx);
/* The output buffer must be exactly sizeof(unsigned). */
if (*oldlenp != sizeof(unsigned)) {
narenas = ctl_stats.narenas;
READ(narenas, unsigned);
malloc_mutex_unlock(&ctl_mtx);
/*
 * mallctl("arenas.initialized"): fill oldp with one bool per arena.  An
 * undersized buffer is partially filled (nread is clamped to what fits);
 * the exact-size check on the first line appears to select an error path
 * on elided lines.
 */
arenas_initialized_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
malloc_mutex_lock(&ctl_mtx);
if (*oldlenp != ctl_stats.narenas * sizeof(bool)) {
nread = (*oldlenp < ctl_stats.narenas * sizeof(bool))
? (unsigned)(*oldlenp / sizeof(bool)) : ctl_stats.narenas;
nread = ctl_stats.narenas;
for (i = 0; i < nread; i++)
((bool *)oldp)[i] = ctl_stats.arenas[i].initialized;
malloc_mutex_unlock(&ctl_mtx);
/*
 * mallctl("arenas.lg_dirty_mult"): get/set the default dirty-page multiplier
 * applied to newly created arenas.  NOTE(review): fragmentary, and oldval is
 * declared size_t but READ as ssize_t (same width; consider ssize_t —
 * verify against upstream).
 */
arenas_lg_dirty_mult_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
if (oldp != NULL && oldlenp != NULL) {
size_t oldval = arena_lg_dirty_mult_default_get();
READ(oldval, ssize_t);
if (newlen != sizeof(ssize_t)) {
if (arena_lg_dirty_mult_default_set(*(ssize_t *)newp)) {
/*
 * mallctl("arenas.decay_time"): get/set the default decay time for newly
 * created arenas; mirrors arenas_lg_dirty_mult_ctl (same size_t/ssize_t
 * declaration quirk).
 */
arenas_decay_time_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
if (oldp != NULL && oldlenp != NULL) {
size_t oldval = arena_decay_time_default_get();
READ(oldval, ssize_t);
if (newlen != sizeof(ssize_t)) {
if (arena_decay_time_default_set(*(ssize_t *)newp)) {
/*
 * mallctl("arenas.*"): read-only size-class geometry.  The *_i_* generators
 * index into static tables via mib[2], so they need no locking.
 */
CTL_RO_NL_GEN(arenas_quantum, QUANTUM, size_t)
CTL_RO_NL_GEN(arenas_page, PAGE, size_t)
CTL_RO_NL_CGEN(config_tcache, arenas_tcache_max, tcache_maxclass, size_t)
CTL_RO_NL_GEN(arenas_nbins, NBINS, unsigned)
CTL_RO_NL_CGEN(config_tcache, arenas_nhbins, nhbins, unsigned)
CTL_RO_NL_GEN(arenas_bin_i_size, arena_bin_info[mib[2]].reg_size, size_t)
CTL_RO_NL_GEN(arenas_bin_i_nregs, arena_bin_info[mib[2]].nregs, uint32_t)
CTL_RO_NL_GEN(arenas_bin_i_run_size, arena_bin_info[mib[2]].run_size, size_t)
/*
 * Index handler for "arenas.bin.<i>" (bounds check on elided lines).
 */
static const ctl_named_node_t *
arenas_bin_i_index(const size_t *mib, size_t miblen, size_t i)
return (super_arenas_bin_i_node);
CTL_RO_NL_GEN(arenas_nlruns, nlclasses, unsigned)
/* Large-run size classes start immediately after the NBINS small classes. */
CTL_RO_NL_GEN(arenas_lrun_i_size, index2size(NBINS+(szind_t)mib[2]), size_t)
/* Index handler for "arenas.lrun.<i>" (bounds check on elided lines). */
static const ctl_named_node_t *
arenas_lrun_i_index(const size_t *mib, size_t miblen, size_t i)
return (super_arenas_lrun_i_node);
CTL_RO_NL_GEN(arenas_nhchunks, nhclasses, unsigned)
/* Huge-chunk classes follow the small and large classes; type arg elided. */
CTL_RO_NL_GEN(arenas_hchunk_i_size, index2size(NBINS+nlclasses+(szind_t)mib[2]),
/* Index handler for "arenas.hchunk.<i>" (bounds check on elided lines). */
static const ctl_named_node_t *
arenas_hchunk_i_index(const size_t *mib, size_t miblen, size_t i)
return (super_arenas_hchunk_i_node);
/*
 * mallctl("arenas.extend"): create a new arena (creation call on elided
 * lines) and return its index — ctl_stats.narenas has presumably already
 * been incremented, hence the `- 1`.  Serialized under ctl_mtx.
 */
arenas_extend_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
malloc_mutex_lock(&ctl_mtx);
narenas = ctl_stats.narenas - 1;
READ(narenas, unsigned);
malloc_mutex_unlock(&ctl_mtx);
1957 /******************************************************************************/
/*
 * mallctl("prof.thread_active_init"): get/set whether newly created threads
 * start with profiling active.  The setter returns the previous value.
 * NOTE(review): fragmentary — config gating and READ/return elided.
 */
prof_thread_active_init_ctl(const size_t *mib, size_t miblen, void *oldp,
size_t *oldlenp, void *newp, size_t newlen)
if (newlen != sizeof(bool)) {
oldval = prof_thread_active_init_set(*(bool *)newp);
oldval = prof_thread_active_init_get();
/*
 * mallctl("prof.active"): get/set global profiling activation; setter
 * returns the previous value.
 */
prof_active_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
if (newlen != sizeof(bool)) {
oldval = prof_active_set(*(bool *)newp);
oldval = prof_active_get();
/*
 * mallctl("prof.dump"): dump a profile, to the filename written via newp or
 * to a default location when none is supplied (filename stays NULL).
 */
prof_dump_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
const char *filename = NULL;
WRITE(filename, const char *);
if (prof_mdump(filename)) {
/*
 * mallctl("prof.gdump"): get/set dump-on-gdump behavior; setter returns the
 * previous value.
 */
prof_gdump_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
if (newlen != sizeof(bool)) {
oldval = prof_gdump_set(*(bool *)newp);
oldval = prof_gdump_get();
/*
 * mallctl("prof.reset"): reset profile data, optionally changing the sample
 * rate.  lg_sample is clamped below the uint64_t bit width (max 63).
 */
prof_reset_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
size_t lg_sample = lg_prof_sample;
WRITE(lg_sample, size_t);
if (lg_sample >= (sizeof(uint64_t) << 3))
lg_sample = (sizeof(uint64_t) << 3) - 1;
prof_reset(tsd, lg_sample);
/* Read-only profiling parameters. */
CTL_RO_NL_CGEN(config_prof, prof_interval, prof_interval, uint64_t)
CTL_RO_NL_CGEN(config_prof, lg_prof_sample, lg_prof_sample, size_t)
2085 /******************************************************************************/
/*
 * mallctl("stats.*"): read-only statistics, refreshed on "epoch" writes.
 * The CTL_RO_CGEN/CTL_RO_GEN generators are defined on elided lines;
 * presumably they acquire ctl_mtx (unlike the *_NL_* variants above).
 * Per-arena handlers index ctl_stats.arenas[] via mib[2].  NOTE(review):
 * some generators' type arguments are on elided continuation lines.
 */
CTL_RO_CGEN(config_stats, stats_cactive, &stats_cactive, size_t *)
CTL_RO_CGEN(config_stats, stats_allocated, ctl_stats.allocated, size_t)
CTL_RO_CGEN(config_stats, stats_active, ctl_stats.active, size_t)
CTL_RO_CGEN(config_stats, stats_metadata, ctl_stats.metadata, size_t)
CTL_RO_CGEN(config_stats, stats_resident, ctl_stats.resident, size_t)
CTL_RO_CGEN(config_stats, stats_mapped, ctl_stats.mapped, size_t)
/* Per-arena snapshot fields (available even without config_stats). */
CTL_RO_GEN(stats_arenas_i_dss, ctl_stats.arenas[mib[2]].dss, const char *)
CTL_RO_GEN(stats_arenas_i_lg_dirty_mult, ctl_stats.arenas[mib[2]].lg_dirty_mult,
CTL_RO_GEN(stats_arenas_i_decay_time, ctl_stats.arenas[mib[2]].decay_time,
CTL_RO_GEN(stats_arenas_i_nthreads, ctl_stats.arenas[mib[2]].nthreads, unsigned)
CTL_RO_GEN(stats_arenas_i_pactive, ctl_stats.arenas[mib[2]].pactive, size_t)
CTL_RO_GEN(stats_arenas_i_pdirty, ctl_stats.arenas[mib[2]].pdirty, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_mapped,
ctl_stats.arenas[mib[2]].astats.mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_npurge,
ctl_stats.arenas[mib[2]].astats.npurge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_nmadvise,
ctl_stats.arenas[mib[2]].astats.nmadvise, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_purged,
ctl_stats.arenas[mib[2]].astats.purged, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_mapped,
ctl_stats.arenas[mib[2]].astats.metadata_mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_allocated,
ctl_stats.arenas[mib[2]].astats.metadata_allocated, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_allocated,
ctl_stats.arenas[mib[2]].allocated_small, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nmalloc,
ctl_stats.arenas[mib[2]].nmalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_ndalloc,
ctl_stats.arenas[mib[2]].ndalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nrequests,
ctl_stats.arenas[mib[2]].nrequests_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_allocated,
ctl_stats.arenas[mib[2]].astats.allocated_large, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nmalloc,
ctl_stats.arenas[mib[2]].astats.nmalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_ndalloc,
ctl_stats.arenas[mib[2]].astats.ndalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nrequests,
ctl_stats.arenas[mib[2]].astats.nrequests_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_allocated,
ctl_stats.arenas[mib[2]].astats.allocated_huge, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nmalloc,
ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_ndalloc,
ctl_stats.arenas[mib[2]].astats.ndalloc_huge, uint64_t)
/*
 * Huge nrequests deliberately reports nmalloc_huge (the in-tree
 * "Intentional." marker): every huge allocation maps to exactly one request,
 * so no separate counter is kept.
 */
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nrequests,
ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t) /* Intentional. */
/*
 * mallctl("stats.arenas.<i>.bins.<j>.*"): per-bin (small size class)
 * statistics, indexed by mib[2] (arena) and mib[4] (bin).  nfills/nflushes
 * additionally require config_tcache since only tcaches generate them.
 */
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nmalloc,
ctl_stats.arenas[mib[2]].bstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_ndalloc,
ctl_stats.arenas[mib[2]].bstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nrequests,
ctl_stats.arenas[mib[2]].bstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curregs,
ctl_stats.arenas[mib[2]].bstats[mib[4]].curregs, size_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nfills,
ctl_stats.arenas[mib[2]].bstats[mib[4]].nfills, uint64_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nflushes,
ctl_stats.arenas[mib[2]].bstats[mib[4]].nflushes, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nruns,
ctl_stats.arenas[mib[2]].bstats[mib[4]].nruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nreruns,
ctl_stats.arenas[mib[2]].bstats[mib[4]].reruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curruns,
ctl_stats.arenas[mib[2]].bstats[mib[4]].curruns, size_t)
/* Index handler for the <j> component (bounds check on elided lines). */
static const ctl_named_node_t *
stats_arenas_i_bins_j_index(const size_t *mib, size_t miblen, size_t j)
return (super_stats_arenas_i_bins_j_node);
/*
 * mallctl("stats.arenas.<i>.lruns.<j>.*"): per-large-run size class
 * statistics, indexed by mib[2] (arena) and mib[4] (size class).
 */
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nmalloc,
ctl_stats.arenas[mib[2]].lstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_ndalloc,
ctl_stats.arenas[mib[2]].lstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nrequests,
ctl_stats.arenas[mib[2]].lstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_curruns,
ctl_stats.arenas[mib[2]].lstats[mib[4]].curruns, size_t)
/* Index handler for the <j> component (bounds check on elided lines). */
static const ctl_named_node_t *
stats_arenas_i_lruns_j_index(const size_t *mib, size_t miblen, size_t j)
return (super_stats_arenas_i_lruns_j_node);
/*
 * mallctl("stats.arenas.<i>.hchunks.<j>.*"): per-huge-chunk size class
 * statistics.  As with huge nrequests above, the in-tree "Intentional."
 * marker shows nrequests deliberately reporting nmalloc (one request per
 * huge allocation); its type argument is on an elided continuation line.
 */
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nmalloc,
ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_ndalloc,
ctl_stats.arenas[mib[2]].hstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nrequests,
ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, /* Intentional. */
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_curhchunks,
ctl_stats.arenas[mib[2]].hstats[mib[4]].curhchunks, size_t)
/* Index handler for the <j> component (bounds check on elided lines). */
static const ctl_named_node_t *
stats_arenas_i_hchunks_j_index(const size_t *mib, size_t miblen, size_t j)
return (super_stats_arenas_i_hchunks_j_node);
/*
 * Index handler for "stats.arenas.<i>": under ctl_mtx, rejects indices past
 * the snapshotted arena count and arenas that were never initialized.  The
 * `>` (not `>=`) bound matches arena_i_index, where i == narenas is the
 * "all arenas" pseudo-index.
 */
static const ctl_named_node_t *
stats_arenas_i_index(const size_t *mib, size_t miblen, size_t i)
const ctl_named_node_t * ret;
malloc_mutex_lock(&ctl_mtx);
if (i > ctl_stats.narenas || !ctl_stats.arenas[i].initialized) {
ret = super_stats_arenas_i_node;
malloc_mutex_unlock(&ctl_mtx);