/******************************************************************************/
#ifdef JEMALLOC_H_TYPES

/* Forward typedefs for the statistics structures defined below. */
typedef struct tcache_bin_stats_s tcache_bin_stats_t;
typedef struct malloc_bin_stats_s malloc_bin_stats_t;
typedef struct malloc_large_stats_s malloc_large_stats_t;
typedef struct malloc_huge_stats_s malloc_huge_stats_t;
typedef struct arena_stats_s arena_stats_t;
typedef struct chunk_stats_s chunk_stats_t;

#endif /* JEMALLOC_H_TYPES */
/******************************************************************************/
#ifdef JEMALLOC_H_STRUCTS

struct tcache_bin_stats_s {
	/*
	 * Number of allocation requests that corresponded to the size of this
	 * bin.
	 */
	uint64_t	nrequests;
};

struct malloc_bin_stats_s {
	/*
	 * Total number of allocation/deallocation requests served directly by
	 * the bin.  Note that tcache may allocate an object, then recycle it
	 * many times, resulting many increments to nrequests, but only one
	 * each to nmalloc and ndalloc.
	 */
	uint64_t	nmalloc;
	uint64_t	ndalloc;

	/*
	 * Number of allocation requests that correspond to the size of this
	 * bin.  This includes requests served by tcache, though tcache only
	 * periodically merges into this counter.
	 */
	uint64_t	nrequests;

	/*
	 * Current number of regions of this size class, including regions
	 * currently cached by tcache.
	 */
	size_t		curregs;

	/* Number of tcache fills from this bin. */
	uint64_t	nfills;

	/* Number of tcache flushes to this bin. */
	uint64_t	nflushes;

	/* Total number of runs created for this bin's size class. */
	uint64_t	nruns;

	/*
	 * Total number of runs reused by extracting them from the runs tree for
	 * this bin's size class.
	 */
	uint64_t	reruns;

	/* Current number of runs in this bin. */
	size_t		curruns;
};

struct malloc_large_stats_s {
	/*
	 * Total number of allocation/deallocation requests served directly by
	 * the arena.  Note that tcache may allocate an object, then recycle it
	 * many times, resulting many increments to nrequests, but only one
	 * each to nmalloc and ndalloc.
	 */
	uint64_t	nmalloc;
	uint64_t	ndalloc;

	/*
	 * Number of allocation requests that correspond to this size class.
	 * This includes requests served by tcache, though tcache only
	 * periodically merges into this counter.
	 */
	uint64_t	nrequests;

	/*
	 * Current number of runs of this size class, including runs currently
	 * cached by tcache.
	 */
	size_t		curruns;
};

struct malloc_huge_stats_s {
	/*
	 * Total number of allocation/deallocation requests served directly by
	 * the arena.
	 */
	uint64_t	nmalloc;
	uint64_t	ndalloc;

	/* Current number of (multi-)chunk allocations of this size class. */
	size_t		curhchunks;
};

struct arena_stats_s {
	/* Number of bytes currently mapped. */
	size_t		mapped;

	/*
	 * Total number of purge sweeps, total number of madvise calls made,
	 * and total pages purged in order to keep dirty unused memory under
	 * control.
	 */
	uint64_t	npurge;
	uint64_t	nmadvise;
	uint64_t	purged;

	/*
	 * Number of bytes currently mapped purely for metadata purposes, and
	 * number of bytes currently allocated for internal metadata.
	 */
	size_t		metadata_mapped;
	size_t		metadata_allocated; /* Protected via atomic_*_z(). */

	/* Per-size-category statistics. */
	size_t		allocated_large;
	uint64_t	nmalloc_large;
	uint64_t	ndalloc_large;
	uint64_t	nrequests_large;

	size_t		allocated_huge;
	uint64_t	nmalloc_huge;
	uint64_t	ndalloc_huge;

	/* One element for each large size class. */
	malloc_large_stats_t	*lstats;

	/* One element for each huge size class. */
	malloc_huge_stats_t	*hstats;
};

#endif /* JEMALLOC_H_STRUCTS */
/******************************************************************************/
#ifdef JEMALLOC_H_EXTERNS

/* If true, print statistics at exit (set via the "opt.stats_print" option). */
extern bool	opt_stats_print;

/* Cumulative number of active chunk bytes; read/written via atomic_*_z(). */
extern size_t	stats_cactive;

/*
 * Emit malloc statistics through the write callback; cbopaque is passed
 * through to the callback, opts selects which sections to print.
 */
void	stats_print(void (*write)(void *, const char *), void *cbopaque,
    const char *opts);

#endif /* JEMALLOC_H_EXTERNS */
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES

#ifndef JEMALLOC_ENABLE_INLINE
size_t	stats_cactive_get(void);
void	stats_cactive_add(size_t size);
void	stats_cactive_sub(size_t size);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_STATS_C_))
/* Atomically read the global active-bytes statistic. */
JEMALLOC_INLINE size_t
stats_cactive_get(void)
{

	return (atomic_read_z(&stats_cactive));
}

/* Atomically add size (a non-zero multiple of the chunk size) to cactive. */
JEMALLOC_INLINE void
stats_cactive_add(size_t size)
{
	UNUSED size_t cactive;

	assert(size > 0);
	assert((size & chunksize_mask) == 0);

	cactive = atomic_add_z(&stats_cactive, size);
	/* Catch size_t wrap-around of the counter. */
	assert(cactive - size < cactive);
}

/* Atomically subtract size (a non-zero multiple of the chunk size). */
JEMALLOC_INLINE void
stats_cactive_sub(size_t size)
{
	UNUSED size_t cactive;

	assert(size > 0);
	assert((size & chunksize_mask) == 0);

	cactive = atomic_sub_z(&stats_cactive, size);
	/* Catch size_t underflow of the counter. */
	assert(cactive + size > cactive);
}
#endif

#endif /* JEMALLOC_H_INLINES */
/******************************************************************************/