1 #define JEMALLOC_EXTENT_C_
2 #include "jemalloc/internal/jemalloc_internal.h"
4 /******************************************************************************/
10 extent_size_quantize_floor(size_t size) {
16 ind = size2index(size + 1);
18 /* Avoid underflow. */
19 return (index2size(0));
21 ret = index2size(ind - 1);
/*
 * Round up to the nearest size class, i.e. the smallest size class >= size
 * (identity when size is already a quantized size class).
 * NOTE(review): truncated in this chunk (return type, locals, the
 * "if (ret < size)" guard, and braces were missing); restored per upstream.
 */
size_t
extent_size_quantize_ceil(size_t size) {
	size_t ret;

	assert(size > 0);

	ret = extent_size_quantize_floor(size);
	if (ret < size) {
		/*
		 * Skip a quantization that may have an adequately large extent,
		 * because under-sized extents may be mixed in.  This only
		 * happens when an unusual size is requested, i.e. for aligned
		 * allocation, and is just one of several places where linear
		 * search would potentially find sufficiently aligned available
		 * memory somewhere lower.
		 */
		ret = index2size(size2index(ret + 1));
	}
	return (ret);
}
48 extent_sz_comp(const extent_node_t *a, const extent_node_t *b)
50 size_t a_qsize = extent_size_quantize_floor(extent_node_size_get(a));
51 size_t b_qsize = extent_size_quantize_floor(extent_node_size_get(b));
53 return ((a_qsize > b_qsize) - (a_qsize < b_qsize));
57 extent_sn_comp(const extent_node_t *a, const extent_node_t *b)
59 size_t a_sn = extent_node_sn_get(a);
60 size_t b_sn = extent_node_sn_get(b);
62 return ((a_sn > b_sn) - (a_sn < b_sn));
66 extent_ad_comp(const extent_node_t *a, const extent_node_t *b)
68 uintptr_t a_addr = (uintptr_t)extent_node_addr_get(a);
69 uintptr_t b_addr = (uintptr_t)extent_node_addr_get(b);
71 return ((a_addr > b_addr) - (a_addr < b_addr));
75 extent_szsnad_comp(const extent_node_t *a, const extent_node_t *b)
79 ret = extent_sz_comp(a, b);
83 ret = extent_sn_comp(a, b);
87 ret = extent_ad_comp(a, b);
91 /* Generate red-black tree functions. */
92 rb_gen(, extent_tree_szsnad_, extent_tree_t, extent_node_t, szsnad_link,
/* Generate red-black tree functions for the address-ordered extent tree. */
rb_gen(, extent_tree_ad_, extent_tree_t, extent_node_t, ad_link, extent_ad_comp)