/******************************************************************************/
#ifdef JEMALLOC_H_TYPES

/* Forward typedefs for the statistics structures defined in the STRUCTS
 * section below; callers may hold pointers without seeing the layouts. */
typedef struct tcache_bin_stats_s tcache_bin_stats_t;
typedef struct malloc_bin_stats_s malloc_bin_stats_t;
typedef struct malloc_large_stats_s malloc_large_stats_t;
typedef struct arena_stats_s arena_stats_t;
typedef struct chunk_stats_s chunk_stats_t;

#endif /* JEMALLOC_H_TYPES */
/******************************************************************************/
#ifdef JEMALLOC_H_STRUCTS

struct tcache_bin_stats_s {
	/*
	 * Number of allocation requests that corresponded to the size of this
	 * bin.
	 */
	uint64_t	nrequests;
};
struct malloc_bin_stats_s {
	/*
	 * Current number of bytes allocated, including objects currently
	 * cached by tcache.
	 */
	size_t		allocated;

	/*
	 * Total number of allocation/deallocation requests served directly by
	 * the bin.  Note that tcache may allocate an object, then recycle it
	 * many times, resulting many increments to nrequests, but only one
	 * each to nmalloc and ndalloc.
	 */
	uint64_t	nmalloc;
	uint64_t	ndalloc;

	/*
	 * Number of allocation requests that correspond to the size of this
	 * bin.  This includes requests served by tcache, though tcache only
	 * periodically merges into this counter.
	 */
	uint64_t	nrequests;

	/* Number of tcache fills from this bin. */
	uint64_t	nfills;

	/* Number of tcache flushes to this bin. */
	uint64_t	nflushes;

	/* Total number of runs created for this bin's size class. */
	uint64_t	nruns;

	/*
	 * Total number of runs reused by extracting them from the runs tree for
	 * this bin's size class.
	 */
	uint64_t	reruns;

	/* Current number of runs in this bin. */
	size_t		curruns;
};
struct malloc_large_stats_s {
	/*
	 * Total number of allocation/deallocation requests served directly by
	 * the arena.  Note that tcache may allocate an object, then recycle it
	 * many times, resulting many increments to nrequests, but only one
	 * each to nmalloc and ndalloc.
	 */
	uint64_t	nmalloc;
	uint64_t	ndalloc;

	/*
	 * Number of allocation requests that correspond to this size class.
	 * This includes requests served by tcache, though tcache only
	 * periodically merges into this counter.
	 */
	uint64_t	nrequests;

	/* Current number of runs of this size class. */
	size_t		curruns;
};
struct arena_stats_s {
86
/* Number of bytes currently mapped. */
90
* Total number of purge sweeps, total number of madvise calls made,
91
* and total pages purged in order to keep dirty unused memory under
98
/* Per-size-category statistics. */
99
size_t allocated_large;
100
uint64_t nmalloc_large;
101
uint64_t ndalloc_large;
102
uint64_t nrequests_large;
105
* One element for each possible size class, including sizes that
106
* overlap with bin size classes. This is necessary because ipalloc()
107
* sometimes has to use such large objects in order to assure proper
110
malloc_large_stats_t *lstats;
113
struct chunk_stats_s {
	/* Number of chunks that were allocated. */
	uint64_t	nchunks;

	/* High-water mark for number of chunks allocated. */
	size_t		highchunks;

	/*
	 * Current number of chunks allocated.  This value isn't maintained for
	 * any other purpose, so keep track of it in order to be able to set
	 * highchunks.
	 */
	size_t		curchunks;
};

#endif /* JEMALLOC_H_STRUCTS */
/******************************************************************************/
#ifdef JEMALLOC_H_EXTERNS

extern bool	opt_stats_print;

/* Total active bytes; updated via the stats_cactive_*() inlines below. */
extern size_t	stats_cactive;

/*
 * Print malloc statistics via the write(cbopaque, str) callback; opts selects
 * which statistic categories to emit.
 */
void	stats_print(void (*write)(void *, const char *), void *cbopaque,
    const char *opts);

#endif /* JEMALLOC_H_EXTERNS */
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES

#ifndef JEMALLOC_ENABLE_INLINE
size_t	stats_cactive_get(void);
void	stats_cactive_add(size_t size);
void	stats_cactive_sub(size_t size);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_STATS_C_))
/* Atomically read the current total of active bytes. */
JEMALLOC_INLINE size_t
stats_cactive_get(void)
{

	return (atomic_read_z(&stats_cactive));
}

/* Atomically add size bytes to the active-bytes total. */
JEMALLOC_INLINE void
stats_cactive_add(size_t size)
{

	atomic_add_z(&stats_cactive, size);
}

/* Atomically subtract size bytes from the active-bytes total. */
JEMALLOC_INLINE void
stats_cactive_sub(size_t size)
{

	atomic_sub_z(&stats_cactive, size);
}
#endif

#endif /* JEMALLOC_H_INLINES */
/******************************************************************************/