#ifndef JEMALLOC_INTERNAL_ARENA_STATS_H
#define JEMALLOC_INTERNAL_ARENA_STATS_H

#include "jemalloc/internal/atomic.h"
#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/mutex_prof.h"
#include "jemalloc/internal/sc.h"

JEMALLOC_DIAGNOSTIC_DISABLE_SPURIOUS
/*
 * In those architectures that support 64-bit atomics, we use atomic updates for
 * our 64-bit values.  Otherwise, we use a plain uint64_t and synchronize
 * externally.
 */
#ifdef JEMALLOC_ATOMIC_U64
typedef atomic_u64_t arena_stats_u64_t;
#else
/* Must hold the arena stats mutex while reading atomically. */
typedef uint64_t arena_stats_u64_t;
#endif
/* Per-size-class statistics for large allocations. */
typedef struct arena_stats_large_s arena_stats_large_t;
struct arena_stats_large_s {
	/*
	 * Total number of allocation/deallocation requests served directly by
	 * the arena.
	 */
	arena_stats_u64_t	nmalloc;
	arena_stats_u64_t	ndalloc;

	/*
	 * Number of allocation requests that correspond to this size class.
	 * This includes requests served by tcache, though tcache only
	 * periodically merges into this counter.
	 */
	arena_stats_u64_t	nrequests; /* Partially derived. */
	/*
	 * Number of tcache fills / flushes for large (similarly, periodically
	 * merged).  Note that there is no large tcache batch-fill currently
	 * (i.e. only fill 1 at a time); however flush may be batched.
	 */
	arena_stats_u64_t	nfills; /* Partially derived. */
	arena_stats_u64_t	nflushes; /* Partially derived. */

	/* Current number of allocations of this size class. */
	size_t		curlextents; /* Derived. */
};
/* Counters for one decay stream (dirty or muzzy purging). */
typedef struct arena_stats_decay_s arena_stats_decay_t;
struct arena_stats_decay_s {
	/* Total number of purge sweeps. */
	arena_stats_u64_t	npurge;
	/* Total number of madvise calls made. */
	arena_stats_u64_t	nmadvise;
	/* Total number of pages purged. */
	arena_stats_u64_t	purged;
};
/* Per-pszind extent counts/bytes for the dirty, muzzy, and retained heaps. */
typedef struct arena_stats_extents_s arena_stats_extents_t;
struct arena_stats_extents_s {
	/*
	 * Stats for a given index in the range [0, SC_NPSIZES] in an extents_t.
	 * We track both bytes and # of extents: two extents in the same bucket
	 * may have different sizes if adjacent size classes differ by more than
	 * a page, so bytes cannot always be derived from # of extents.
	 */
	atomic_zu_t ndirty;
	atomic_zu_t dirty_bytes;
	atomic_zu_t nmuzzy;
	atomic_zu_t muzzy_bytes;
	atomic_zu_t nretained;
	atomic_zu_t retained_bytes;
};
/*
 * Arena stats.  Note that fields marked "derived" are not directly maintained
 * within the arena code; rather their values are derived during stats merge
 * requests.
 */
typedef struct arena_stats_s arena_stats_t;
struct arena_stats_s {
#ifndef JEMALLOC_ATOMIC_U64
	/* Guards all arena_stats_u64_t fields when 64-bit atomics are absent. */
	malloc_mutex_t		mtx;
#endif

	/* Number of bytes currently mapped, excluding retained memory. */
	atomic_zu_t		mapped; /* Partially derived. */

	/*
	 * Number of unused virtual memory bytes currently retained.  Retained
	 * bytes are technically mapped (though always decommitted or purged),
	 * but they are excluded from the mapped statistic (above).
	 */
	atomic_zu_t		retained; /* Derived. */

	/* Number of extent_t structs allocated by base, but not being used. */
	atomic_zu_t		extent_avail;

	/* Purging counters for the dirty and muzzy decay streams. */
	arena_stats_decay_t	decay_dirty;
	arena_stats_decay_t	decay_muzzy;

	atomic_zu_t		base; /* Derived. */
	atomic_zu_t		internal;
	atomic_zu_t		resident; /* Derived. */
	atomic_zu_t		metadata_thp;

	/* Aggregates over all large size classes (see lstats below). */
	atomic_zu_t		allocated_large; /* Derived. */
	arena_stats_u64_t	nmalloc_large; /* Derived. */
	arena_stats_u64_t	ndalloc_large; /* Derived. */
	arena_stats_u64_t	nfills_large; /* Derived. */
	arena_stats_u64_t	nflushes_large; /* Derived. */
	arena_stats_u64_t	nrequests_large; /* Derived. */

	/* VM space had to be leaked (undocumented).  Normally 0. */
	atomic_zu_t		abandoned_vm;

	/* Number of bytes cached in tcache associated with this arena. */
	atomic_zu_t		tcache_bytes; /* Derived. */

	mutex_prof_data_t mutex_prof_data[mutex_prof_num_arena_mutexes];

	/* One element for each large size class. */
	arena_stats_large_t	lstats[SC_NSIZES - SC_NBINS];

	/* Arena uptime. */
	nstime_t		uptime;
};
static inline bool
Packit 345191
arena_stats_init(tsdn_t *tsdn, arena_stats_t *arena_stats) {
Packit 345191
	if (config_debug) {
Packit 345191
		for (size_t i = 0; i < sizeof(arena_stats_t); i++) {
Packit 345191
			assert(((char *)arena_stats)[i] == 0);
Packit 345191
		}
Packit 345191
	}
Packit 345191
#ifndef JEMALLOC_ATOMIC_U64
Packit 345191
	if (malloc_mutex_init(&arena_stats->mtx, "arena_stats",
Packit 345191
	    WITNESS_RANK_ARENA_STATS, malloc_mutex_rank_exclusive)) {
Packit 345191
		return true;
Packit 345191
	}
Packit 345191
#endif
Packit 345191
	/* Memory is zeroed, so there is no need to clear stats. */
Packit 345191
	return false;
Packit 345191
}
Packit 345191
Packit 345191
/*
 * Acquire the stats mutex.  A no-op when 64-bit atomics are available,
 * since counters are then updated atomically without a lock.
 */
static inline void
arena_stats_lock(tsdn_t *tsdn, arena_stats_t *arena_stats) {
#ifndef JEMALLOC_ATOMIC_U64
	malloc_mutex_lock(tsdn, &arena_stats->mtx);
#endif
}
/* Release the stats mutex; no-op when 64-bit atomics are available. */
static inline void
arena_stats_unlock(tsdn_t *tsdn, arena_stats_t *arena_stats) {
#ifndef JEMALLOC_ATOMIC_U64
	malloc_mutex_unlock(tsdn, &arena_stats->mtx);
#endif
}
/*
 * Read a 64-bit stats counter: relaxed atomic load when 64-bit atomics
 * exist, otherwise a plain read under the stats mutex.
 */
static inline uint64_t
arena_stats_read_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,
    arena_stats_u64_t *p) {
#ifdef JEMALLOC_ATOMIC_U64
	return atomic_load_u64(p, ATOMIC_RELAXED);
#else
	/* Plain uint64_t: the caller must hold the stats mutex. */
	malloc_mutex_assert_owner(tsdn, &arena_stats->mtx);
	return *p;
#endif
}
/*
 * Add x to a 64-bit stats counter: relaxed fetch-add when 64-bit atomics
 * exist, otherwise a plain increment under the stats mutex.
 */
static inline void
arena_stats_add_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,
    arena_stats_u64_t *p, uint64_t x) {
#ifdef JEMALLOC_ATOMIC_U64
	atomic_fetch_add_u64(p, x, ATOMIC_RELAXED);
#else
	/* Plain uint64_t: the caller must hold the stats mutex. */
	malloc_mutex_assert_owner(tsdn, &arena_stats->mtx);
	*p += x;
#endif
}
/*
 * Subtract x from a 64-bit stats counter; asserts (in debug builds) that
 * the counter does not underflow.
 */
static inline void
arena_stats_sub_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,
    arena_stats_u64_t *p, uint64_t x) {
#ifdef JEMALLOC_ATOMIC_U64
	uint64_t r = atomic_fetch_sub_u64(p, x, ATOMIC_RELAXED);
	/* r is the pre-subtraction value; r - x must not wrap. */
	assert(r - x <= r);
#else
	/* Plain uint64_t: the caller must hold the stats mutex. */
	malloc_mutex_assert_owner(tsdn, &arena_stats->mtx);
	*p -= x;
	/* *p is now the post-subtraction value; adding x back must not wrap. */
	assert(*p + x >= *p);
#endif
}
/*
Packit 345191
 * Non-atomically sets *dst += src.  *dst needs external synchronization.
Packit 345191
 * This lets us avoid the cost of a fetch_add when its unnecessary (note that
Packit 345191
 * the types here are atomic).
Packit 345191
 */
Packit 345191
static inline void
Packit 345191
arena_stats_accum_u64(arena_stats_u64_t *dst, uint64_t src) {
Packit 345191
#ifdef JEMALLOC_ATOMIC_U64
Packit 345191
	uint64_t cur_dst = atomic_load_u64(dst, ATOMIC_RELAXED);
Packit 345191
	atomic_store_u64(dst, src + cur_dst, ATOMIC_RELAXED);
Packit 345191
#else
Packit 345191
	*dst += src;
Packit 345191
#endif
Packit 345191
}
Packit 345191
Packit 345191
/*
 * Read a size_t stats counter.  Both branches perform the same relaxed
 * load; the mutex branch additionally asserts that the caller holds the
 * stats mutex, matching the synchronization regime of the u64 counters.
 */
static inline size_t
arena_stats_read_zu(tsdn_t *tsdn, arena_stats_t *arena_stats,
    atomic_zu_t *p) {
#ifdef JEMALLOC_ATOMIC_U64
	return atomic_load_zu(p, ATOMIC_RELAXED);
#else
	malloc_mutex_assert_owner(tsdn, &arena_stats->mtx);
	return atomic_load_zu(p, ATOMIC_RELAXED);
#endif
}
/*
 * Add x to a size_t stats counter.  With 64-bit atomics, use a true
 * fetch-add; otherwise the stats mutex serializes updates, so a cheaper
 * load + store suffices.
 */
static inline void
arena_stats_add_zu(tsdn_t *tsdn, arena_stats_t *arena_stats,
    atomic_zu_t *p, size_t x) {
#ifdef JEMALLOC_ATOMIC_U64
	atomic_fetch_add_zu(p, x, ATOMIC_RELAXED);
#else
	malloc_mutex_assert_owner(tsdn, &arena_stats->mtx);
	size_t cur = atomic_load_zu(p, ATOMIC_RELAXED);
	atomic_store_zu(p, cur + x, ATOMIC_RELAXED);
#endif
}
/*
 * Subtract x from a size_t stats counter; asserts (in debug builds, in the
 * atomic branch) that the counter does not underflow.
 */
static inline void
arena_stats_sub_zu(tsdn_t *tsdn, arena_stats_t *arena_stats,
    atomic_zu_t *p, size_t x) {
#ifdef JEMALLOC_ATOMIC_U64
	size_t r = atomic_fetch_sub_zu(p, x, ATOMIC_RELAXED);
	/* r is the pre-subtraction value; r - x must not wrap. */
	assert(r - x <= r);
#else
	malloc_mutex_assert_owner(tsdn, &arena_stats->mtx);
	size_t cur = atomic_load_zu(p, ATOMIC_RELAXED);
	atomic_store_zu(p, cur - x, ATOMIC_RELAXED);
#endif
}
/* Like the _u64 variant, needs an externally synchronized *dst. */
Packit 345191
static inline void
Packit 345191
arena_stats_accum_zu(atomic_zu_t *dst, size_t src) {
Packit 345191
	size_t cur_dst = atomic_load_zu(dst, ATOMIC_RELAXED);
Packit 345191
	atomic_store_zu(dst, src + cur_dst, ATOMIC_RELAXED);
Packit 345191
}
Packit 345191
Packit 345191
static inline void
Packit 345191
arena_stats_large_flush_nrequests_add(tsdn_t *tsdn, arena_stats_t *arena_stats,
Packit 345191
    szind_t szind, uint64_t nrequests) {
Packit 345191
	arena_stats_lock(tsdn, arena_stats);
Packit 345191
	arena_stats_large_t *lstats = &arena_stats->lstats[szind - SC_NBINS];
Packit 345191
	arena_stats_add_u64(tsdn, arena_stats, &lstats->nrequests, nrequests);
Packit 345191
	arena_stats_add_u64(tsdn, arena_stats, &lstats->nflushes, 1);
Packit 345191
	arena_stats_unlock(tsdn, arena_stats);
Packit 345191
}
Packit 345191
Packit 345191
/* Add size bytes to the arena's mapped statistic, under the stats lock. */
static inline void
arena_stats_mapped_add(tsdn_t *tsdn, arena_stats_t *arena_stats, size_t size) {
	arena_stats_lock(tsdn, arena_stats);
	arena_stats_add_zu(tsdn, arena_stats, &arena_stats->mapped, size);
	arena_stats_unlock(tsdn, arena_stats);
}
#endif /* JEMALLOC_INTERNAL_ARENA_STATS_H */