#ifndef JEMALLOC_INTERNAL_ARENA_INLINES_B_H
#define JEMALLOC_INTERNAL_ARENA_INLINES_B_H

#include "jemalloc/internal/jemalloc_internal_types.h"
#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/rtree.h"
#include "jemalloc/internal/sc.h"
#include "jemalloc/internal/sz.h"
#include "jemalloc/internal/ticker.h"
JEMALLOC_ALWAYS_INLINE bool
Packit 345191
arena_has_default_hooks(arena_t *arena) {
Packit 345191
	return (extent_hooks_get(arena) == &extent_hooks_default);
Packit 345191
}
Packit 345191
Packit 345191
/*
 * Pick the arena to allocate from.  An explicitly requested arena always
 * wins; otherwise huge requests may be routed to the dedicated huge arena.
 */
JEMALLOC_ALWAYS_INLINE arena_t *
arena_choose_maybe_huge(tsd_t *tsd, arena_t *arena, size_t size) {
	if (arena != NULL) {
		return arena;
	}

	/*
	 * Use the dedicated huge arena only when both hold: 1) the caller
	 * relies on automatic arena selection (arena == NULL above), and
	 * 2) the thread is not assigned to a manual arena.
	 */
	if (unlikely(size >= oversize_threshold)) {
		arena_t *cur_arena = tsd_arena_get(tsd);
		if (cur_arena == NULL || arena_is_auto(cur_arena)) {
			return arena_choose_huge(tsd);
		}
	}

	return arena_choose(tsd, NULL);
}

/*
 * Fetch the profiling tctx for ptr.  Large (non-slab) allocations carry a
 * per-extent tctx; small (slab) allocations share the sentinel value 1U.
 */
JEMALLOC_ALWAYS_INLINE prof_tctx_t *
arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
	cassert(config_prof);
	assert(ptr != NULL);

	/* NULL-ness of alloc_ctx resolves statically at inline sites. */
	if (alloc_ctx == NULL) {
		const extent_t *extent = iealloc(tsdn, ptr);
		if (unlikely(!extent_slab_get(extent))) {
			return large_prof_tctx_get(tsdn, extent);
		}
	} else if (unlikely(!alloc_ctx->slab)) {
		return large_prof_tctx_get(tsdn, iealloc(tsdn, ptr));
	}
	return (prof_tctx_t *)(uintptr_t)1U;
}

JEMALLOC_ALWAYS_INLINE void
Packit 345191
arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
Packit 345191
    alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx) {
Packit 345191
	cassert(config_prof);
Packit 345191
	assert(ptr != NULL);
Packit 345191
Packit 345191
	/* Static check. */
Packit 345191
	if (alloc_ctx == NULL) {
Packit 345191
		extent_t *extent = iealloc(tsdn, ptr);
Packit 345191
		if (unlikely(!extent_slab_get(extent))) {
Packit 345191
			large_prof_tctx_set(tsdn, extent, tctx);
Packit 345191
		}
Packit 345191
	} else {
Packit 345191
		if (unlikely(!alloc_ctx->slab)) {
Packit 345191
			large_prof_tctx_set(tsdn, iealloc(tsdn, ptr), tctx);
Packit 345191
		}
Packit 345191
	}
Packit 345191
}
Packit 345191
Packit 345191
static inline void
Packit 345191
arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx) {
Packit 345191
	cassert(config_prof);
Packit 345191
	assert(ptr != NULL);
Packit 345191
Packit 345191
	extent_t *extent = iealloc(tsdn, ptr);
Packit 345191
	assert(!extent_slab_get(extent));
Packit 345191
Packit 345191
	large_prof_tctx_reset(tsdn, extent);
Packit 345191
}
Packit 345191
Packit 345191
JEMALLOC_ALWAYS_INLINE nstime_t
Packit 345191
arena_prof_alloc_time_get(tsdn_t *tsdn, const void *ptr,
Packit 345191
    alloc_ctx_t *alloc_ctx) {
Packit 345191
	cassert(config_prof);
Packit 345191
	assert(ptr != NULL);
Packit 345191
Packit 345191
	extent_t *extent = iealloc(tsdn, ptr);
Packit 345191
	/*
Packit 345191
	 * Unlike arena_prof_prof_tctx_{get, set}, we only call this once we're
Packit 345191
	 * sure we have a sampled allocation.
Packit 345191
	 */
Packit 345191
	assert(!extent_slab_get(extent));
Packit 345191
	return large_prof_alloc_time_get(extent);
Packit 345191
}
Packit 345191
Packit 345191
JEMALLOC_ALWAYS_INLINE void
Packit 345191
arena_prof_alloc_time_set(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx,
Packit 345191
    nstime_t t) {
Packit 345191
	cassert(config_prof);
Packit 345191
	assert(ptr != NULL);
Packit 345191
Packit 345191
	extent_t *extent = iealloc(tsdn, ptr);
Packit 345191
	assert(!extent_slab_get(extent));
Packit 345191
	large_prof_alloc_time_set(extent, t);
Packit 345191
}
Packit 345191
Packit 345191
JEMALLOC_ALWAYS_INLINE void
Packit 345191
arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks) {
Packit 345191
	tsd_t *tsd;
Packit 345191
	ticker_t *decay_ticker;
Packit 345191
Packit 345191
	if (unlikely(tsdn_null(tsdn))) {
Packit 345191
		return;
Packit 345191
	}
Packit 345191
	tsd = tsdn_tsd(tsdn);
Packit 345191
	decay_ticker = decay_ticker_get(tsd, arena_ind_get(arena));
Packit 345191
	if (unlikely(decay_ticker == NULL)) {
Packit 345191
		return;
Packit 345191
	}
Packit 345191
	if (unlikely(ticker_ticks(decay_ticker, nticks))) {
Packit 345191
		arena_decay(tsdn, arena, false, false);
Packit 345191
	}
Packit 345191
}
Packit 345191
Packit 345191
JEMALLOC_ALWAYS_INLINE void
Packit 345191
arena_decay_tick(tsdn_t *tsdn, arena_t *arena) {
Packit 345191
	malloc_mutex_assert_not_owner(tsdn, &arena->decay_dirty.mtx);
Packit 345191
	malloc_mutex_assert_not_owner(tsdn, &arena->decay_muzzy.mtx);
Packit 345191
Packit 345191
	arena_decay_ticks(tsdn, arena, 1);
Packit 345191
}
Packit 345191
Packit 345191
/* Purge a single extent to retained / unmapped directly. */
Packit 345191
JEMALLOC_ALWAYS_INLINE void
Packit 345191
arena_decay_extent(tsdn_t *tsdn,arena_t *arena, extent_hooks_t **r_extent_hooks,
Packit 345191
    extent_t *extent) {
Packit 345191
	size_t extent_size = extent_size_get(extent);
Packit 345191
	extent_dalloc_wrapper(tsdn, arena,
Packit 345191
	    r_extent_hooks, extent);
Packit 345191
	if (config_stats) {
Packit 345191
		/* Update stats accordingly. */
Packit 345191
		arena_stats_lock(tsdn, &arena->stats);
Packit 345191
		arena_stats_add_u64(tsdn, &arena->stats,
Packit 345191
		    &arena->decay_dirty.stats->nmadvise, 1);
Packit 345191
		arena_stats_add_u64(tsdn, &arena->stats,
Packit 345191
		    &arena->decay_dirty.stats->purged, extent_size >> LG_PAGE);
Packit 345191
		arena_stats_sub_zu(tsdn, &arena->stats, &arena->stats.mapped,
Packit 345191
		    extent_size);
Packit 345191
		arena_stats_unlock(tsdn, &arena->stats);
Packit 345191
	}
Packit 345191
}
Packit 345191
Packit 345191
JEMALLOC_ALWAYS_INLINE void *
Packit 345191
arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero,
Packit 345191
    tcache_t *tcache, bool slow_path) {
Packit 345191
	assert(!tsdn_null(tsdn) || tcache == NULL);
Packit 345191
Packit 345191
	if (likely(tcache != NULL)) {
Packit 345191
		if (likely(size <= SC_SMALL_MAXCLASS)) {
Packit 345191
			return tcache_alloc_small(tsdn_tsd(tsdn), arena,
Packit 345191
			    tcache, size, ind, zero, slow_path);
Packit 345191
		}
Packit 345191
		if (likely(size <= tcache_maxclass)) {
Packit 345191
			return tcache_alloc_large(tsdn_tsd(tsdn), arena,
Packit 345191
			    tcache, size, ind, zero, slow_path);
Packit 345191
		}
Packit 345191
		/* (size > tcache_maxclass) case falls through. */
Packit 345191
		assert(size > tcache_maxclass);
Packit 345191
	}
Packit 345191
Packit 345191
	return arena_malloc_hard(tsdn, arena, size, ind, zero);
Packit 345191
}
Packit 345191
Packit 345191
/* Map ptr to its extent, then to the arena that owns it. */
JEMALLOC_ALWAYS_INLINE arena_t *
arena_aalloc(tsdn_t *tsdn, const void *ptr) {
	extent_t *extent = iealloc(tsdn, ptr);
	return extent_arena_get(extent);
}

JEMALLOC_ALWAYS_INLINE size_t
Packit 345191
arena_salloc(tsdn_t *tsdn, const void *ptr) {
Packit 345191
	assert(ptr != NULL);
Packit 345191
Packit 345191
	rtree_ctx_t rtree_ctx_fallback;
Packit 345191
	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
Packit 345191
Packit 345191
	szind_t szind = rtree_szind_read(tsdn, &extents_rtree, rtree_ctx,
Packit 345191
	    (uintptr_t)ptr, true);
Packit 345191
	assert(szind != SC_NSIZES);
Packit 345191
Packit 345191
	return sz_index2size(szind);
Packit 345191
}
Packit 345191
Packit 345191
JEMALLOC_ALWAYS_INLINE size_t
Packit 345191
arena_vsalloc(tsdn_t *tsdn, const void *ptr) {
Packit 345191
	/*
Packit 345191
	 * Return 0 if ptr is not within an extent managed by jemalloc.  This
Packit 345191
	 * function has two extra costs relative to isalloc():
Packit 345191
	 * - The rtree calls cannot claim to be dependent lookups, which induces
Packit 345191
	 *   rtree lookup load dependencies.
Packit 345191
	 * - The lookup may fail, so there is an extra branch to check for
Packit 345191
	 *   failure.
Packit 345191
	 */
Packit 345191
Packit 345191
	rtree_ctx_t rtree_ctx_fallback;
Packit 345191
	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
Packit 345191
Packit 345191
	extent_t *extent;
Packit 345191
	szind_t szind;
Packit 345191
	if (rtree_extent_szind_read(tsdn, &extents_rtree, rtree_ctx,
Packit 345191
	    (uintptr_t)ptr, false, &extent, &szind)) {
Packit 345191
		return 0;
Packit 345191
	}
Packit 345191
Packit 345191
	if (extent == NULL) {
Packit 345191
		return 0;
Packit 345191
	}
Packit 345191
	assert(extent_state_get(extent) == extent_state_active);
Packit 345191
	/* Only slab members should be looked up via interior pointers. */
Packit 345191
	assert(extent_addr_get(extent) == ptr || extent_slab_get(extent));
Packit 345191
Packit 345191
	assert(szind != SC_NSIZES);
Packit 345191
Packit 345191
	return sz_index2size(szind);
Packit 345191
}
Packit 345191
Packit 345191
static inline void
Packit 345191
arena_dalloc_large_no_tcache(tsdn_t *tsdn, void *ptr, szind_t szind) {
Packit 345191
	if (config_prof && unlikely(szind < SC_NBINS)) {
Packit 345191
		arena_dalloc_promoted(tsdn, ptr, NULL, true);
Packit 345191
	} else {
Packit 345191
		extent_t *extent = iealloc(tsdn, ptr);
Packit 345191
		large_dalloc(tsdn, extent);
Packit 345191
	}
Packit 345191
}
Packit 345191
Packit 345191
static inline void
Packit 345191
arena_dalloc_no_tcache(tsdn_t *tsdn, void *ptr) {
Packit 345191
	assert(ptr != NULL);
Packit 345191
Packit 345191
	rtree_ctx_t rtree_ctx_fallback;
Packit 345191
	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
Packit 345191
Packit 345191
	szind_t szind;
Packit 345191
	bool slab;
Packit 345191
	rtree_szind_slab_read(tsdn, &extents_rtree, rtree_ctx, (uintptr_t)ptr,
Packit 345191
	    true, &szind, &slab;;
Packit 345191
Packit 345191
	if (config_debug) {
Packit 345191
		extent_t *extent = rtree_extent_read(tsdn, &extents_rtree,
Packit 345191
		    rtree_ctx, (uintptr_t)ptr, true);
Packit 345191
		assert(szind == extent_szind_get(extent));
Packit 345191
		assert(szind < SC_NSIZES);
Packit 345191
		assert(slab == extent_slab_get(extent));
Packit 345191
	}
Packit 345191
Packit 345191
	if (likely(slab)) {
Packit 345191
		/* Small allocation. */
Packit 345191
		arena_dalloc_small(tsdn, ptr);
Packit 345191
	} else {
Packit 345191
		arena_dalloc_large_no_tcache(tsdn, ptr, szind);
Packit 345191
	}
Packit 345191
}
Packit 345191
Packit 345191
JEMALLOC_ALWAYS_INLINE void
Packit 345191
arena_dalloc_large(tsdn_t *tsdn, void *ptr, tcache_t *tcache, szind_t szind,
Packit 345191
    bool slow_path) {
Packit 345191
	if (szind < nhbins) {
Packit 345191
		if (config_prof && unlikely(szind < SC_NBINS)) {
Packit 345191
			arena_dalloc_promoted(tsdn, ptr, tcache, slow_path);
Packit 345191
		} else {
Packit 345191
			tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr, szind,
Packit 345191
			    slow_path);
Packit 345191
		}
Packit 345191
	} else {
Packit 345191
		extent_t *extent = iealloc(tsdn, ptr);
Packit 345191
		large_dalloc(tsdn, extent);
Packit 345191
	}
Packit 345191
}
Packit 345191
Packit 345191
JEMALLOC_ALWAYS_INLINE void
Packit 345191
arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
Packit 345191
    alloc_ctx_t *alloc_ctx, bool slow_path) {
Packit 345191
	assert(!tsdn_null(tsdn) || tcache == NULL);
Packit 345191
	assert(ptr != NULL);
Packit 345191
Packit 345191
	if (unlikely(tcache == NULL)) {
Packit 345191
		arena_dalloc_no_tcache(tsdn, ptr);
Packit 345191
		return;
Packit 345191
	}
Packit 345191
Packit 345191
	szind_t szind;
Packit 345191
	bool slab;
Packit 345191
	rtree_ctx_t *rtree_ctx;
Packit 345191
	if (alloc_ctx != NULL) {
Packit 345191
		szind = alloc_ctx->szind;
Packit 345191
		slab = alloc_ctx->slab;
Packit 345191
		assert(szind != SC_NSIZES);
Packit 345191
	} else {
Packit 345191
		rtree_ctx = tsd_rtree_ctx(tsdn_tsd(tsdn));
Packit 345191
		rtree_szind_slab_read(tsdn, &extents_rtree, rtree_ctx,
Packit 345191
		    (uintptr_t)ptr, true, &szind, &slab;;
Packit 345191
	}
Packit 345191
Packit 345191
	if (config_debug) {
Packit 345191
		rtree_ctx = tsd_rtree_ctx(tsdn_tsd(tsdn));
Packit 345191
		extent_t *extent = rtree_extent_read(tsdn, &extents_rtree,
Packit 345191
		    rtree_ctx, (uintptr_t)ptr, true);
Packit 345191
		assert(szind == extent_szind_get(extent));
Packit 345191
		assert(szind < SC_NSIZES);
Packit 345191
		assert(slab == extent_slab_get(extent));
Packit 345191
	}
Packit 345191
Packit 345191
	if (likely(slab)) {
Packit 345191
		/* Small allocation. */
Packit 345191
		tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
Packit 345191
		    slow_path);
Packit 345191
	} else {
Packit 345191
		arena_dalloc_large(tsdn, ptr, tcache, szind, slow_path);
Packit 345191
	}
Packit 345191
}
Packit 345191
Packit 345191
static inline void
Packit 345191
arena_sdalloc_no_tcache(tsdn_t *tsdn, void *ptr, size_t size) {
Packit 345191
	assert(ptr != NULL);
Packit 345191
	assert(size <= SC_LARGE_MAXCLASS);
Packit 345191
Packit 345191
	szind_t szind;
Packit 345191
	bool slab;
Packit 345191
	if (!config_prof || !opt_prof) {
Packit 345191
		/*
Packit 345191
		 * There is no risk of being confused by a promoted sampled
Packit 345191
		 * object, so base szind and slab on the given size.
Packit 345191
		 */
Packit 345191
		szind = sz_size2index(size);
Packit 345191
		slab = (szind < SC_NBINS);
Packit 345191
	}
Packit 345191
Packit 345191
	if ((config_prof && opt_prof) || config_debug) {
Packit 345191
		rtree_ctx_t rtree_ctx_fallback;
Packit 345191
		rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn,
Packit 345191
		    &rtree_ctx_fallback);
Packit 345191
Packit 345191
		rtree_szind_slab_read(tsdn, &extents_rtree, rtree_ctx,
Packit 345191
		    (uintptr_t)ptr, true, &szind, &slab;;
Packit 345191
Packit 345191
		assert(szind == sz_size2index(size));
Packit 345191
		assert((config_prof && opt_prof) || slab == (szind < SC_NBINS));
Packit 345191
Packit 345191
		if (config_debug) {
Packit 345191
			extent_t *extent = rtree_extent_read(tsdn,
Packit 345191
			    &extents_rtree, rtree_ctx, (uintptr_t)ptr, true);
Packit 345191
			assert(szind == extent_szind_get(extent));
Packit 345191
			assert(slab == extent_slab_get(extent));
Packit 345191
		}
Packit 345191
	}
Packit 345191
Packit 345191
	if (likely(slab)) {
Packit 345191
		/* Small allocation. */
Packit 345191
		arena_dalloc_small(tsdn, ptr);
Packit 345191
	} else {
Packit 345191
		arena_dalloc_large_no_tcache(tsdn, ptr, szind);
Packit 345191
	}
Packit 345191
}
Packit 345191
Packit 345191
JEMALLOC_ALWAYS_INLINE void
Packit 345191
arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
Packit 345191
    alloc_ctx_t *alloc_ctx, bool slow_path) {
Packit 345191
	assert(!tsdn_null(tsdn) || tcache == NULL);
Packit 345191
	assert(ptr != NULL);
Packit 345191
	assert(size <= SC_LARGE_MAXCLASS);
Packit 345191
Packit 345191
	if (unlikely(tcache == NULL)) {
Packit 345191
		arena_sdalloc_no_tcache(tsdn, ptr, size);
Packit 345191
		return;
Packit 345191
	}
Packit 345191
Packit 345191
	szind_t szind;
Packit 345191
	bool slab;
Packit 345191
	alloc_ctx_t local_ctx;
Packit 345191
	if (config_prof && opt_prof) {
Packit 345191
		if (alloc_ctx == NULL) {
Packit 345191
			/* Uncommon case and should be a static check. */
Packit 345191
			rtree_ctx_t rtree_ctx_fallback;
Packit 345191
			rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn,
Packit 345191
			    &rtree_ctx_fallback);
Packit 345191
			rtree_szind_slab_read(tsdn, &extents_rtree, rtree_ctx,
Packit 345191
			    (uintptr_t)ptr, true, &local_ctx.szind,
Packit 345191
			    &local_ctx.slab);
Packit 345191
			assert(local_ctx.szind == sz_size2index(size));
Packit 345191
			alloc_ctx = &local_ctx;
Packit 345191
		}
Packit 345191
		slab = alloc_ctx->slab;
Packit 345191
		szind = alloc_ctx->szind;
Packit 345191
	} else {
Packit 345191
		/*
Packit 345191
		 * There is no risk of being confused by a promoted sampled
Packit 345191
		 * object, so base szind and slab on the given size.
Packit 345191
		 */
Packit 345191
		szind = sz_size2index(size);
Packit 345191
		slab = (szind < SC_NBINS);
Packit 345191
	}
Packit 345191
Packit 345191
	if (config_debug) {
Packit 345191
		rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsdn_tsd(tsdn));
Packit 345191
		rtree_szind_slab_read(tsdn, &extents_rtree, rtree_ctx,
Packit 345191
		    (uintptr_t)ptr, true, &szind, &slab;;
Packit 345191
		extent_t *extent = rtree_extent_read(tsdn,
Packit 345191
		    &extents_rtree, rtree_ctx, (uintptr_t)ptr, true);
Packit 345191
		assert(szind == extent_szind_get(extent));
Packit 345191
		assert(slab == extent_slab_get(extent));
Packit 345191
	}
Packit 345191
Packit 345191
	if (likely(slab)) {
Packit 345191
		/* Small allocation. */
Packit 345191
		tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
Packit 345191
		    slow_path);
Packit 345191
	} else {
Packit 345191
		arena_dalloc_large(tsdn, ptr, tcache, szind, slow_path);
Packit 345191
	}
Packit 345191
}
#endif /* JEMALLOC_INTERNAL_ARENA_INLINES_B_H */