#ifndef JEMALLOC_INTERNAL_BACKGROUND_THREAD_INLINES_H
#define JEMALLOC_INTERNAL_BACKGROUND_THREAD_INLINES_H

4 JEMALLOC_ALWAYS_INLINE bool
background_thread_enabled(void)5 background_thread_enabled(void) {
6 	return atomic_load_b(&background_thread_enabled_state, ATOMIC_RELAXED);
7 }
8 
9 JEMALLOC_ALWAYS_INLINE void
background_thread_enabled_set(tsdn_t * tsdn,bool state)10 background_thread_enabled_set(tsdn_t *tsdn, bool state) {
11 	malloc_mutex_assert_owner(tsdn, &background_thread_lock);
12 	atomic_store_b(&background_thread_enabled_state, state, ATOMIC_RELAXED);
13 }
14 
15 JEMALLOC_ALWAYS_INLINE background_thread_info_t *
arena_background_thread_info_get(arena_t * arena)16 arena_background_thread_info_get(arena_t *arena) {
17 	unsigned arena_ind = arena_ind_get(arena);
18 	return &background_thread_info[arena_ind % max_background_threads];
19 }
20 
21 JEMALLOC_ALWAYS_INLINE background_thread_info_t *
background_thread_info_get(size_t ind)22 background_thread_info_get(size_t ind) {
23 	return &background_thread_info[ind % max_background_threads];
24 }
25 
JEMALLOC_ALWAYS_INLINE uint64_t
background_thread_wakeup_time_get(background_thread_info_t *info) {
	/*
	 * Return the thread's next wakeup time in nanoseconds.  The acquire
	 * load inside the assert pairs with the release store in
	 * background_thread_wakeup_time_set(): the indefinite_sleep flag must
	 * agree with whether next_wakeup holds the indefinite-sleep sentinel.
	 * Keeping the load inside assert() means it compiles away in
	 * non-debug builds.
	 */
	uint64_t next_wakeup = nstime_ns(&info->next_wakeup);
	assert(atomic_load_b(&info->indefinite_sleep, ATOMIC_ACQUIRE) ==
	    (next_wakeup == BACKGROUND_THREAD_INDEFINITE_SLEEP));
	return next_wakeup;
}

34 JEMALLOC_ALWAYS_INLINE void
background_thread_wakeup_time_set(tsdn_t * tsdn,background_thread_info_t * info,uint64_t wakeup_time)35 background_thread_wakeup_time_set(tsdn_t *tsdn, background_thread_info_t *info,
36     uint64_t wakeup_time) {
37 	malloc_mutex_assert_owner(tsdn, &info->mtx);
38 	atomic_store_b(&info->indefinite_sleep,
39 	    wakeup_time == BACKGROUND_THREAD_INDEFINITE_SLEEP, ATOMIC_RELEASE);
40 	nstime_init(&info->next_wakeup, wakeup_time);
41 }
42 
43 JEMALLOC_ALWAYS_INLINE bool
background_thread_indefinite_sleep(background_thread_info_t * info)44 background_thread_indefinite_sleep(background_thread_info_t *info) {
45 	return atomic_load_b(&info->indefinite_sleep, ATOMIC_ACQUIRE);
46 }
47 
48 JEMALLOC_ALWAYS_INLINE void
arena_background_thread_inactivity_check(tsdn_t * tsdn,arena_t * arena,bool is_background_thread)49 arena_background_thread_inactivity_check(tsdn_t *tsdn, arena_t *arena,
50     bool is_background_thread) {
51 	if (!background_thread_enabled() || is_background_thread) {
52 		return;
53 	}
54 	background_thread_info_t *info =
55 	    arena_background_thread_info_get(arena);
56 	if (background_thread_indefinite_sleep(info)) {
57 		background_thread_interval_check(tsdn, arena,
58 		    &arena->decay_dirty, 0);
59 	}
60 }
61 
#endif /* JEMALLOC_INTERNAL_BACKGROUND_THREAD_INLINES_H */