nr = folio_pte_batch(folio, pvmw.pte, pteval, max_nr);
}
- if (lru_gen_enabled() && pvmw.pte) {
+ /*
+ * When LRU is switching, we don't know where the surrounding folios
+ * are -- they could be on active/inactive lists or on MGLRU. So the
+ * simplest approach is to disable this look-around optimization.
+ */
+ if (lru_gen_enabled() && !lru_gen_switching() && pvmw.pte) {
if (lru_gen_look_around(&pvmw, nr))
referenced++;
} else if (pvmw.pte) {
if (referenced_ptes == -1)
return FOLIOREF_KEEP;
- if (lru_gen_enabled()) {
+ if (lru_gen_enabled() && !lru_gen_switching()) {
if (!referenced_ptes)
return FOLIOREF_RECLAIM;
unsigned long file;
struct lruvec *target_lruvec;
- if (lru_gen_enabled())
+ if (lru_gen_enabled() && !lru_gen_switching())
return;
target_lruvec = mem_cgroup_lruvec(sc->target_mem_cgroup, pgdat);
#ifdef CONFIG_LRU_GEN
+DEFINE_STATIC_KEY_FALSE(lru_switch);
#ifdef CONFIG_LRU_GEN_ENABLED
DEFINE_STATIC_KEY_ARRAY_TRUE(lru_gen_caps, NR_LRU_GEN_CAPS);
#define get_cap(cap) static_branch_likely(&lru_gen_caps[cap])
if (enabled == lru_gen_enabled())
goto unlock;
+ static_branch_enable_cpuslocked(&lru_switch);
+
if (enabled)
static_branch_enable_cpuslocked(&lru_gen_caps[LRU_GEN_CORE]);
else
cond_resched();
} while ((memcg = mem_cgroup_iter(NULL, memcg, NULL)));
+
+ static_branch_disable_cpuslocked(&lru_switch);
+
unlock:
mutex_unlock(&state_mutex);
put_online_mems();
bool proportional_reclaim;
struct blk_plug plug;
- if (lru_gen_enabled() && !root_reclaim(sc)) {
+ if ((lru_gen_enabled() || lru_gen_switching()) && !root_reclaim(sc)) {
lru_gen_shrink_lruvec(lruvec, sc);
- return;
+
+ if (!lru_gen_switching())
+ return;
+
}
get_scan_count(lruvec, sc, nr);
struct lruvec *target_lruvec;
bool reclaimable = false;
- if (lru_gen_enabled() && root_reclaim(sc)) {
+ if ((lru_gen_enabled() || lru_gen_switching()) && root_reclaim(sc)) {
memset(&sc->nr, 0, sizeof(sc->nr));
lru_gen_shrink_node(pgdat, sc);
- return;
+
+ if (!lru_gen_switching())
+ return;
+
}
target_lruvec = mem_cgroup_lruvec(sc->target_mem_cgroup, pgdat);
struct lruvec *target_lruvec;
unsigned long refaults;
- if (lru_gen_enabled())
+ if (lru_gen_enabled() && !lru_gen_switching())
return;
target_lruvec = mem_cgroup_lruvec(target_memcg, pgdat);
struct mem_cgroup *memcg;
struct lruvec *lruvec;
- if (lru_gen_enabled()) {
+ if (lru_gen_enabled() || lru_gen_switching()) {
lru_gen_age_node(pgdat, sc);
- return;
+
+ if (!lru_gen_switching())
+ return;
+
}
lruvec = mem_cgroup_lruvec(NULL, pgdat);