return PTR_ALIGN_DOWN(ptep, sizeof(*ptep) * CONT_PTES);
}
+/*
+ * contpte_align_addr_ptep - widen [*start, *end) and realign ptep so the
+ * range covers whole contiguous-PTE (CONT_PTE) blocks.
+ *
+ * If the last PTE of the range has the contiguous bit set, *end is rounded
+ * up to the next CONT_PTE_SIZE boundary. If the first PTE has it set,
+ * *start is rounded down and ptep is realigned to the first entry of that
+ * contiguous block. The *end check deliberately reads ptep + nr - 1 before
+ * ptep may be moved down. Returns the (possibly realigned) ptep.
+ */
+static inline pte_t *contpte_align_addr_ptep(unsigned long *start,
+ unsigned long *end, pte_t *ptep,
+ unsigned int nr)
+{
+ /*
+ * Note: caller must ensure these nr PTEs are consecutive (present)
+ * PTEs that map consecutive pages of the same large folio within a
+ * single VMA and a single page table.
+ */
+ if (pte_cont(__ptep_get(ptep + nr - 1)))
+ *end = ALIGN(*end, CONT_PTE_SIZE);
+
+ if (pte_cont(__ptep_get(ptep))) {
+ *start = ALIGN_DOWN(*start, CONT_PTE_SIZE);
+ ptep = contpte_align_down(ptep);
+ }
+
+ return ptep;
+}
+
+
/*
 * Widen [addr, addr + nr pages) outward to contiguous-block boundaries via
 * contpte_align_addr_ptep() before operating on the range.
 *
 * NOTE(review): this hunk looks garbled — the body's tail call references
 * 'vma' and 'flags', which are not parameters of contpte_try_unfold_partial(),
 * and the EXPORT_SYMBOL_GPL() below names contpte_clear_young_dirty_ptes().
 * Upstream, contpte_try_unfold_partial() ends with
 * contpte_try_unfold(mm, start, ptep, (end - start) / PAGE_SIZE), while the
 * __clear_young_dirty_ptes(vma, ...) tail belongs to
 * contpte_clear_young_dirty_ptes(). This appears to be two diff hunks fused
 * by a lost @@ boundary — verify against the complete patch before applying.
 */
static void contpte_try_unfold_partial(struct mm_struct *mm, unsigned long addr,
pte_t *ptep, unsigned int nr)
{
unsigned long start = addr;
unsigned long end = start + nr * PAGE_SIZE;
-if (pte_cont(__ptep_get(ptep + nr - 1)))
-end = ALIGN(end, CONT_PTE_SIZE);
-
-if (pte_cont(__ptep_get(ptep))) {
-start = ALIGN_DOWN(start, CONT_PTE_SIZE);
-ptep = contpte_align_down(ptep);
-}
-
+ ptep = contpte_align_addr_ptep(&start, &end, ptep, nr);
__clear_young_dirty_ptes(vma, start, ptep, (end - start) / PAGE_SIZE, flags);
}
EXPORT_SYMBOL_GPL(contpte_clear_young_dirty_ptes);