git.ipfire.org Git - thirdparty/kernel/linux.git/commitdiff
arm64/sysreg: Replace TCR_EL1 field macros
author: Anshuman Khandual <anshuman.khandual@arm.com>
Mon, 13 Oct 2025 05:29:44 +0000 (10:59 +0530)
committer: Catalin Marinas <catalin.marinas@arm.com>
Thu, 13 Nov 2025 15:58:30 +0000 (15:58 +0000)
This just replaces all used TCR_EL1 field macros with tools sysreg variant
based fields and subsequently drops them from the header (pgtable-hwdef.h),
while retaining the ones used for KVM (represented via the sysreg
tools format).

Cc: Will Deacon <will@kernel.org>
Cc: Mark Brown <broonie@kernel.org>
Cc: linux-arm-kernel@lists.infradead.org
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Anshuman Khandual <anshuman.khandual@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
arch/arm64/include/asm/assembler.h
arch/arm64/include/asm/cputype.h
arch/arm64/include/asm/mmu_context.h
arch/arm64/include/asm/pgtable-hwdef.h
arch/arm64/include/asm/pgtable-prot.h
arch/arm64/kernel/cpufeature.c
arch/arm64/kernel/pi/map_kernel.c
arch/arm64/kernel/vmcore_info.c
arch/arm64/mm/proc.S

index 23be85d933485001c00a084f1aa0bdbc0457cf47..1392860a3c975b4cb9e890f93435e90397f538ce 100644 (file)
@@ -325,14 +325,14 @@ alternative_cb_end
  * tcr_set_t0sz - update TCR.T0SZ so that we can load the ID map
  */
        .macro  tcr_set_t0sz, valreg, t0sz
-       bfi     \valreg, \t0sz, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
+       bfi     \valreg, \t0sz, #TCR_EL1_T0SZ_SHIFT, #TCR_EL1_T0SZ_WIDTH
        .endm
 
 /*
  * tcr_set_t1sz - update TCR.T1SZ
  */
        .macro  tcr_set_t1sz, valreg, t1sz
-       bfi     \valreg, \t1sz, #TCR_T1SZ_OFFSET, #TCR_TxSZ_WIDTH
+       bfi     \valreg, \t1sz, #TCR_EL1_T1SZ_SHIFT, #TCR_EL1_T1SZ_WIDTH
        .endm
 
 /*
@@ -589,7 +589,7 @@ alternative_endif
        .macro  offset_ttbr1, ttbr, tmp
 #if defined(CONFIG_ARM64_VA_BITS_52) && !defined(CONFIG_ARM64_LPA2)
        mrs     \tmp, tcr_el1
-       and     \tmp, \tmp, #TCR_T1SZ_MASK
+       and     \tmp, \tmp, #TCR_EL1_T1SZ_MASK
        cmp     \tmp, #TCR_T1SZ(VA_BITS_MIN)
        orr     \tmp, \ttbr, #TTBR1_BADDR_4852_OFFSET
        csel    \ttbr, \tmp, \ttbr, eq
index 9b00b75acbf2967b8a3da7d9f0e7599e31f38fa5..f14eb942cb4aefa89f89bda566caf4dad1c86b19 100644 (file)
 /* Fujitsu Erratum 010001 affects A64FX 1.0 and 1.1, (v0r0 and v1r0) */
 #define MIDR_FUJITSU_ERRATUM_010001            MIDR_FUJITSU_A64FX
 #define MIDR_FUJITSU_ERRATUM_010001_MASK       (~MIDR_CPU_VAR_REV(1, 0))
-#define TCR_CLEAR_FUJITSU_ERRATUM_010001       (TCR_NFD1 | TCR_NFD0)
+#define TCR_CLEAR_FUJITSU_ERRATUM_010001       (TCR_EL1_NFD1 | TCR_EL1_NFD0)
 
 #ifndef __ASSEMBLY__
 
index 0dbe3b29049b742327ea391c13c1813bc8f312f8..1b4ac7b23e180dcc2dabb81c2462dd548782310c 100644 (file)
@@ -73,10 +73,10 @@ static inline void __cpu_set_tcr_t0sz(unsigned long t0sz)
 {
        unsigned long tcr = read_sysreg(tcr_el1);
 
-       if ((tcr & TCR_T0SZ_MASK) == t0sz)
+       if ((tcr & TCR_EL1_T0SZ_MASK) == t0sz)
                return;
 
-       tcr &= ~TCR_T0SZ_MASK;
+       tcr &= ~TCR_EL1_T0SZ_MASK;
        tcr |= t0sz;
        write_sysreg(tcr, tcr_el1);
        isb();
index f3b77deedfa2cd5a75b629ca1b133eaacaa5fe4a..d49180bb7cb3d8e7fac66017975d80eef0b7189d 100644 (file)
 /*
  * TCR flags.
  */
-#define TCR_T0SZ_OFFSET                0
-#define TCR_T1SZ_OFFSET                16
-#define TCR_T0SZ(x)            ((UL(64) - (x)) << TCR_T0SZ_OFFSET)
-#define TCR_T1SZ(x)            ((UL(64) - (x)) << TCR_T1SZ_OFFSET)
-#define TCR_TxSZ(x)            (TCR_T0SZ(x) | TCR_T1SZ(x))
-#define TCR_TxSZ_WIDTH         6
-#define TCR_T0SZ_MASK          (((UL(1) << TCR_TxSZ_WIDTH) - 1) << TCR_T0SZ_OFFSET)
-#define TCR_T1SZ_MASK          (((UL(1) << TCR_TxSZ_WIDTH) - 1) << TCR_T1SZ_OFFSET)
-
-#define TCR_EPD0_SHIFT         7
-#define TCR_EPD0_MASK          (UL(1) << TCR_EPD0_SHIFT)
-#define TCR_IRGN0_SHIFT                8
-#define TCR_IRGN0_MASK         (UL(3) << TCR_IRGN0_SHIFT)
-#define TCR_IRGN0_NC           (UL(0) << TCR_IRGN0_SHIFT)
-#define TCR_IRGN0_WBWA         (UL(1) << TCR_IRGN0_SHIFT)
-#define TCR_IRGN0_WT           (UL(2) << TCR_IRGN0_SHIFT)
-#define TCR_IRGN0_WBnWA                (UL(3) << TCR_IRGN0_SHIFT)
-
-#define TCR_EPD1_SHIFT         23
-#define TCR_EPD1_MASK          (UL(1) << TCR_EPD1_SHIFT)
-#define TCR_IRGN1_SHIFT                24
-#define TCR_IRGN1_MASK         (UL(3) << TCR_IRGN1_SHIFT)
-#define TCR_IRGN1_NC           (UL(0) << TCR_IRGN1_SHIFT)
-#define TCR_IRGN1_WBWA         (UL(1) << TCR_IRGN1_SHIFT)
-#define TCR_IRGN1_WT           (UL(2) << TCR_IRGN1_SHIFT)
-#define TCR_IRGN1_WBnWA                (UL(3) << TCR_IRGN1_SHIFT)
-
-#define TCR_IRGN_NC            (TCR_IRGN0_NC | TCR_IRGN1_NC)
-#define TCR_IRGN_WBWA          (TCR_IRGN0_WBWA | TCR_IRGN1_WBWA)
-#define TCR_IRGN_WT            (TCR_IRGN0_WT | TCR_IRGN1_WT)
-#define TCR_IRGN_WBnWA         (TCR_IRGN0_WBnWA | TCR_IRGN1_WBnWA)
-#define TCR_IRGN_MASK          (TCR_IRGN0_MASK | TCR_IRGN1_MASK)
-
-
-#define TCR_ORGN0_SHIFT                10
-#define TCR_ORGN0_MASK         (UL(3) << TCR_ORGN0_SHIFT)
-#define TCR_ORGN0_NC           (UL(0) << TCR_ORGN0_SHIFT)
-#define TCR_ORGN0_WBWA         (UL(1) << TCR_ORGN0_SHIFT)
-#define TCR_ORGN0_WT           (UL(2) << TCR_ORGN0_SHIFT)
-#define TCR_ORGN0_WBnWA                (UL(3) << TCR_ORGN0_SHIFT)
-
-#define TCR_ORGN1_SHIFT                26
-#define TCR_ORGN1_MASK         (UL(3) << TCR_ORGN1_SHIFT)
-#define TCR_ORGN1_NC           (UL(0) << TCR_ORGN1_SHIFT)
-#define TCR_ORGN1_WBWA         (UL(1) << TCR_ORGN1_SHIFT)
-#define TCR_ORGN1_WT           (UL(2) << TCR_ORGN1_SHIFT)
-#define TCR_ORGN1_WBnWA                (UL(3) << TCR_ORGN1_SHIFT)
-
-#define TCR_ORGN_NC            (TCR_ORGN0_NC | TCR_ORGN1_NC)
-#define TCR_ORGN_WBWA          (TCR_ORGN0_WBWA | TCR_ORGN1_WBWA)
-#define TCR_ORGN_WT            (TCR_ORGN0_WT | TCR_ORGN1_WT)
-#define TCR_ORGN_WBnWA         (TCR_ORGN0_WBnWA | TCR_ORGN1_WBnWA)
-#define TCR_ORGN_MASK          (TCR_ORGN0_MASK | TCR_ORGN1_MASK)
-
-#define TCR_SH0_SHIFT          12
-#define TCR_SH0_MASK           (UL(3) << TCR_SH0_SHIFT)
-#define TCR_SH0_INNER          (UL(3) << TCR_SH0_SHIFT)
-
-#define TCR_SH1_SHIFT          28
-#define TCR_SH1_MASK           (UL(3) << TCR_SH1_SHIFT)
-#define TCR_SH1_INNER          (UL(3) << TCR_SH1_SHIFT)
-#define TCR_SHARED             (TCR_SH0_INNER | TCR_SH1_INNER)
-
-#define TCR_TG0_SHIFT          14
-#define TCR_TG0_MASK           (UL(3) << TCR_TG0_SHIFT)
-#define TCR_TG0_4K             (UL(0) << TCR_TG0_SHIFT)
-#define TCR_TG0_64K            (UL(1) << TCR_TG0_SHIFT)
-#define TCR_TG0_16K            (UL(2) << TCR_TG0_SHIFT)
-
-#define TCR_TG1_SHIFT          30
-#define TCR_TG1_MASK           (UL(3) << TCR_TG1_SHIFT)
-#define TCR_TG1_16K            (UL(1) << TCR_TG1_SHIFT)
-#define TCR_TG1_4K             (UL(2) << TCR_TG1_SHIFT)
-#define TCR_TG1_64K            (UL(3) << TCR_TG1_SHIFT)
-
-#define TCR_IPS_SHIFT          32
-#define TCR_IPS_MASK           (UL(7) << TCR_IPS_SHIFT)
-#define TCR_A1                 (UL(1) << 22)
-#define TCR_ASID16             (UL(1) << 36)
-#define TCR_TBI0               (UL(1) << 37)
-#define TCR_TBI1               (UL(1) << 38)
-#define TCR_HA                 (UL(1) << 39)
-#define TCR_HD                 (UL(1) << 40)
-#define TCR_HPD0_SHIFT         41
-#define TCR_HPD0               (UL(1) << TCR_HPD0_SHIFT)
-#define TCR_HPD1_SHIFT         42
-#define TCR_HPD1               (UL(1) << TCR_HPD1_SHIFT)
-#define TCR_TBID0              (UL(1) << 51)
-#define TCR_TBID1              (UL(1) << 52)
-#define TCR_NFD0               (UL(1) << 53)
-#define TCR_NFD1               (UL(1) << 54)
-#define TCR_E0PD0              (UL(1) << 55)
-#define TCR_E0PD1              (UL(1) << 56)
-#define TCR_TCMA0              (UL(1) << 57)
-#define TCR_TCMA1              (UL(1) << 58)
-#define TCR_DS                 (UL(1) << 59)
+#define TCR_T0SZ(x)            ((UL(64) - (x)) << TCR_EL1_T0SZ_SHIFT)
+#define TCR_T1SZ(x)            ((UL(64) - (x)) << TCR_EL1_T1SZ_SHIFT)
+
+#define TCR_T0SZ_MASK          TCR_EL1_T0SZ_MASK
+#define TCR_T1SZ_MASK          TCR_EL1_T1SZ_MASK
+
+#define TCR_EPD0_MASK          TCR_EL1_EPD0_MASK
+#define TCR_EPD1_MASK          TCR_EL1_EPD1_MASK
+
+#define TCR_IRGN0_MASK         TCR_EL1_IRGN0_MASK
+#define TCR_IRGN0_WBWA         (TCR_EL1_IRGN0_WBWA << TCR_EL1_IRGN0_SHIFT)
+
+#define TCR_ORGN0_MASK         TCR_EL1_ORGN0_MASK
+#define TCR_ORGN0_WBWA         (TCR_EL1_ORGN0_WBWA << TCR_EL1_ORGN0_SHIFT)
+
+#define TCR_SH0_MASK           TCR_EL1_SH0_MASK
+#define TCR_SH0_INNER          (TCR_EL1_SH0_INNER << TCR_EL1_SH0_SHIFT)
+
+#define TCR_SH1_MASK           TCR_EL1_SH1_MASK
+
+#define TCR_TG0_SHIFT          TCR_EL1_TG0_SHIFT
+#define TCR_TG0_MASK           TCR_EL1_TG0_MASK
+#define TCR_TG0_4K             (TCR_EL1_TG0_4K << TCR_EL1_TG0_SHIFT)
+#define TCR_TG0_64K            (TCR_EL1_TG0_64K << TCR_EL1_TG0_SHIFT)
+#define TCR_TG0_16K            (TCR_EL1_TG0_16K << TCR_EL1_TG0_SHIFT)
+
+#define TCR_TG1_SHIFT          TCR_EL1_TG1_SHIFT
+#define TCR_TG1_MASK           TCR_EL1_TG1_MASK
+#define TCR_TG1_16K            (TCR_EL1_TG1_16K << TCR_EL1_TG1_SHIFT)
+#define TCR_TG1_4K             (TCR_EL1_TG1_4K << TCR_EL1_TG1_SHIFT)
+#define TCR_TG1_64K            (TCR_EL1_TG1_64K << TCR_EL1_TG1_SHIFT)
+
+#define TCR_IPS_SHIFT          TCR_EL1_IPS_SHIFT
+#define TCR_IPS_MASK           TCR_EL1_IPS_MASK
+#define TCR_A1                 TCR_EL1_A1
+#define TCR_ASID16             TCR_EL1_AS
+#define TCR_TBI0               TCR_EL1_TBI0
+#define TCR_TBI1               TCR_EL1_TBI1
+#define TCR_HA                 TCR_EL1_HA
+#define TCR_HD                 TCR_EL1_HD
+#define TCR_HPD0               TCR_EL1_HPD0
+#define TCR_HPD1               TCR_EL1_HPD1
+#define TCR_TBID0              TCR_EL1_TBID0
+#define TCR_TBID1              TCR_EL1_TBID1
+#define TCR_E0PD0              TCR_EL1_E0PD0
+#define TCR_E0PD1              TCR_EL1_E0PD1
+#define TCR_DS                 TCR_EL1_DS
 
 /*
  * TTBR.
index 85dceb1c66f45b6d057f607f6ae64e96526c1bc0..21a3d3342283236441dd4b4378fd2b05a556d367 100644 (file)
@@ -84,7 +84,7 @@ extern unsigned long prot_ns_shared;
 #else
 static inline bool __pure lpa2_is_enabled(void)
 {
-       return read_tcr() & TCR_DS;
+       return read_tcr() & TCR_EL1_DS;
 }
 
 #define PTE_MAYBE_SHARED       (lpa2_is_enabled() ? 0 : PTE_SHARED)
index 5ed401ff79e3e388b63825acbd156546cf91e103..c8e33abfdaeff71e207726c95af2e247de42977a 100644 (file)
@@ -1969,7 +1969,7 @@ static struct cpumask dbm_cpus __read_mostly;
 
 static inline void __cpu_enable_hw_dbm(void)
 {
-       u64 tcr = read_sysreg(tcr_el1) | TCR_HD;
+       u64 tcr = read_sysreg(tcr_el1) | TCR_EL1_HD;
 
        write_sysreg(tcr, tcr_el1);
        isb();
@@ -2255,7 +2255,7 @@ static bool has_generic_auth(const struct arm64_cpu_capabilities *entry,
 static void cpu_enable_e0pd(struct arm64_cpu_capabilities const *cap)
 {
        if (this_cpu_has_cap(ARM64_HAS_E0PD))
-               sysreg_clear_set(tcr_el1, 0, TCR_E0PD1);
+               sysreg_clear_set(tcr_el1, 0, TCR_EL1_E0PD1);
 }
 #endif /* CONFIG_ARM64_E0PD */
 
index e8ddbde31a833d9735863e64db3490e97bafdcf8..8ac26be77685eebac67370b7feef8ef8af82cde2 100644 (file)
@@ -141,13 +141,13 @@ static void __init map_kernel(u64 kaslr_offset, u64 va_offset, int root_level)
 static void noinline __section(".idmap.text") set_ttbr0_for_lpa2(phys_addr_t ttbr)
 {
        u64 sctlr = read_sysreg(sctlr_el1);
-       u64 tcr = read_sysreg(tcr_el1) | TCR_DS;
+       u64 tcr = read_sysreg(tcr_el1) | TCR_EL1_DS;
        u64 mmfr0 = read_sysreg(id_aa64mmfr0_el1);
        u64 parange = cpuid_feature_extract_unsigned_field(mmfr0,
                                                           ID_AA64MMFR0_EL1_PARANGE_SHIFT);
 
-       tcr &= ~TCR_IPS_MASK;
-       tcr |= parange << TCR_IPS_SHIFT;
+       tcr &= ~TCR_EL1_IPS_MASK;
+       tcr |= parange << TCR_EL1_IPS_SHIFT;
 
        asm("   msr     sctlr_el1, %0           ;"
            "   isb                             ;"
@@ -263,7 +263,7 @@ asmlinkage void __init early_map_kernel(u64 boot_status, phys_addr_t fdt)
        }
 
        if (va_bits > VA_BITS_MIN)
-               sysreg_clear_set(tcr_el1, TCR_T1SZ_MASK, TCR_T1SZ(va_bits));
+               sysreg_clear_set(tcr_el1, TCR_EL1_T1SZ_MASK, TCR_T1SZ(va_bits));
 
        /*
         * The virtual KASLR displacement modulo 2MiB is decided by the
index b19d5d6cb8b3872ddf0fa729e412d69a0fe35b38..9619ece66b79ca9b190cbfd3a43ba4a14a28cff0 100644 (file)
@@ -14,7 +14,7 @@ static inline u64 get_tcr_el1_t1sz(void);
 
 static inline u64 get_tcr_el1_t1sz(void)
 {
-       return (read_sysreg(tcr_el1) & TCR_T1SZ_MASK) >> TCR_T1SZ_OFFSET;
+       return (read_sysreg(tcr_el1) & TCR_EL1_T1SZ_MASK) >> TCR_EL1_T1SZ_SHIFT;
 }
 
 void arch_crash_save_vmcoreinfo(void)
index 86818511962b6121c199ffbddf128163c49a8d51..01e868116448886d5c3c8dcd89687bc5df005fcb 100644 (file)
 #include <asm/sysreg.h>
 
 #ifdef CONFIG_ARM64_64K_PAGES
-#define TCR_TG_FLAGS   TCR_TG0_64K | TCR_TG1_64K
+#define TCR_TG_FLAGS   ((TCR_EL1_TG0_64K << TCR_EL1_TG0_SHIFT) |\
+                        (TCR_EL1_TG1_64K << TCR_EL1_TG1_SHIFT))
 #elif defined(CONFIG_ARM64_16K_PAGES)
-#define TCR_TG_FLAGS   TCR_TG0_16K | TCR_TG1_16K
+#define TCR_TG_FLAGS   ((TCR_EL1_TG0_16K << TCR_EL1_TG0_SHIFT) |\
+                        (TCR_EL1_TG1_16K << TCR_EL1_TG1_SHIFT))
 #else /* CONFIG_ARM64_4K_PAGES */
-#define TCR_TG_FLAGS   TCR_TG0_4K | TCR_TG1_4K
+#define TCR_TG_FLAGS   ((TCR_EL1_TG0_4K << TCR_EL1_TG0_SHIFT) |\
+                        (TCR_EL1_TG1_4K << TCR_EL1_TG1_SHIFT))
 #endif
 
 #ifdef CONFIG_RANDOMIZE_BASE
-#define TCR_KASLR_FLAGS        TCR_NFD1
+#define TCR_KASLR_FLAGS        TCR_EL1_NFD1
 #else
 #define TCR_KASLR_FLAGS        0
 #endif
 #define TCR_CACHE_FLAGS        TCR_IRGN_WBWA | TCR_ORGN_WBWA
 
 #ifdef CONFIG_KASAN_SW_TAGS
-#define TCR_KASAN_SW_FLAGS TCR_TBI1 | TCR_TBID1
+#define TCR_KASAN_SW_FLAGS TCR_EL1_TBI1 | TCR_EL1_TBID1
 #else
 #define TCR_KASAN_SW_FLAGS 0
 #endif
 
 #ifdef CONFIG_KASAN_HW_TAGS
-#define TCR_MTE_FLAGS TCR_TCMA1 | TCR_TBI1 | TCR_TBID1
+#define TCR_MTE_FLAGS TCR_EL1_TCMA1 | TCR_EL1_TBI1 | TCR_EL1_TBID1
 #elif defined(CONFIG_ARM64_MTE)
 /*
  * The mte_zero_clear_page_tags() implementation uses DC GZVA, which relies on
  * TBI being enabled at EL1.
  */
-#define TCR_MTE_FLAGS TCR_TBI1 | TCR_TBID1
+#define TCR_MTE_FLAGS TCR_EL1_TBI1 | TCR_EL1_TBID1
 #else
 #define TCR_MTE_FLAGS 0
 #endif
 
+#define TCR_IRGN_WBWA  ((TCR_EL1_IRGN0_WBWA << TCR_EL1_IRGN0_SHIFT) |\
+                        (TCR_EL1_IRGN1_WBWA << TCR_EL1_IRGN1_SHIFT))
+#define TCR_ORGN_WBWA  ((TCR_EL1_ORGN0_WBWA << TCR_EL1_ORGN0_SHIFT) |\
+                        (TCR_EL1_ORGN1_WBWA << TCR_EL1_ORGN1_SHIFT))
+#define TCR_SHARED     ((TCR_EL1_SH0_INNER << TCR_EL1_SH0_SHIFT) |\
+                        (TCR_EL1_SH1_INNER << TCR_EL1_SH1_SHIFT))
+
 /*
  * Default MAIR_EL1. MT_NORMAL_TAGGED is initially mapped as Normal memory and
  * changed during mte_cpu_setup to Normal Tagged if the system supports MTE.
@@ -129,7 +139,7 @@ SYM_FUNC_START(cpu_do_resume)
 
        /* Don't change t0sz here, mask those bits when restoring */
        mrs     x7, tcr_el1
-       bfi     x8, x7, TCR_T0SZ_OFFSET, TCR_TxSZ_WIDTH
+       bfi     x8, x7, TCR_EL1_T0SZ_SHIFT, TCR_EL1_T0SZ_WIDTH
 
        msr     tcr_el1, x8
        msr     vbar_el1, x9
@@ -481,8 +491,8 @@ SYM_FUNC_START(__cpu_setup)
        tcr2    .req    x15
        mov_q   mair, MAIR_EL1_SET
        mov_q   tcr, TCR_T0SZ(IDMAP_VA_BITS) | TCR_T1SZ(VA_BITS_MIN) | TCR_CACHE_FLAGS | \
-                    TCR_SHARED | TCR_TG_FLAGS | TCR_KASLR_FLAGS | TCR_ASID16 | \
-                    TCR_TBI0 | TCR_A1 | TCR_KASAN_SW_FLAGS | TCR_MTE_FLAGS
+                    TCR_SHARED | TCR_TG_FLAGS | TCR_KASLR_FLAGS | TCR_EL1_AS | \
+                    TCR_EL1_TBI0 | TCR_EL1_A1 | TCR_KASAN_SW_FLAGS | TCR_MTE_FLAGS
        mov     tcr2, xzr
 
        tcr_clear_errata_bits tcr, x9, x5
@@ -492,7 +502,7 @@ SYM_FUNC_START(__cpu_setup)
 alternative_if ARM64_HAS_VA52
        tcr_set_t1sz    tcr, x9
 #ifdef CONFIG_ARM64_LPA2
-       orr             tcr, tcr, #TCR_DS
+       orr             tcr, tcr, #TCR_EL1_DS
 #endif
 alternative_else_nop_endif
 #endif
@@ -500,7 +510,7 @@ alternative_else_nop_endif
        /*
         * Set the IPS bits in TCR_EL1.
         */
-       tcr_compute_pa_size tcr, #TCR_IPS_SHIFT, x5, x6
+       tcr_compute_pa_size tcr, #TCR_EL1_IPS_SHIFT, x5, x6
 #ifdef CONFIG_ARM64_HW_AFDBM
        /*
         * Enable hardware update of the Access Flags bit.
@@ -510,7 +520,7 @@ alternative_else_nop_endif
        mrs     x9, ID_AA64MMFR1_EL1
        ubfx    x9, x9, ID_AA64MMFR1_EL1_HAFDBS_SHIFT, #4
        cbz     x9, 1f
-       orr     tcr, tcr, #TCR_HA               // hardware Access flag update
+       orr     tcr, tcr, #TCR_EL1_HA           // hardware Access flag update
 #ifdef CONFIG_ARM64_HAFT
        cmp     x9, ID_AA64MMFR1_EL1_HAFDBS_HAFT
        b.lt    1f