]> git.ipfire.org Git - thirdparty/kernel/stable.git/commitdiff
ARM: 9464/1: fix input-only operand modification in load_unaligned_zeropad()
author: Liyuan Pang <pangliyuan1@huawei.com>
Tue, 9 Dec 2025 02:19:45 +0000 (03:19 +0100)
committerRussell King (Oracle) <rmk+kernel@armlinux.org.uk>
Tue, 9 Dec 2025 09:18:53 +0000 (09:18 +0000)
In the inline assembly inside load_unaligned_zeropad(), "addr" is
constrained as an input-only operand. The compiler assumes that on exit
from the asm statement these operands contain the same values as they
had before executing the statement. However, when a kernel page fault
occurs, the assembly fixup code "bic %2, %2, #0x3" modifies the value of
"addr", which may lead to unexpected behavior.

Use a temporary variable "tmp" to handle it, instead of modifying the
input-only operand, just like what arm64's load_unaligned_zeropad()
does.

Fixes: b9a50f74905a ("ARM: 7450/1: dcache: select DCACHE_WORD_ACCESS for little-endian ARMv6+ CPUs")
Co-developed-by: Xie Yuanbin <xieyuanbin1@huawei.com>
Signed-off-by: Xie Yuanbin <xieyuanbin1@huawei.com>
Signed-off-by: Liyuan Pang <pangliyuan1@huawei.com>
Signed-off-by: Russell King (Oracle) <rmk+kernel@armlinux.org.uk>
arch/arm/include/asm/word-at-a-time.h

index f9a3897b06e7ff8af5ae5887ed0b0949b9b93c3b..5023f98d8293d5a73c2469c349802098766f9105 100644 (file)
@@ -67,7 +67,7 @@ static inline unsigned long find_zero(unsigned long mask)
  */
 static inline unsigned long load_unaligned_zeropad(const void *addr)
 {
-       unsigned long ret, offset;
+       unsigned long ret, tmp;
 
        /* Load word from unaligned pointer addr */
        asm(
@@ -75,9 +75,9 @@ static inline unsigned long load_unaligned_zeropad(const void *addr)
        "2:\n"
        "       .pushsection .text.fixup,\"ax\"\n"
        "       .align 2\n"
-       "3:     and     %1, %2, #0x3\n"
-       "       bic     %2, %2, #0x3\n"
-       "       ldr     %0, [%2]\n"
+       "3:     bic     %1, %2, #0x3\n"
+       "       ldr     %0, [%1]\n"
+       "       and     %1, %2, #0x3\n"
        "       lsl     %1, %1, #0x3\n"
 #ifndef __ARMEB__
        "       lsr     %0, %0, %1\n"
@@ -90,7 +90,7 @@ static inline unsigned long load_unaligned_zeropad(const void *addr)
        "       .align  3\n"
        "       .long   1b, 3b\n"
        "       .popsection"
-       : "=&r" (ret), "=&r" (offset)
+       : "=&r" (ret), "=&r" (tmp)
        : "r" (addr), "Qo" (*(unsigned long *)addr));
 
        return ret;