arm64: Fix minor issues with the dcache_by_line_op macro
author Will Deacon <will.deacon@arm.com>
Mon, 10 Dec 2018 13:39:48 +0000 (13:39 +0000)
committer Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Sat, 26 Jan 2019 08:37:03 +0000 (09:37 +0100)
[ Upstream commit 33309ecda0070506c49182530abe7728850ebe78 ]

The dcache_by_line_op macro suffers from a couple of small problems:

First, the GAS directives that are currently being used rely on
assembler behavior that is not documented, and probably not guaranteed
to produce the correct behavior going forward. As a result, we end up
with some undefined symbols in cache.o:

$ nm arch/arm64/mm/cache.o
         ...
         U civac
         ...
         U cvac
         U cvap
         U cvau

This is because the comparisons used to select the operation type in
the dcache_by_line_op macro compare symbols, not strings, and even
though GAS appears to do the right thing here (undefined symbols with
the same name compare equal to each other), it seems unwise to rely
on this.
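
As an illustration only (not part of the patch; the clean_line_old and
clean_line_new macro names are made up for this sketch), the difference
between the two directive forms looks roughly like this: the arithmetic
.if form parses its bare operand as a symbol, which is what leaves the
undefined entries shown above, whereas .ifc is documented to compare its
two operands as literal strings and creates no symbol at all:

	.macro clean_line_old op, kaddr
	.if	(\op == cvau)		// "cvau" is parsed as a symbol that is
					// never defined (nm reports "U cvau");
					// relies on undocumented behaviour
	dc	civac, \kaddr
	.else
	dc	\op, \kaddr
	.endif
	.endm

	.macro clean_line_new op, kaddr
	.ifc	\op, cvau		// documented string comparison; no
					// symbol reference is created
	dc	civac, \kaddr
	.else
	dc	\op, \kaddr
	.endif
	.endm

	clean_line_old	cvau, x0	// expands to ".if (cvau == cvau)"
	clean_line_new	cvau, x0	// expands to ".ifc cvau, cvau"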

Second, when patching in a DC CVAP instruction on CPUs that support it,
the fallback path consists of a DC CVAC instruction, which may itself be
affected by CPU errata that require ARM64_WORKAROUND_CLEAN_CACHE but,
as written, never receives that workaround.

Solve these issues by unrolling the various maintenance routines and
using the conditional directives that are documented as operating on
strings. To avoid the complexity of nested alternatives, we move the
DC CVAP patching to __clean_dcache_area_pop, falling back to a branch
to __clean_dcache_area_poc if DCPOP is not supported by the CPU.
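
As a usage note (the calling code below is hypothetical; only the
__clean_dcache_area_pop and __clean_dcache_area_poc entry points come
from the patch): callers do not need to care whether the CPU implements
DC CVAP, because the dispatch now happens inside __clean_dcache_area_pop
itself:

	mov	x0, x19			// x19: start of region (assumed)
	mov	x1, x20			// x20: size of region (assumed)
	bl	__clean_dcache_area_pop	// uses DC CVAP when ARM64_HAS_DCPOP is
					// present, otherwise branches on to
					// __clean_dcache_area_poc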

Reported-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Suggested-by: Robin Murphy <robin.murphy@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Sasha Levin <sashal@kernel.org>
arch/arm64/include/asm/assembler.h
arch/arm64/mm/cache.S

diff --git a/arch/arm64/include/asm/assembler.h b/arch/arm64/include/asm/assembler.h
index 66aea4aa455df5b6e78f2a64d549d5ab85c63aa1..02d73d83f0deb1146e11e70f60e61aca7c79dcc4 100644
--- a/arch/arm64/include/asm/assembler.h
+++ b/arch/arm64/include/asm/assembler.h
@@ -373,27 +373,33 @@ alternative_endif
  *     size:           size of the region
  *     Corrupts:       kaddr, size, tmp1, tmp2
  */
+       .macro __dcache_op_workaround_clean_cache, op, kaddr
+alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
+       dc      \op, \kaddr
+alternative_else
+       dc      civac, \kaddr
+alternative_endif
+       .endm
+
        .macro dcache_by_line_op op, domain, kaddr, size, tmp1, tmp2
        dcache_line_size \tmp1, \tmp2
        add     \size, \kaddr, \size
        sub     \tmp2, \tmp1, #1
        bic     \kaddr, \kaddr, \tmp2
 9998:
-       .if     (\op == cvau || \op == cvac)
-alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
-       dc      \op, \kaddr
-alternative_else
-       dc      civac, \kaddr
-alternative_endif
-       .elseif (\op == cvap)
-alternative_if ARM64_HAS_DCPOP
-       sys 3, c7, c12, 1, \kaddr       // dc cvap
-alternative_else
-       dc      cvac, \kaddr
-alternative_endif
+       .ifc    \op, cvau
+       __dcache_op_workaround_clean_cache \op, \kaddr
+       .else
+       .ifc    \op, cvac
+       __dcache_op_workaround_clean_cache \op, \kaddr
+       .else
+       .ifc    \op, cvap
+       sys     3, c7, c12, 1, \kaddr   // dc cvap
        .else
        dc      \op, \kaddr
        .endif
+       .endif
+       .endif
        add     \kaddr, \kaddr, \tmp1
        cmp     \kaddr, \size
        b.lo    9998b
diff --git a/arch/arm64/mm/cache.S b/arch/arm64/mm/cache.S
index 91464e7f77cc33ff6c6d972fa38b5e92d4394e40..c1e8f3c6ffd5b07cd6278b0c2c2d207cdc47e153 100644
--- a/arch/arm64/mm/cache.S
+++ b/arch/arm64/mm/cache.S
@@ -181,6 +181,9 @@ ENDPROC(__dma_clean_area)
  *     - size    - size in question
  */
 ENTRY(__clean_dcache_area_pop)
+       alternative_if_not ARM64_HAS_DCPOP
+       b       __clean_dcache_area_poc
+       alternative_else_nop_endif
        dcache_by_line_op cvap, sy, x0, x1, x2, x3
        ret
 ENDPIPROC(__clean_dcache_area_pop)