lib/crypto: riscv: Depend on RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
author     Eric Biggers <ebiggers@kernel.org>
           Sat, 6 Dec 2025 21:37:50 +0000 (13:37 -0800)
committer  Eric Biggers <ebiggers@kernel.org>
           Tue, 9 Dec 2025 23:10:21 +0000 (15:10 -0800)

Replace the RISCV_ISA_V dependency of the RISC-V crypto code with
RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS, which implies RISCV_ISA_V as
well as vector unaligned accesses being efficient.

This is necessary because this code assumes that vector unaligned
accesses are supported and are efficient.  (It does so to avoid having
to use lots of extra vsetvli instructions to switch the element width
back and forth between 8 and either 32 or 64.)
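
For illustration, a minimal RVV sketch of that trade-off (not code from the
kernel sources; the register assignments and the 32-bit element width are
assumptions for the example):

    # Assumed: a0 = byte pointer to the input (possibly unaligned),
    #          a1 = number of 32-bit words to process.

    # (a) What the crypto code does: one vsetvli, load at SEW=32.
    #     vle32.v on a misaligned address is legal, but may trap or be
    #     emulated/slow unless unaligned vector accesses are efficient.
    vsetvli t0, a1, e32, m4, ta, ma
    vle32.v v8, (a0)
    # ... 32-bit arithmetic on v8..v11 ...

    # (b) The alignment-safe alternative the commit message alludes to:
    #     load as bytes, then switch the element width back for the math.
    slli    t1, a1, 2                   # byte count = word count * 4
    vsetvli t0, t1, e8, m4, ta, ma
    vle8.v  v8, (a0)                    # byte loads are never misaligned
    vsetvli zero, a1, e32, m4, ta, ma   # extra SEW switch before the math
    # ... 32-bit arithmetic on v8..v11 ...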

This was omitted from the code originally just because the RISC-V kernel
support for detecting this feature didn't exist yet.  Support has now
been added, but it's fragmented into per-CPU runtime detection, a
command-line parameter, and a kconfig option.  The kconfig option is the
only reasonable way to do it, though, so let's just rely on that.

Fixes: eb24af5d7a05 ("crypto: riscv - add vector crypto accelerated AES-{ECB,CBC,CTR,XTS}")
Fixes: bb54668837a0 ("crypto: riscv - add vector crypto accelerated ChaCha20")
Fixes: 600a3853dfa0 ("crypto: riscv - add vector crypto accelerated GHASH")
Fixes: 8c8e40470ffe ("crypto: riscv - add vector crypto accelerated SHA-{256,224}")
Fixes: b3415925a08b ("crypto: riscv - add vector crypto accelerated SHA-{512,384}")
Fixes: 563a5255afa2 ("crypto: riscv - add vector crypto accelerated SM3")
Fixes: b8d06352bbf3 ("crypto: riscv - add vector crypto accelerated SM4")
Cc: stable@vger.kernel.org
Reported-by: Vivian Wang <wangruikang@iscas.ac.cn>
Closes: https://lore.kernel.org/r/b3cfcdac-0337-4db0-a611-258f2868855f@iscas.ac.cn/
Reviewed-by: Jerry Shih <jerry.shih@sifive.com>
Link: https://lore.kernel.org/r/20251206213750.81474-1-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
 arch/riscv/crypto/Kconfig | 12 ++++++++----
 lib/crypto/Kconfig        |  9 ++++++---
 2 files changed, 14 insertions(+), 7 deletions(-)

diff --git a/arch/riscv/crypto/Kconfig b/arch/riscv/crypto/Kconfig
index a75d6325607b4c46184cc61a40b76121a44ad320..14c5acb935e94145c8c45955d7243b51631bce4b 100644
--- a/arch/riscv/crypto/Kconfig
+++ b/arch/riscv/crypto/Kconfig
@@ -4,7 +4,8 @@ menu "Accelerated Cryptographic Algorithms for CPU (riscv)"
 
 config CRYPTO_AES_RISCV64
        tristate "Ciphers: AES, modes: ECB, CBC, CTS, CTR, XTS"
-       depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+       depends on 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+                  RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
        select CRYPTO_ALGAPI
        select CRYPTO_LIB_AES
        select CRYPTO_SKCIPHER
@@ -20,7 +21,8 @@ config CRYPTO_AES_RISCV64
 
 config CRYPTO_GHASH_RISCV64
        tristate "Hash functions: GHASH"
-       depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+       depends on 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+                  RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
        select CRYPTO_GCM
        help
          GCM GHASH function (NIST SP 800-38D)
@@ -30,7 +32,8 @@ config CRYPTO_GHASH_RISCV64
 
 config CRYPTO_SM3_RISCV64
        tristate "Hash functions: SM3 (ShangMi 3)"
-       depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+       depends on 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+                  RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
        select CRYPTO_HASH
        select CRYPTO_LIB_SM3
        help
@@ -42,7 +45,8 @@ config CRYPTO_SM3_RISCV64
 
 config CRYPTO_SM4_RISCV64
        tristate "Ciphers: SM4 (ShangMi 4)"
-       depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+       depends on 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+                  RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
        select CRYPTO_ALGAPI
        select CRYPTO_SM4
        help
diff --git a/lib/crypto/Kconfig b/lib/crypto/Kconfig
index a3647352bff600e89e2d909516ec8a690ced2964..6871a41e5069f71e8db9eb50570f938a0774542d 100644
--- a/lib/crypto/Kconfig
+++ b/lib/crypto/Kconfig
@@ -61,7 +61,8 @@ config CRYPTO_LIB_CHACHA_ARCH
        default y if ARM64 && KERNEL_MODE_NEON
        default y if MIPS && CPU_MIPS32_R2
        default y if PPC64 && CPU_LITTLE_ENDIAN && VSX
-       default y if RISCV && 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+       default y if RISCV && 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+                    RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
        default y if S390
        default y if X86_64
 
@@ -184,7 +185,8 @@ config CRYPTO_LIB_SHA256_ARCH
        default y if ARM64
        default y if MIPS && CPU_CAVIUM_OCTEON
        default y if PPC && SPE
-       default y if RISCV && 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+       default y if RISCV && 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+                    RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
        default y if S390
        default y if SPARC64
        default y if X86_64
@@ -202,7 +204,8 @@ config CRYPTO_LIB_SHA512_ARCH
        default y if ARM && !CPU_V7M
        default y if ARM64
        default y if MIPS && CPU_CAVIUM_OCTEON
-       default y if RISCV && 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+       default y if RISCV && 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
+                    RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
        default y if S390
        default y if SPARC64
        default y if X86_64