crypto: powerpc/poly1305 - Add SIMD fallback
author     Herbert Xu <herbert@gondor.apana.org.au>
           Sat, 10 May 2025 09:13:47 +0000 (17:13 +0800)
committer  Herbert Xu <herbert@gondor.apana.org.au>
           Wed, 14 May 2025 09:45:22 +0000 (17:45 +0800)
Add a SIMD fallback path for poly1305-p10 by converting the base 2^64
hash state into base 2^44 whenever the SIMD unit cannot be used.  To
ensure that the generic fallback actually operates in base 2^44, add
ARCH_SUPPORTS_INT128 to powerpc and make poly1305-p10 depend on it.

Fixes: ba8f8624fde2 ("crypto: poly1305-p10 - Glue code for optmized Poly1305 implementation for ppc64le")
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
arch/powerpc/Kconfig
arch/powerpc/lib/crypto/Kconfig
arch/powerpc/lib/crypto/poly1305-p10-glue.c
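
The conversion itself is a pure change of radix: the roughly 130-bit
accumulator h = h0 + h1*2^64 + h2*2^128 is resliced into three 44-bit
limbs so that h = l0 + l1*2^44 + l2*2^88.  The following user-space
sketch (an illustration only, not part of the commit) checks that the
exact shifts used by convert_to_base2_44() below are lossless by
inverting them:

/*
 * Illustration: verify that the base 2^64 -> base 2^44 limb split
 * performed by convert_to_base2_44() is lossless, i.e. that
 * h0 + h1*2^64 + h2*2^128 == l0 + l1*2^44 + l2*2^88.  The sample
 * values are arbitrary; h2 stays tiny because the Poly1305
 * accumulator is only about 130 bits wide.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint64_t h0 = 0x0123456789abcdefULL;
        uint64_t h1 = 0xfedcba9876543210ULL;
        uint64_t h2 = 0x3;                      /* bits above 2^128 */

        /* forward split: the same shifts as convert_to_base2_44() */
        uint64_t l0 = h0 & 0xfffffffffffULL;
        uint64_t l1 = h0 >> 44 | (h1 & 0xffffffULL) << 20;
        uint64_t l2 = h1 >> 24 | h2 << 40;

        /* inverse: reassemble the original base 2^64 limbs */
        uint64_t r0 = l0 | l1 << 44;
        uint64_t r1 = l1 >> 20 | l2 << 24;
        uint64_t r2 = l2 >> 40;

        assert(r0 == h0 && r1 == h1 && r2 == h2);
        printf("base 2^44 limbs: %011llx %011llx %011llx\n",
               (unsigned long long)l2, (unsigned long long)l1,
               (unsigned long long)l0);
        return 0;
}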

diff --git a/arch/powerpc/Kconfig b/arch/powerpc/Kconfig
index 6722625a406a019a24295b0c7de19da930244e3c..651e0c32957ad6828522a8ba820961a49a83212f 100644
--- a/arch/powerpc/Kconfig
+++ b/arch/powerpc/Kconfig
@@ -173,6 +173,7 @@ config PPC
        select ARCH_STACKWALK
        select ARCH_SUPPORTS_ATOMIC_RMW
        select ARCH_SUPPORTS_DEBUG_PAGEALLOC    if PPC_BOOK3S || PPC_8xx
+       select ARCH_SUPPORTS_INT128             if PPC64 && CC_HAS_INT128
        select ARCH_USE_BUILTIN_BSWAP
        select ARCH_USE_CMPXCHG_LOCKREF         if PPC64
        select ARCH_USE_MEMTEST
diff --git a/arch/powerpc/lib/crypto/Kconfig b/arch/powerpc/lib/crypto/Kconfig
index ffa541ad6d5da479c7b679ea2edef6035b18e911..6761fdb6193c4df3bb8236ebd90608e4111a3801 100644
--- a/arch/powerpc/lib/crypto/Kconfig
+++ b/arch/powerpc/lib/crypto/Kconfig
@@ -9,7 +9,7 @@ config CRYPTO_CHACHA20_P10
 
 config CRYPTO_POLY1305_P10
        tristate
-       depends on PPC64 && CPU_LITTLE_ENDIAN && VSX
+       depends on PPC64 && CPU_LITTLE_ENDIAN && VSX && ARCH_SUPPORTS_INT128
        default CRYPTO_LIB_POLY1305
        select CRYPTO_ARCH_HAVE_LIB_POLY1305
        select CRYPTO_LIB_POLY1305_GENERIC
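
The new ARCH_SUPPORTS_INT128 dependency is what guarantees the fallback
really is base 2^44: the generic Poly1305 library code uses 44-bit limbs
only when 64x64->128-bit multiplies are available, and otherwise falls
back to base 2^26, which the converted state would not match.  Below is
a rough sketch (illustrative names, not the kernel's) of the kind of
product the base 2^44 path performs per block, assuming a compiler that
provides unsigned __int128, which is what CC_HAS_INT128 tests:

/*
 * Sketch only: one column of the base 2^44 Poly1305 multiply h * r.
 * Each limb is < 2^44, so a single product is up to 88 bits and the
 * three-term sum must accumulate in 128 bits -- hence the dependency
 * on ARCH_SUPPORTS_INT128.
 */
typedef unsigned __int128 u128;
typedef unsigned long long u64;

static u64 poly_column0(u64 h0, u64 h1, u64 h2,
                        u64 r0, u64 s1, u64 s2, u64 *carry)
{
        /* s1/s2 fold the 2^130 == 5 reduction into the multiply */
        u128 d0 = (u128)h0 * r0 + (u128)h1 * s2 + (u128)h2 * s1;

        *carry = (u64)(d0 >> 44);               /* into the 2^44 column */
        return (u64)d0 & 0xfffffffffffULL;      /* low 44 bits */
}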
diff --git a/arch/powerpc/lib/crypto/poly1305-p10-glue.c b/arch/powerpc/lib/crypto/poly1305-p10-glue.c
index 3f1664a724b655f0be0ab4d268b3a0d383add1c2..280c10c48c5337837aff5b55c06a1764d98952f7 100644
--- a/arch/powerpc/lib/crypto/poly1305-p10-glue.c
+++ b/arch/powerpc/lib/crypto/poly1305-p10-glue.c
@@ -6,6 +6,7 @@
  */
 #include <asm/switch_to.h>
 #include <crypto/internal/poly1305.h>
+#include <crypto/internal/simd.h>
 #include <linux/cpufeature.h>
 #include <linux/jump_label.h>
 #include <linux/kernel.h>
@@ -18,6 +19,11 @@ asmlinkage void poly1305_emit_64(const struct poly1305_state *state, const u32 n
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_p10);
 
+static inline bool is_state_base64(struct poly1305_block_state *state)
+{
+       return state->core_r.precomputed_s.r64[2];
+}
+
 static void vsx_begin(void)
 {
        preempt_disable();
@@ -30,12 +36,35 @@ static void vsx_end(void)
        preempt_enable();
 }
 
+static void convert_to_base2_44(struct poly1305_block_state *state)
+{
+       u8 raw_key[POLY1305_BLOCK_SIZE];
+       u64 h0, h1, h2;
+
+       if (!is_state_base64(state))
+               return;
+
+       state->core_r.precomputed_s.r64[2] = 0;
+       put_unaligned_le64(state->core_r.key.r64[0], raw_key + 0);
+       put_unaligned_le64(state->core_r.key.r64[1], raw_key + 8);
+       poly1305_core_setkey(&state->core_r, raw_key);
+
+       h0 = state->h.h64[0];
+       h1 = state->h.h64[1];
+       h2 = state->h.h64[2];
+       state->h.h64[0] = h0 & 0xfffffffffffULL;
+       state->h.h64[1] = h0 >> 44 | (h1 & 0xffffffULL) << 20;
+       state->h.h64[2] = h1 >> 24 | h2 << 40;
+}
+
 void poly1305_block_init_arch(struct poly1305_block_state *dctx,
                              const u8 raw_key[POLY1305_BLOCK_SIZE])
 {
-       if (!static_key_enabled(&have_p10))
+       dctx->core_r.precomputed_s.r64[2] = 0;
+       if (!static_key_enabled(&have_p10) || !crypto_simd_usable())
                return poly1305_block_init_generic(dctx, raw_key);
 
+       dctx->core_r.precomputed_s.r64[2] = 1;
        dctx->h = (struct poly1305_state){};
        dctx->core_r.key.r64[0] = get_unaligned_le64(raw_key + 0);
        dctx->core_r.key.r64[1] = get_unaligned_le64(raw_key + 8);
@@ -45,8 +74,11 @@ EXPORT_SYMBOL_GPL(poly1305_block_init_arch);
 void poly1305_blocks_arch(struct poly1305_block_state *state, const u8 *src,
                          unsigned int len, u32 padbit)
 {
-       if (!static_key_enabled(&have_p10))
+       if (!static_key_enabled(&have_p10) || !is_state_base64(state) ||
+           !crypto_simd_usable()) {
+               convert_to_base2_44(state);
                return poly1305_blocks_generic(state, src, len, padbit);
+       }
        vsx_begin();
        if (len >= POLY1305_BLOCK_SIZE * 4) {
                poly1305_p10le_4blocks(state, src, len);
@@ -66,7 +98,10 @@ void poly1305_emit_arch(const struct poly1305_state *state,
                        u8 digest[POLY1305_DIGEST_SIZE],
                        const u32 nonce[4])
 {
-       if (!static_key_enabled(&have_p10))
+       struct poly1305_block_state *dctx =
+               container_of(state, struct poly1305_block_state, h);
+
+       if (!static_key_enabled(&have_p10) || !is_state_base64(dctx))
                return poly1305_emit_generic(state, digest, nonce);
        poly1305_emit_64(state, nonce, digest);
 }
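
Taken together, the glue changes implement a radix-tagging pattern:
poly1305_block_init_arch() writes 1 into precomputed_s.r64[2], a limb
the base 2^64 code does not otherwise use, to mark a P10-initialized
state; the first blocks call that cannot take the P10 path converts the
state to base 2^44 and clears the tag; and poly1305_emit_arch() recovers
the enclosing block state with container_of() so it can consult the same
tag.  A condensed sketch of that pattern, with hypothetical names rather
than the kernel API:

/*
 * Condensed sketch (hypothetical names, not the kernel API) of the
 * fallback pattern implemented above: tag the state with the radix it
 * is stored in and convert it lazily, at most once, the first time
 * the SIMD path cannot run (e.g. in softirq context).
 */
#include <stdbool.h>

struct state {
        unsigned long long h[3];
        bool base64;    /* plays the role of precomputed_s.r64[2] */
};

/* stand-ins for convert_to_base2_44() and the two block routines */
static void to_base2_44(struct state *s)
{
        if (!s->base64)
                return;         /* already converted: no-op, as above */
        /* ... reslice s->h into 44-bit limbs ... */
        s->base64 = false;
}

static void blocks_generic(struct state *s) { (void)s; /* base 2^44 */ }
static void blocks_simd(struct state *s)    { (void)s; /* base 2^64 */ }

static void blocks(struct state *s, bool simd_usable)
{
        if (!simd_usable || !s->base64) {
                to_base2_44(s);         /* one-way, idempotent */
                blocks_generic(s);
                return;
        }
        blocks_simd(s);
}

The conversion is deliberately one-way: once the state is in base 2^44
the P10 routines could no longer process it, so a cleared tag keeps the
rest of the digest, emit included, on the generic path even if SIMD
becomes usable again.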