From c13aebfeeed4dc991a80ec1110e1cbac5d902908 Mon Sep 17 00:00:00 2001
From: Ard Biesheuvel
Date: Wed, 17 Sep 2025 23:33:30 +0200
Subject: [PATCH] crypto/arm64: sm4-ce-gcm - Avoid pointless yield of the NEON unit

Kernel mode NEON sections are now preemptible on arm64, and so there is
no need to yield it when calling APIs that may sleep.

Also, move the calls to kernel_neon_end() to the same scope as
kernel_neon_begin(). This is needed for a subsequent change where a
stack buffer is allocated transparently and passed to
kernel_neon_begin().

While at it, simplify the logic.

Reviewed-by: Eric Biggers
Acked-by: Herbert Xu
Acked-by: Catalin Marinas
Signed-off-by: Ard Biesheuvel
---
 arch/arm64/crypto/sm4-ce-gcm-glue.c | 25 ++++++-------------------
 1 file changed, 6 insertions(+), 19 deletions(-)

diff --git a/arch/arm64/crypto/sm4-ce-gcm-glue.c b/arch/arm64/crypto/sm4-ce-gcm-glue.c
index c2ea3d5f690b3..8f6fc8c33c3fe 100644
--- a/arch/arm64/crypto/sm4-ce-gcm-glue.c
+++ b/arch/arm64/crypto/sm4-ce-gcm-glue.c
@@ -154,36 +154,23 @@ static int gcm_crypt(struct aead_request *req, struct skcipher_walk *walk,
 	if (req->assoclen)
 		gcm_calculate_auth_mac(req, ghash);
 
-	while (walk->nbytes) {
+	do {
 		unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;
 		const u8 *src = walk->src.virt.addr;
 		u8 *dst = walk->dst.virt.addr;
+		const u8 *l = NULL;
 
 		if (walk->nbytes == walk->total) {
-			sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
-					       walk->nbytes, ghash,
-					       ctx->ghash_table,
-					       (const u8 *)&lengths);
-
-			kernel_neon_end();
-
-			return skcipher_walk_done(walk, 0);
+			l = (const u8 *)&lengths;
+			tail = 0;
 		}
 
 		sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
 				       walk->nbytes - tail, ghash,
-				       ctx->ghash_table, NULL);
-
-		kernel_neon_end();
+				       ctx->ghash_table, l);
 
 		err = skcipher_walk_done(walk, tail);
-
-		kernel_neon_begin();
-	}
-
-	sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, NULL, NULL, iv,
-			       walk->nbytes, ghash, ctx->ghash_table,
-			       (const u8 *)&lengths);
+	} while (walk->nbytes);
 
 	kernel_neon_end();
 
-- 
2.47.3
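
For reference, this is roughly how the rewritten section of gcm_crypt() reads
once the patch applies. It is a sketch reconstructed from the hunk above: the
kernel_neon_begin() call and the declarations of ctx, walk, iv, ghash, lengths
and err live in unchanged parts of the function that the diff does not show.

	kernel_neon_begin();

	if (req->assoclen)
		gcm_calculate_auth_mac(req, ghash);

	/*
	 * A do/while loop so that an empty message (walk->nbytes == 0 with
	 * walk->total == 0) still takes one pass and folds in the lengths
	 * block, replacing the trailing sm4_ce_pmull_gcm_crypt() call that
	 * the old code needed after its while loop.
	 */
	do {
		unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;
		const u8 *src = walk->src.virt.addr;
		u8 *dst = walk->dst.virt.addr;
		const u8 *l = NULL;

		/* Final walk step: pass the lengths block and no tail. */
		if (walk->nbytes == walk->total) {
			l = (const u8 *)&lengths;
			tail = 0;
		}

		sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
				       walk->nbytes - tail, ghash,
				       ctx->ghash_table, l);

		/*
		 * May sleep, which is fine inside the NEON section now that
		 * kernel mode NEON is preemptible on arm64; no yield needed.
		 */
		err = skcipher_walk_done(walk, tail);
	} while (walk->nbytes);

	kernel_neon_end();

With the begin/end pair at a single outer scope, the subsequent change the
commit message mentions can hand a transparently allocated stack buffer to
kernel_neon_begin() without having to thread it through the loop body.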