crypto/arm64: sm4/xts - Merge ksimd scopes to reduce stack bloat
author Ard Biesheuvel <ardb@kernel.org>
Wed, 3 Dec 2025 16:38:06 +0000 (17:38 +0100)
committer Eric Biggers <ebiggers@kernel.org>
Tue, 9 Dec 2025 23:10:21 +0000 (15:10 -0800)
Merge the two ksimd scopes in the implementation of SM4-XTS to prevent
stack bloat in cases where the compiler fails to combine the stack slots
for the kernel mode FP/SIMD buffers.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Tested-by: Arnd Bergmann <arnd@arndb.de>
Link: https://lore.kernel.org/r/20251203163803.157541-6-ardb@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
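
The commit message refers to the compiler failing to combine the stack slots
for the kernel-mode FP/SIMD buffers. The sketch below illustrates that effect
in plain C; it is not the kernel's scoped_ksimd() machinery, and the buffer
size, variable names, and touch() helper are hypothetical. It only shows why
hoisting both uses into one scope guarantees a single stack slot.

#include <string.h>

static volatile unsigned char sink;

/* Stand-in for work done on a per-scope save area (names/sizes made up). */
static void touch(unsigned char *buf, size_t n)
{
	memset(buf, 0xaa, n);
	sink = buf[n - 1];	/* keep the buffer from being optimized away */
}

/* Two disjoint scopes: the compiler may, but need not, fold the two
 * buffers into one stack slot, so the frame can end up holding both. */
void two_scopes(void)
{
	{
		unsigned char save_a[512];	/* first scope's buffer */
		touch(save_a, sizeof(save_a));
	}
	{
		unsigned char save_b[512];	/* may get a second, separate slot */
		touch(save_b, sizeof(save_b));
	}
}

/* One merged scope: a single buffer, so a single slot is guaranteed. */
void one_scope(void)
{
	unsigned char save[512];

	touch(save, sizeof(save));	/* first use */
	touch(save, sizeof(save));	/* second use, same slot */
}

The patch below applies the one_scope() shape to sm4_xts_crypt(): with both
the bulk loop and the ciphertext-stealing tail inside a single scoped_ksimd()
region, at most one FP/SIMD save buffer can occupy the stack frame, regardless
of how the compiler merges slots.
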
arch/arm64/crypto/sm4-ce-glue.c

index 5569cece5a0b85e6ac96b36eac660618c8a87540..0eeabfa9ef25e49da539149c0a6566638874791b 100644 (file)
@@ -346,11 +346,11 @@ static int sm4_xts_crypt(struct skcipher_request *req, bool encrypt)
                tail = 0;
        }
 
-       while ((nbytes = walk.nbytes) >= SM4_BLOCK_SIZE) {
-               if (nbytes < walk.total)
-                       nbytes &= ~(SM4_BLOCK_SIZE - 1);
+       scoped_ksimd() {
+               while ((nbytes = walk.nbytes) >= SM4_BLOCK_SIZE) {
+                       if (nbytes < walk.total)
+                               nbytes &= ~(SM4_BLOCK_SIZE - 1);
 
-               scoped_ksimd() {
                        if (encrypt)
                                sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
                                                walk.src.virt.addr, walk.iv, nbytes,
@@ -359,32 +359,30 @@ static int sm4_xts_crypt(struct skcipher_request *req, bool encrypt)
                                sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
                                                walk.src.virt.addr, walk.iv, nbytes,
                                                rkey2_enc);
-               }
 
-               rkey2_enc = NULL;
+                       rkey2_enc = NULL;
 
-               err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
-               if (err)
-                       return err;
-       }
+                       err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
+                       if (err)
+                               return err;
+               }
 
-       if (likely(tail == 0))
-               return 0;
+               if (likely(tail == 0))
+                       return 0;
 
-       /* handle ciphertext stealing */
+               /* handle ciphertext stealing */
 
-       dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
-       if (req->dst != req->src)
-               dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen);
+               dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
+               if (req->dst != req->src)
+                       dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen);
 
-       skcipher_request_set_crypt(&subreq, src, dst, SM4_BLOCK_SIZE + tail,
-                                  req->iv);
+               skcipher_request_set_crypt(&subreq, src, dst,
+                                          SM4_BLOCK_SIZE + tail, req->iv);
 
-       err = skcipher_walk_virt(&walk, &subreq, false);
-       if (err)
-               return err;
+               err = skcipher_walk_virt(&walk, &subreq, false);
+               if (err)
+                       return err;
 
-       scoped_ksimd() {
                if (encrypt)
                        sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
                                        walk.src.virt.addr, walk.iv, walk.nbytes,