lib/crypto: arm/aes: Migrate optimized code into library
author    Eric Biggers <ebiggers@kernel.org>
          Mon, 12 Jan 2026 19:20:08 +0000 (11:20 -0800)
committer Eric Biggers <ebiggers@kernel.org>
          Mon, 12 Jan 2026 19:39:58 +0000 (11:39 -0800)
Move the ARM optimized single-block AES en/decryption code into
lib/crypto/, wire it up to the AES library API, and remove the
superseded "aes-arm" crypto_cipher algorithm.

The result is that both the AES library and crypto_cipher APIs are now
optimized for ARM, whereas previously only crypto_cipher was (and the
optimizations weren't enabled by default, which this fixes as well).

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20260112192035.10427-11-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
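
For orientation, here is a minimal sketch (not part of the patch) of how the generic lib/crypto/aes.c can dispatch into the code added below. Only aes_encrypt_arch(), struct aes_enckey, and CONFIG_CRYPTO_LIB_AES_ARCH actually appear in this diff; aes_encrypt() and aes_encrypt_generic() are assumed names, patterned on the aes_expandkey_generic() call visible in the new header.

/*
 * Sketch only: the per-arch header is picked up via the
 * -I$(src)/$(SRCARCH) include path (see the lib/crypto/Makefile hunk
 * below), so the generic code needs no arch-specific #ifdef beyond
 * this one.
 */
#ifdef CONFIG_CRYPTO_LIB_AES_ARCH
#include "aes.h"	/* resolves to lib/crypto/arm/aes.h on ARM */
#endif

void aes_encrypt(const struct aes_enckey *key,
		 u8 out[AES_BLOCK_SIZE], const u8 in[AES_BLOCK_SIZE])
{
#ifdef CONFIG_CRYPTO_LIB_AES_ARCH
	aes_encrypt_arch(key, out, in);		/* ARM scalar assembly */
#else
	aes_encrypt_generic(key, out, in);	/* portable C tables */
#endif
}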
12 files changed:
arch/arm/configs/milbeaut_m10v_defconfig
arch/arm/configs/multi_v7_defconfig
arch/arm/configs/omap2plus_defconfig
arch/arm/configs/pxa_defconfig
arch/arm/crypto/Kconfig
arch/arm/crypto/Makefile
arch/arm/crypto/aes-cipher-glue.c [deleted file]
arch/arm/crypto/aes-cipher.h [deleted file]
lib/crypto/Kconfig
lib/crypto/Makefile
lib/crypto/arm/aes-cipher-core.S [moved from arch/arm/crypto/aes-cipher-core.S with 100% similarity]
lib/crypto/arm/aes.h [new file with mode: 0644]

diff --git a/arch/arm/configs/milbeaut_m10v_defconfig b/arch/arm/configs/milbeaut_m10v_defconfig
index a2995eb390c603c8c6c2da47d33c8462f3a8198d..77b69d672d40065651536a3dae03ffe53d673db4 100644
@@ -98,7 +98,6 @@ CONFIG_CRYPTO_SELFTESTS=y
 CONFIG_CRYPTO_AES=y
 CONFIG_CRYPTO_SEQIV=m
 CONFIG_CRYPTO_GHASH_ARM_CE=m
-CONFIG_CRYPTO_AES_ARM=m
 CONFIG_CRYPTO_AES_ARM_BS=m
 CONFIG_CRYPTO_AES_ARM_CE=m
 # CONFIG_CRYPTO_HW is not set
diff --git a/arch/arm/configs/multi_v7_defconfig b/arch/arm/configs/multi_v7_defconfig
index 7f1fa9dd88c92cf04e3b1d9c05166d0f407b3f65..b6d3e20926bb655dd3b7be26792b5ad4f558365b 100644
@@ -1286,7 +1286,7 @@ CONFIG_CRYPTO_USER_API_SKCIPHER=m
 CONFIG_CRYPTO_USER_API_RNG=m
 CONFIG_CRYPTO_USER_API_AEAD=m
 CONFIG_CRYPTO_GHASH_ARM_CE=m
-CONFIG_CRYPTO_AES_ARM=m
+CONFIG_CRYPTO_AES=m
 CONFIG_CRYPTO_AES_ARM_BS=m
 CONFIG_CRYPTO_AES_ARM_CE=m
 CONFIG_CRYPTO_DEV_SUN4I_SS=m
diff --git a/arch/arm/configs/omap2plus_defconfig b/arch/arm/configs/omap2plus_defconfig
index 4e53c331cd841c7e3d7b00e23bbdc8ca897773fa..0464f6552169bacac89f388dafe30a84226eccfd 100644
@@ -706,7 +706,7 @@ CONFIG_NLS_ISO8859_1=y
 CONFIG_SECURITY=y
 CONFIG_CRYPTO_MICHAEL_MIC=y
 CONFIG_CRYPTO_GHASH_ARM_CE=m
-CONFIG_CRYPTO_AES_ARM=m
+CONFIG_CRYPTO_AES=m
 CONFIG_CRYPTO_AES_ARM_BS=m
 CONFIG_CRYPTO_DEV_OMAP=m
 CONFIG_CRYPTO_DEV_OMAP_SHAM=m
diff --git a/arch/arm/configs/pxa_defconfig b/arch/arm/configs/pxa_defconfig
index 3ea189f1f42f9d3ba94fa90c26b5ca6765624b1c..eacd08fd87ad779a8a9d6f78c0184a731ff0e63e 100644
@@ -657,7 +657,7 @@ CONFIG_CRYPTO_ANUBIS=m
 CONFIG_CRYPTO_XCBC=m
 CONFIG_CRYPTO_DEFLATE=y
 CONFIG_CRYPTO_LZO=y
-CONFIG_CRYPTO_AES_ARM=m
+CONFIG_CRYPTO_AES=m
 CONFIG_FONTS=y
 CONFIG_FONT_8x8=y
 CONFIG_FONT_8x16=y
diff --git a/arch/arm/crypto/Kconfig b/arch/arm/crypto/Kconfig
index 167a648a9def9ecd19febbe4d8b217f610b84180..b9c28c818b7c460ed65fb8045a22fa7d06640a79 100644
@@ -23,24 +23,6 @@ config CRYPTO_GHASH_ARM_CE
          that is part of the ARMv8 Crypto Extensions, or a slower variant that
          uses the vmull.p8 instruction that is part of the basic NEON ISA.
 
-config CRYPTO_AES_ARM
-       tristate "Ciphers: AES"
-       select CRYPTO_ALGAPI
-       select CRYPTO_AES
-       help
-         Block ciphers: AES cipher algorithms (FIPS-197)
-
-         Architecture: arm
-
-         On ARM processors without the Crypto Extensions, this is the
-         fastest AES implementation for single blocks.  For multiple
-         blocks, the NEON bit-sliced implementation is usually faster.
-
-         This implementation may be vulnerable to cache timing attacks,
-         since it uses lookup tables.  However, as countermeasures it
-         disables IRQs and preloads the tables; it is hoped this makes
-         such attacks very difficult.
-
 config CRYPTO_AES_ARM_BS
        tristate "Ciphers: AES, modes: ECB/CBC/CTR/XTS (bit-sliced NEON)"
        depends on KERNEL_MODE_NEON
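
The deleted help text describes a real hardening technique worth keeping in mind: the scalar implementation is table-based, so it disables IRQs and preloads the tables to blunt cache-timing attacks. Illustratively, the pattern looks like this in C; in the actual code both steps live inside the assembly in aes-cipher-core.S, and prefetch_aes_tables() is a hypothetical stand-in for touching every table cache line.

static void aes_encrypt_hardened(const u32 *rk, int rounds,
				 const u8 *in, u8 *out)
{
	unsigned long flags;

	local_irq_save(flags);
	/*
	 * Touch every table cache line with interrupts off; the
	 * data-dependent lookups below then all hit in the cache,
	 * making their addresses much harder to observe via timing.
	 */
	prefetch_aes_tables();			/* hypothetical helper */
	__aes_arm_encrypt(rk, rounds, in, out);
	local_irq_restore(flags);
}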
diff --git a/arch/arm/crypto/Makefile b/arch/arm/crypto/Makefile
index d6683e9d499284e0a07d5ac0caa529ff37f44b33..e73099e120b38a032f3c99876fa028f8067336c1 100644
@@ -3,13 +3,11 @@
 # Arch-specific CryptoAPI modules.
 #
 
-obj-$(CONFIG_CRYPTO_AES_ARM) += aes-arm.o
 obj-$(CONFIG_CRYPTO_AES_ARM_BS) += aes-arm-bs.o
 
 obj-$(CONFIG_CRYPTO_AES_ARM_CE) += aes-arm-ce.o
 obj-$(CONFIG_CRYPTO_GHASH_ARM_CE) += ghash-arm-ce.o
 
-aes-arm-y      := aes-cipher-core.o aes-cipher-glue.o
 aes-arm-bs-y   := aes-neonbs-core.o aes-neonbs-glue.o
 aes-arm-ce-y   := aes-ce-core.o aes-ce-glue.o
 ghash-arm-ce-y := ghash-ce-core.o ghash-ce-glue.o
diff --git a/arch/arm/crypto/aes-cipher-glue.c b/arch/arm/crypto/aes-cipher-glue.c
deleted file mode 100644
index f302db8..0000000
--- a/arch/arm/crypto/aes-cipher-glue.c
+++ /dev/null
@@ -1,77 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0-only
-/*
- * Scalar AES core transform
- *
- * Copyright (C) 2017 Linaro Ltd.
- * Author: Ard Biesheuvel <ard.biesheuvel@linaro.org>
- */
-
-#include <crypto/aes.h>
-#include <crypto/algapi.h>
-#include <linux/module.h>
-#include "aes-cipher.h"
-
-EXPORT_SYMBOL_GPL(__aes_arm_encrypt);
-EXPORT_SYMBOL_GPL(__aes_arm_decrypt);
-
-static int aes_arm_setkey(struct crypto_tfm *tfm, const u8 *in_key,
-                         unsigned int key_len)
-{
-       struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-
-       return aes_expandkey(ctx, in_key, key_len);
-}
-
-static void aes_arm_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
-{
-       struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-       int rounds = 6 + ctx->key_length / 4;
-
-       __aes_arm_encrypt(ctx->key_enc, rounds, in, out);
-}
-
-static void aes_arm_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
-{
-       struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-       int rounds = 6 + ctx->key_length / 4;
-
-       __aes_arm_decrypt(ctx->key_dec, rounds, in, out);
-}
-
-static struct crypto_alg aes_alg = {
-       .cra_name                       = "aes",
-       .cra_driver_name                = "aes-arm",
-       .cra_priority                   = 200,
-       .cra_flags                      = CRYPTO_ALG_TYPE_CIPHER,
-       .cra_blocksize                  = AES_BLOCK_SIZE,
-       .cra_ctxsize                    = sizeof(struct crypto_aes_ctx),
-       .cra_module                     = THIS_MODULE,
-
-       .cra_cipher.cia_min_keysize     = AES_MIN_KEY_SIZE,
-       .cra_cipher.cia_max_keysize     = AES_MAX_KEY_SIZE,
-       .cra_cipher.cia_setkey          = aes_arm_setkey,
-       .cra_cipher.cia_encrypt         = aes_arm_encrypt,
-       .cra_cipher.cia_decrypt         = aes_arm_decrypt,
-
-#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
-       .cra_alignmask                  = 3,
-#endif
-};
-
-static int __init aes_init(void)
-{
-       return crypto_register_alg(&aes_alg);
-}
-
-static void __exit aes_fini(void)
-{
-       crypto_unregister_alg(&aes_alg);
-}
-
-module_init(aes_init);
-module_exit(aes_fini);
-
-MODULE_DESCRIPTION("Scalar AES cipher for ARM");
-MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
-MODULE_LICENSE("GPL v2");
-MODULE_ALIAS_CRYPTO("aes");
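
With this file gone, nothing registers an "aes-arm" crypto_cipher anymore. Per the commit message, that is intentional: the generic "aes" crypto_cipher is now backed by the library, so crypto_cipher users end up in the same ARM assembly rather than losing the optimization.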
diff --git a/arch/arm/crypto/aes-cipher.h b/arch/arm/crypto/aes-cipher.h
deleted file mode 100644
index d5db2b8..0000000
--- a/arch/arm/crypto/aes-cipher.h
+++ /dev/null
@@ -1,13 +0,0 @@
-/* SPDX-License-Identifier: GPL-2.0-only */
-#ifndef ARM_CRYPTO_AES_CIPHER_H
-#define ARM_CRYPTO_AES_CIPHER_H
-
-#include <linux/linkage.h>
-#include <linux/types.h>
-
-asmlinkage void __aes_arm_encrypt(const u32 rk[], int rounds,
-                                 const u8 *in, u8 *out);
-asmlinkage void __aes_arm_decrypt(const u32 rk[], int rounds,
-                                 const u8 *in, u8 *out);
-
-#endif /* ARM_CRYPTO_AES_CIPHER_H */
diff --git a/lib/crypto/Kconfig b/lib/crypto/Kconfig
index 4efad77daa241a2c584fbb6b61eeb9458aed1865..60420b421e04fc412ec69cd6c74d6778c9a8b052 100644
@@ -14,6 +14,7 @@ config CRYPTO_LIB_AES
 config CRYPTO_LIB_AES_ARCH
        bool
        depends on CRYPTO_LIB_AES && !UML && !KMSAN
+       default y if ARM
 
 config CRYPTO_LIB_AESCFB
        tristate
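
This one-line Kconfig change is the "enabled by default" fix mentioned in the commit message: with CRYPTO_LIB_AES_ARCH defaulting to y on ARM, every AES library user gets the optimized code automatically, whereas the old CRYPTO_AES_ARM module had to be enabled explicitly.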
diff --git a/lib/crypto/Makefile b/lib/crypto/Makefile
index 01193b3f47ba4452f9558e3f928632bde4a45905..2f6b0f59eb1b9c0b608e9afc40d65fa8dd7b8cf5 100644
@@ -21,6 +21,9 @@ obj-$(CONFIG_CRYPTO_LIB_AES) += libaes.o
 libaes-y := aes.o
 ifeq ($(CONFIG_CRYPTO_LIB_AES_ARCH),y)
 CFLAGS_aes.o += -I$(src)/$(SRCARCH)
+
+libaes-$(CONFIG_ARM) += arm/aes-cipher-core.o
+
 endif # CONFIG_CRYPTO_LIB_AES_ARCH
 
 ################################################################################
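
Two kbuild details do the heavy lifting here: the pre-existing -I$(src)/$(SRCARCH) flag means the generic aes.c picks up lib/crypto/arm/aes.h by name once it exists, and the new libaes-$(CONFIG_ARM) line links the assembly straight into libaes.o, so the optimized code needs no separate module or runtime registration.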
diff --git a/lib/crypto/arm/aes.h b/lib/crypto/arm/aes.h
new file mode 100644
index 0000000..1dd7dfa
--- /dev/null
+++ b/lib/crypto/arm/aes.h
@@ -0,0 +1,56 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * AES block cipher, optimized for ARM
+ *
+ * Copyright (C) 2017 Linaro Ltd.
+ * Copyright 2026 Google LLC
+ */
+
+asmlinkage void __aes_arm_encrypt(const u32 rk[], int rounds,
+                                 const u8 in[AES_BLOCK_SIZE],
+                                 u8 out[AES_BLOCK_SIZE]);
+asmlinkage void __aes_arm_decrypt(const u32 inv_rk[], int rounds,
+                                 const u8 in[AES_BLOCK_SIZE],
+                                 u8 out[AES_BLOCK_SIZE]);
+
+static void aes_preparekey_arch(union aes_enckey_arch *k,
+                               union aes_invkey_arch *inv_k,
+                               const u8 *in_key, int key_len, int nrounds)
+{
+       aes_expandkey_generic(k->rndkeys, inv_k ? inv_k->inv_rndkeys : NULL,
+                             in_key, key_len);
+}
+
+static void aes_encrypt_arch(const struct aes_enckey *key,
+                            u8 out[AES_BLOCK_SIZE],
+                            const u8 in[AES_BLOCK_SIZE])
+{
+       if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
+           !IS_ALIGNED((uintptr_t)out | (uintptr_t)in, 4)) {
+               u8 bounce_buf[AES_BLOCK_SIZE] __aligned(4);
+
+               memcpy(bounce_buf, in, AES_BLOCK_SIZE);
+               __aes_arm_encrypt(key->k.rndkeys, key->nrounds, bounce_buf,
+                                 bounce_buf);
+               memcpy(out, bounce_buf, AES_BLOCK_SIZE);
+               return;
+       }
+       __aes_arm_encrypt(key->k.rndkeys, key->nrounds, in, out);
+}
+
+static void aes_decrypt_arch(const struct aes_key *key,
+                            u8 out[AES_BLOCK_SIZE],
+                            const u8 in[AES_BLOCK_SIZE])
+{
+       if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
+           !IS_ALIGNED((uintptr_t)out | (uintptr_t)in, 4)) {
+               u8 bounce_buf[AES_BLOCK_SIZE] __aligned(4);
+
+               memcpy(bounce_buf, in, AES_BLOCK_SIZE);
+               __aes_arm_decrypt(key->inv_k.inv_rndkeys, key->nrounds,
+                                 bounce_buf, bounce_buf);
+               memcpy(out, bounce_buf, AES_BLOCK_SIZE);
+               return;
+       }
+       __aes_arm_decrypt(key->inv_k.inv_rndkeys, key->nrounds, in, out);
+}
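
Note how the alignment handling moved: the old crypto_cipher glue advertised cra_alignmask = 3 on !CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS builds and let the crypto API realign buffers for it, but the library API makes no alignment promises. The new header therefore bounces misaligned blocks through a 4-byte-aligned stack buffer before handing them to the assembly, which presumably relies on 32-bit word loads and stores (hence the 4-byte alignment check).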