From 0cab15611e839142f4fd3c8a366acd1f7334b30b Mon Sep 17 00:00:00 2001
From: Eric Biggers <ebiggers@kernel.org>
Date: Mon, 12 Jan 2026 11:20:13 -0800
Subject: [PATCH] lib/crypto: s390/aes: Migrate optimized code into library

Implement aes_preparekey_arch(), aes_encrypt_arch(), and
aes_decrypt_arch() using the CPACF AES instructions.  Then, remove the
superseded "aes-s390" crypto_cipher.

The result is that both the AES library and crypto_cipher APIs use the
CPACF AES instructions, whereas previously only crypto_cipher did (and
it wasn't enabled by default, which this commit fixes as well).

Note that this preserves the optimization where the AES key is stored
in raw form rather than expanded form.  CPACF just takes the raw key.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Tested-by: Holger Dengler <dengler@linux.ibm.com>
Reviewed-by: Holger Dengler <dengler@linux.ibm.com>
Link: https://lore.kernel.org/r/20260112192035.10427-16-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
---
 arch/s390/crypto/Kconfig    |   2 -
 arch/s390/crypto/aes_s390.c | 113 ------------------------------------
 include/crypto/aes.h        |   3 +
 lib/crypto/Kconfig          |   1 +
 lib/crypto/s390/aes.h       | 106 +++++++++++++++++++++++++++++++++++
 5 files changed, 110 insertions(+), 115 deletions(-)
 create mode 100644 lib/crypto/s390/aes.h

diff --git a/arch/s390/crypto/Kconfig b/arch/s390/crypto/Kconfig
index f838ca055f6d7..79a2d0034258b 100644
--- a/arch/s390/crypto/Kconfig
+++ b/arch/s390/crypto/Kconfig
@@ -14,10 +14,8 @@ config CRYPTO_GHASH_S390
 
 config CRYPTO_AES_S390
 	tristate "Ciphers: AES, modes: ECB, CBC, CTR, XTS, GCM"
-	select CRYPTO_ALGAPI
 	select CRYPTO_SKCIPHER
 	help
-	  Block cipher: AES cipher algorithms (FIPS 197)
 	  AEAD cipher: AES with GCM
 	  Length-preserving ciphers: AES with ECB, CBC, XTS, and CTR modes
 
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index d0a2954356805..62edc66d54788 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -20,7 +20,6 @@
 #include <crypto/algapi.h>
 #include <crypto/ghash.h>
 #include <crypto/internal/aead.h>
-#include <crypto/internal/cipher.h>
 #include <crypto/internal/skcipher.h>
 #include <crypto/scatterwalk.h>
 #include <linux/err.h>
@@ -45,7 +44,6 @@ struct s390_aes_ctx {
 	unsigned long fc;
 	union {
 		struct crypto_skcipher *skcipher;
-		struct crypto_cipher *cip;
 	} fallback;
 };
 
@@ -72,109 +70,6 @@ struct gcm_sg_walk {
 	unsigned int nbytes;
 };
 
-static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
-			       unsigned int key_len)
-{
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-
-	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
-	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
-					       CRYPTO_TFM_REQ_MASK);
-
-	return crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
-}
-
-static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
-		       unsigned int key_len)
-{
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-	unsigned long fc;
-
-	/* Pick the correct function code based on the key length */
-	fc = (key_len == 16) ? CPACF_KM_AES_128 :
-	     (key_len == 24) ? CPACF_KM_AES_192 :
-	     (key_len == 32) ? CPACF_KM_AES_256 : 0;
-
-	/* Check if the function code is available */
-	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
-	if (!sctx->fc)
-		return setkey_fallback_cip(tfm, in_key, key_len);
-
-	sctx->key_len = key_len;
-	memcpy(sctx->key, in_key, key_len);
-	return 0;
-}
-
-static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
-{
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-
-	if (unlikely(!sctx->fc)) {
-		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
-		return;
-	}
-	cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE);
-}
-
-static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
-{
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-
-	if (unlikely(!sctx->fc)) {
-		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
-		return;
-	}
-	cpacf_km(sctx->fc | CPACF_DECRYPT,
-		 &sctx->key, out, in, AES_BLOCK_SIZE);
-}
-
-static int fallback_init_cip(struct crypto_tfm *tfm)
-{
-	const char *name = tfm->__crt_alg->cra_name;
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-
-	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
-						 CRYPTO_ALG_NEED_FALLBACK);
-
-	if (IS_ERR(sctx->fallback.cip)) {
-		pr_err("Allocating AES fallback algorithm %s failed\n",
-		       name);
-		return PTR_ERR(sctx->fallback.cip);
-	}
-
-	return 0;
-}
-
-static void fallback_exit_cip(struct crypto_tfm *tfm)
-{
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-
-	crypto_free_cipher(sctx->fallback.cip);
-	sctx->fallback.cip = NULL;
-}
-
-static struct crypto_alg aes_alg = {
-	.cra_name		=	"aes",
-	.cra_driver_name	=	"aes-s390",
-	.cra_priority		=	300,
-	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
-					CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		=	AES_BLOCK_SIZE,
-	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
-	.cra_module		=	THIS_MODULE,
-	.cra_init		=	fallback_init_cip,
-	.cra_exit		=	fallback_exit_cip,
-	.cra_u			=	{
-		.cipher = {
-			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
-			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
-			.cia_setkey		=	aes_set_key,
-			.cia_encrypt		=	crypto_aes_encrypt,
-			.cia_decrypt		=	crypto_aes_decrypt,
-		}
-	}
-};
-
 static int setkey_fallback_skcipher(struct crypto_skcipher *tfm, const u8 *key,
 				    unsigned int len)
 {
@@ -1049,7 +944,6 @@ static struct aead_alg gcm_aes_aead = {
 	},
 };
 
-static struct crypto_alg *aes_s390_alg;
 static struct skcipher_alg *aes_s390_skcipher_algs[5];
 static int aes_s390_skciphers_num;
 static struct aead_alg *aes_s390_aead_alg;
@@ -1066,8 +960,6 @@ static int aes_s390_register_skcipher(struct skcipher_alg *alg)
 
 static void aes_s390_fini(void)
 {
-	if (aes_s390_alg)
-		crypto_unregister_alg(aes_s390_alg);
 	while (aes_s390_skciphers_num--)
 		crypto_unregister_skcipher(aes_s390_skcipher_algs[aes_s390_skciphers_num]);
 	if (ctrblk)
@@ -1090,10 +982,6 @@ static int __init aes_s390_init(void)
 	if (cpacf_test_func(&km_functions, CPACF_KM_AES_128) ||
 	    cpacf_test_func(&km_functions, CPACF_KM_AES_192) ||
 	    cpacf_test_func(&km_functions, CPACF_KM_AES_256)) {
-		ret = crypto_register_alg(&aes_alg);
-		if (ret)
-			goto out_err;
-		aes_s390_alg = &aes_alg;
 		ret = aes_s390_register_skcipher(&ecb_aes_alg);
 		if (ret)
 			goto out_err;
@@ -1156,4 +1044,3 @@
 MODULE_ALIAS_CRYPTO("aes-all");
 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
 MODULE_LICENSE("GPL");
-MODULE_IMPORT_NS("CRYPTO_INTERNAL");
diff --git a/include/crypto/aes.h b/include/crypto/aes.h
index bff71cfaedeb7..19fd99f383fb4 100644
--- a/include/crypto/aes.h
+++ b/include/crypto/aes.h
@@ -46,6 +46,9 @@ union aes_enckey_arch {
 	 * overlap rndkeys) is set to 0 to differentiate the two formats.
 	 */
 	struct p8_aes_key p8;
+#elif defined(CONFIG_S390)
+	/* Used when the CPU supports CPACF AES for this key's length */
+	u8 raw_key[AES_MAX_KEY_SIZE];
 #endif
 #endif /* CONFIG_CRYPTO_LIB_AES_ARCH */
 };
diff --git a/lib/crypto/Kconfig b/lib/crypto/Kconfig
index 2690b5ffc5ca9..56a9b4f53b0e8 100644
--- a/lib/crypto/Kconfig
+++ b/lib/crypto/Kconfig
@@ -19,6 +19,7 @@ config CRYPTO_LIB_AES_ARCH
 	default y if PPC && (SPE || (PPC64 && VSX))
 	default y if RISCV && 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
 		     RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS
+	default y if S390
 
 config CRYPTO_LIB_AESCFB
 	tristate
diff --git a/lib/crypto/s390/aes.h b/lib/crypto/s390/aes.h
new file mode 100644
index 0000000000000..5466f6ecbce78
--- /dev/null
+++ b/lib/crypto/s390/aes.h
@@ -0,0 +1,106 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * AES optimized using the CP Assist for Cryptographic Functions (CPACF)
+ *
+ * Copyright 2026 Google LLC
+ */
+#include <asm/cpacf.h>
+#include <linux/cpufeature.h>
+
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_cpacf_aes128);
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_cpacf_aes192);
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_cpacf_aes256);
+
+/*
+ * When the CPU supports CPACF AES for the requested key length, we need only
+ * save a copy of the raw AES key, as that's what the CPACF instructions need.
+ *
+ * When unsupported, fall back to the generic key expansion and en/decryption.
+ */
+static void aes_preparekey_arch(union aes_enckey_arch *k,
+				union aes_invkey_arch *inv_k,
+				const u8 *in_key, int key_len, int nrounds)
+{
+	if (key_len == AES_KEYSIZE_128) {
+		if (static_branch_likely(&have_cpacf_aes128)) {
+			memcpy(k->raw_key, in_key, AES_KEYSIZE_128);
+			return;
+		}
+	} else if (key_len == AES_KEYSIZE_192) {
+		if (static_branch_likely(&have_cpacf_aes192)) {
+			memcpy(k->raw_key, in_key, AES_KEYSIZE_192);
+			return;
+		}
+	} else {
+		if (static_branch_likely(&have_cpacf_aes256)) {
+			memcpy(k->raw_key, in_key, AES_KEYSIZE_256);
+			return;
+		}
+	}
+	aes_expandkey_generic(k->rndkeys, inv_k ? inv_k->inv_rndkeys : NULL,
+			      in_key, key_len);
+}
+
+static inline bool aes_crypt_s390(const struct aes_enckey *key,
+				  u8 out[AES_BLOCK_SIZE],
+				  const u8 in[AES_BLOCK_SIZE], int decrypt)
+{
+	if (key->len == AES_KEYSIZE_128) {
+		if (static_branch_likely(&have_cpacf_aes128)) {
+			cpacf_km(CPACF_KM_AES_128 | decrypt,
+				 (void *)key->k.raw_key, out, in,
+				 AES_BLOCK_SIZE);
+			return true;
+		}
+	} else if (key->len == AES_KEYSIZE_192) {
+		if (static_branch_likely(&have_cpacf_aes192)) {
+			cpacf_km(CPACF_KM_AES_192 | decrypt,
+				 (void *)key->k.raw_key, out, in,
+				 AES_BLOCK_SIZE);
+			return true;
+		}
+	} else {
+		if (static_branch_likely(&have_cpacf_aes256)) {
+			cpacf_km(CPACF_KM_AES_256 | decrypt,
+				 (void *)key->k.raw_key, out, in,
+				 AES_BLOCK_SIZE);
+			return true;
+		}
+	}
+	return false;
+}
+
+static void aes_encrypt_arch(const struct aes_enckey *key,
+			     u8 out[AES_BLOCK_SIZE],
+			     const u8 in[AES_BLOCK_SIZE])
+{
+	if (likely(aes_crypt_s390(key, out, in, 0)))
+		return;
+	aes_encrypt_generic(key->k.rndkeys, key->nrounds, out, in);
+}
+
+static void aes_decrypt_arch(const struct aes_key *key,
+			     u8 out[AES_BLOCK_SIZE],
+			     const u8 in[AES_BLOCK_SIZE])
+{
+	if (likely(aes_crypt_s390((const struct aes_enckey *)key, out, in,
+				  CPACF_DECRYPT)))
+		return;
+	aes_decrypt_generic(key->inv_k.inv_rndkeys, key->nrounds, out, in);
+}
+
+#define aes_mod_init_arch aes_mod_init_arch
+static void aes_mod_init_arch(void)
+{
+	if (cpu_have_feature(S390_CPU_FEATURE_MSA)) {
+		cpacf_mask_t km_functions;
+
+		cpacf_query(CPACF_KM, &km_functions);
+		if (cpacf_test_func(&km_functions, CPACF_KM_AES_128))
+			static_branch_enable(&have_cpacf_aes128);
+		if (cpacf_test_func(&km_functions, CPACF_KM_AES_192))
+			static_branch_enable(&have_cpacf_aes192);
+		if (cpacf_test_func(&km_functions, CPACF_KM_AES_256))
+			static_branch_enable(&have_cpacf_aes256);
+	}
+}
-- 
2.47.3
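
[Editor's note: for readers new to the kernel's AES library, the sketch
below shows roughly how in-kernel code would exercise the path this patch
accelerates.  It is illustrative only and not part of the patch: the
aes_preparekey()/aes_encrypt()/aes_decrypt() entry points and the public
struct aes_key type are assumed from the *_arch() hooks above, so check
<crypto/aes.h> in your tree for the exact names and signatures.]

/* Hypothetical usage sketch -- not part of the patch above. */
#include <crypto/aes.h>
#include <linux/errno.h>
#include <linux/string.h>

static int aes_lib_demo(void)
{
	static const u8 raw_key[AES_KEYSIZE_256];	/* all-zero demo key */
	u8 pt[AES_BLOCK_SIZE] = { 1, 2, 3 };		/* one 16-byte block */
	u8 ct[AES_BLOCK_SIZE], out[AES_BLOCK_SIZE];
	struct aes_key key;	/* assumed public key type from <crypto/aes.h> */
	int err;

	/*
	 * With this patch, on s390 with CPACF AES available, key preparation
	 * reduces to a memcpy of the raw key (see aes_preparekey_arch()).
	 */
	err = aes_preparekey(&key, raw_key, sizeof(raw_key));
	if (err)
		return err;

	aes_encrypt(&key, ct, pt);	/* CPACF KM-AES-256, else generic */
	aes_decrypt(&key, out, ct);	/* KM | CPACF_DECRYPT, else generic */

	return memcmp(out, pt, AES_BLOCK_SIZE) ? -EINVAL : 0;
}

The design point worth noting: because CPACF consumes the raw key directly,
the per-key-length static branches let aes_preparekey() skip the generic key
schedule entirely on capable CPUs, falling back to aes_expandkey_generic()
only when the corresponding KM function code is absent.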