config CRYPTO_LIB_CHACHA_ARCH
bool
depends on CRYPTO_LIB_CHACHA && !UML && !KMSAN
default y if ARM
- default y if ARM64 && KERNEL_MODE_NEON
+ default y if ARM64
default y if MIPS && CPU_MIPS32_R2
default y if PPC64 && CPU_LITTLE_ENDIAN && VSX
default y if RISCV && 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \
bool
depends on CRYPTO_LIB_NH && !UML && !KMSAN
default y if ARM && KERNEL_MODE_NEON
- default y if ARM64 && KERNEL_MODE_NEON
+ default y if ARM64
default y if X86_64
config CRYPTO_LIB_POLY1305_ARCH
bool
depends on CRYPTO_LIB_POLY1305 && !UML && !KMSAN
default y if ARM
- default y if ARM64 && KERNEL_MODE_NEON
+ default y if ARM64
default y if MIPS
# The PPC64 code needs to be fixed to work in softirq context.
default y if PPC64 && CPU_LITTLE_ENDIAN && VSX && BROKEN
config CRYPTO_LIB_POLYVAL_ARCH
bool
depends on CRYPTO_LIB_POLYVAL && !UML
- default y if ARM64 && KERNEL_MODE_NEON
+ default y if ARM64
default y if X86_64
config CRYPTO_LIB_SHA1_ARCH
bool
depends on CRYPTO_LIB_SHA1 && !UML
default y if ARM
- default y if ARM64 && KERNEL_MODE_NEON
+ default y if ARM64
default y if MIPS && CPU_CAVIUM_OCTEON
default y if PPC
default y if S390
config CRYPTO_LIB_SHA3_ARCH
bool
depends on CRYPTO_LIB_SHA3 && !UML
- default y if ARM64 && KERNEL_MODE_NEON
+ default y if ARM64
default y if S390
config CRYPTO_LIB_SM3
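Every Kconfig hunk above makes the same change: the arm64 "default y" lines lose their "&& KERNEL_MODE_NEON" qualifier, since kernel-mode NEON is now unconditionally available on arm64, and the C hunks later in this patch drop the matching compile-time guard. A rough sketch of the before/after gating, using names from the AES hunks below; the helper itself is illustrative and not part of the patch:

/* Before: a compile-time test guarded the runtime tests. */
static inline bool can_use_neon_aes_old(void)
{
	return IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
	       static_branch_likely(&have_aes) &&
	       likely(may_use_simd());
}

/* After: IS_ENABLED() always evaluates to true on arm64 now, so only
 * the runtime tests remain.
 */
static inline bool can_use_neon_aes_new(void)
{
	return static_branch_likely(&have_aes) && likely(may_use_simd());
}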
CFLAGS_aes.o += -I$(src)/$(SRCARCH)
libaes-$(CONFIG_ARM) += arm/aes-cipher-core.o
-
-ifeq ($(CONFIG_ARM64),y)
-libaes-y += arm64/aes-cipher-core.o
-libaes-$(CONFIG_KERNEL_MODE_NEON) += arm64/aes-ce-core.o \
- arm64/aes-ce.o \
- arm64/aes-neon.o
-endif
+libaes-$(CONFIG_ARM64) += arm64/aes-cipher-core.o \
+ arm64/aes-ce-core.o \
+ arm64/aes-ce.o \
+ arm64/aes-neon.o
ifeq ($(CONFIG_PPC),y)
ifeq ($(CONFIG_SPE),y)
endif
ifeq ($(CONFIG_ARM64),y)
-libsha256-y += arm64/sha256-core.o
+libsha256-y += arm64/sha256-ce.o arm64/sha256-core.o
$(obj)/arm64/sha256-core.S: $(src)/arm64/sha2-armv8.pl
$(call cmd,perlasm_with_args)
-libsha256-$(CONFIG_KERNEL_MODE_NEON) += arm64/sha256-ce.o
endif
libsha256-$(CONFIG_PPC) += powerpc/sha256-spe-asm.o
endif
ifeq ($(CONFIG_ARM64),y)
-libsha512-y += arm64/sha512-core.o
+libsha512-y += arm64/sha512-ce-core.o arm64/sha512-core.o
$(obj)/arm64/sha512-core.S: $(src)/arm64/sha2-armv8.pl
$(call cmd,perlasm_with_args)
-libsha512-$(CONFIG_KERNEL_MODE_NEON) += arm64/sha512-ce-core.o
endif
libsha512-$(CONFIG_RISCV) += riscv/sha512-riscv64-zvknhb-zvkb.o
struct aes_block *key_enc, *key_dec;
int i, j;
- if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) ||
- !static_branch_likely(&have_aes) || unlikely(!may_use_simd())) {
+ if (!static_branch_likely(&have_aes) || unlikely(!may_use_simd())) {
aes_expandkey_generic(rndkeys, inv_rndkeys, in_key, key_len);
return;
}
}
EXPORT_SYMBOL(ce_aes_expandkey);
-#if IS_ENABLED(CONFIG_KERNEL_MODE_NEON)
EXPORT_SYMBOL_NS_GPL(neon_aes_ecb_encrypt, "CRYPTO_INTERNAL");
EXPORT_SYMBOL_NS_GPL(neon_aes_ecb_decrypt, "CRYPTO_INTERNAL");
EXPORT_SYMBOL_NS_GPL(neon_aes_cbc_encrypt, "CRYPTO_INTERNAL");
EXPORT_SYMBOL_NS_GPL(ce_aes_xts_decrypt, "CRYPTO_INTERNAL");
EXPORT_SYMBOL_NS_GPL(ce_aes_essiv_cbc_encrypt, "CRYPTO_INTERNAL");
EXPORT_SYMBOL_NS_GPL(ce_aes_essiv_cbc_decrypt, "CRYPTO_INTERNAL");
-#endif
#if IS_MODULE(CONFIG_CRYPTO_AES_ARM64_CE_CCM)
EXPORT_SYMBOL_NS_GPL(ce_aes_mac_update, "CRYPTO_INTERNAL");
#endif
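With the #if/#endif pair removed, the NEON helpers above are exported unconditionally; only the CCM MAC helper stays behind a config test. Consumers are unaffected beyond no longer needing a matching guard of their own; they still have to import the symbol namespace. A minimal, illustrative consumer (not part of this patch):

/* In a module that links against the helpers exported above: */
MODULE_IMPORT_NS("CRYPTO_INTERNAL");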
u8 out[AES_BLOCK_SIZE],
const u8 in[AES_BLOCK_SIZE])
{
- if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
- static_branch_likely(&have_aes) && likely(may_use_simd())) {
+ if (static_branch_likely(&have_aes) && likely(may_use_simd())) {
scoped_ksimd()
__aes_ce_encrypt(key->k.rndkeys, out, in, key->nrounds);
} else {
u8 out[AES_BLOCK_SIZE],
const u8 in[AES_BLOCK_SIZE])
{
- if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
- static_branch_likely(&have_aes) && likely(may_use_simd())) {
+ if (static_branch_likely(&have_aes) && likely(may_use_simd())) {
scoped_ksimd()
__aes_ce_decrypt(key->inv_k.inv_rndkeys, out, in,
key->nrounds);
size_t nblocks, bool enc_before,
bool enc_after)
{
- if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
- static_branch_likely(&have_neon) && likely(may_use_simd())) {
+ if (static_branch_likely(&have_neon) && likely(may_use_simd())) {
do {
size_t rem;
}
#endif /* CONFIG_CRYPTO_LIB_AES_CBC_MACS */
-#ifdef CONFIG_KERNEL_MODE_NEON
#define aes_mod_init_arch aes_mod_init_arch
static void aes_mod_init_arch(void)
{
static_branch_enable(&have_aes);
}
}
-#endif /* CONFIG_KERNEL_MODE_NEON */
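The hunk above only drops the #ifdef bracket around the init hook; the CPU-feature check that decides whether to flip the static key is trimmed from the context. A sketch of its likely shape, modeled on the SHA-512 hook at the end of this patch; the exact feature test here is an assumption:

#define aes_mod_init_arch aes_mod_init_arch
static void aes_mod_init_arch(void)
{
	if (cpu_have_named_feature(AES))	/* assumed feature test */
		static_branch_enable(&have_aes);
}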
static void sha256_blocks(struct sha256_block_state *state,
const u8 *data, size_t nblocks)
{
- if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
- static_branch_likely(&have_neon) && likely(may_use_simd())) {
+ if (static_branch_likely(&have_neon) && likely(may_use_simd())) {
if (static_branch_likely(&have_ce)) {
do {
size_t rem;
* Further limit len to 65536 to avoid spending too long with preemption
* disabled. (Of course, in practice len is nearly always 4096 anyway.)
*/
- if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
- static_branch_likely(&have_ce) && len >= SHA256_BLOCK_SIZE &&
+ if (static_branch_likely(&have_ce) && len >= SHA256_BLOCK_SIZE &&
len <= 65536 && likely(may_use_simd())) {
scoped_ksimd()
sha256_ce_finup2x(ctx, data1, data2, len, out1, out2);
return static_key_enabled(&have_ce);
}
-#ifdef CONFIG_KERNEL_MODE_NEON
#define sha256_mod_init_arch sha256_mod_init_arch
static void sha256_mod_init_arch(void)
{
static_branch_enable(&have_ce);
}
}
-#endif /* CONFIG_KERNEL_MODE_NEON */
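The block-processing hunks above (and the sha512 hunks below) all trim the body of the same bounded loop: the CE transform may return before consuming every block, and each scoped_ksimd() section covers a single pass, so preemption is never disabled across an unbounded amount of data. The 65536-byte cap on the two-buffer finup path bounds its single SIMD section the same way. A hedged reconstruction of the loop's shape, with the transform's name and return convention assumed rather than taken from this patch:

do {
	size_t rem;

	scoped_ksimd()
		rem = __sha256_ce_transform(state, data, nblocks);
	data += (nblocks - rem) * SHA256_BLOCK_SIZE;
	nblocks = rem;
} while (nblocks);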
static void sha512_blocks(struct sha512_block_state *state,
const u8 *data, size_t nblocks)
{
- if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
- static_branch_likely(&have_sha512_insns) &&
+ if (static_branch_likely(&have_sha512_insns) &&
likely(may_use_simd())) {
do {
size_t rem;
}
}
-#ifdef CONFIG_KERNEL_MODE_NEON
#define sha512_mod_init_arch sha512_mod_init_arch
static void sha512_mod_init_arch(void)
{
if (cpu_have_named_feature(SHA512))
static_branch_enable(&have_sha512_insns);
}
-#endif /* CONFIG_KERNEL_MODE_NEON */