}
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
{
err = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
u32 state[16];
u8 real_iv[16];
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon) {
hchacha_block_arm(state, subctx.key, ctx->nrounds);
}
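
Every arch glue file above and below follows this same stream-xor shape, so the chacha_init_generic() -> chacha_init() rename lands identically in each of them. For orientation, a minimal sketch of that shared pattern, pieced together from the context lines in these hunks (error handling and the per-arch SIMD dispatch are trimmed; chacha_stream_xor is the helper name the arm glue uses):

static int chacha_stream_xor(struct skcipher_request *req,
			     const struct chacha_ctx *ctx, const u8 *iv)
{
	struct skcipher_walk walk;
	u32 state[16];
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	chacha_init(state, ctx->key, iv);	/* was chacha_init_generic() */

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;

		/* all but the final chunk must be whole ChaCha blocks */
		if (nbytes < walk.total)
			nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE);

		chacha_crypt_arch(state, walk.dst.virt.addr,
				  walk.src.virt.addr, nbytes, ctx->nrounds);
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	return err;
}
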
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
{
err = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
u32 state[16];
u8 real_iv[16];
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
hchacha_block_arch(state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;
asmlinkage void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds);
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
static int chacha_mips_stream_xor(struct skcipher_request *req,
const struct chacha_ctx *ctx, const u8 *iv)
{
err = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
u32 state[16];
u8 real_iv[16];
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
hchacha_block(state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;
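
Note that the MIPS xchacha path calls hchacha_block() rather than hchacha_block_arch() directly: hchacha_block() is the library dispatch wrapper in <crypto/chacha.h>, with the same shape as the chacha_init() wrapper this patch removes (see the header hunk further down):

static inline void hchacha_block(const u32 *state, u32 *stream, int nrounds)
{
	if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
		hchacha_block_arch(state, stream, nrounds);
	else
		hchacha_block_generic(state, stream, nrounds);
}
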
}
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
{
if (err)
return err;
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
u32 state[16];
u8 real_iv[16];
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
hchacha_block_arch(state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;
int rc;
rc = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
while (walk.nbytes > 0) {
nbytes = walk.nbytes;
}
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
}
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
{
err = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
struct chacha_ctx subctx;
u8 real_iv[16];
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
if (req->cryptlen > CHACHA_BLOCK_SIZE && crypto_simd_usable()) {
kernel_fpu_begin();
err = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
u8 real_iv[16];
/* Compute the subkey given the original key and first 128 nonce bits */
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
hchacha_block_generic(state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;
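
The "compute the subkey" comment in this x86 hunk is the XChaCha construction: HChaCha compresses the 256-bit key and the first 128 bits of the 192-bit nonce into a one-time subkey, and plain ChaCha then runs under that subkey with the remaining 64 nonce bits. A sketch of the flow, using the names from the hunks above (the real_iv byte layout is an assumption based on how the glue code rebuilds the 16-byte IV, counter bytes first):

	u32 state[16];
	struct chacha_ctx subctx;
	u8 real_iv[16];

	/* HChaCha: key + nonce bytes 0..15 -> 256-bit subkey */
	chacha_init(state, ctx->key, req->iv);
	hchacha_block_generic(state, subctx.key, ctx->nrounds);
	subctx.nrounds = ctx->nrounds;

	/* rebuild a 16-byte IV: 64-bit counter, then nonce bytes 16..23 */
	memcpy(&real_iv[0], req->iv + 24, 8);
	memcpy(&real_iv[8], req->iv + 16, 8);
	/* ...then the stream-xor runs with &subctx and real_iv */
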
state[3] = CHACHA_CONSTANT_TE_K;
}
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv);
-static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)
+static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
{
chacha_init_consts(state);
state[4] = key[0];
state[15] = get_unaligned_le32(iv + 12);
}
-static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
-{
- if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
- chacha_init_arch(state, key, iv);
- else
- chacha_init_generic(state, key, iv);
-}
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds);
void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
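
With the CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA wrapper gone, chacha_init() is the single initializer, and all it does is fill the standard 16-word ChaCha state. For reference (the lines elided from the hunk above store key[1..6] and iv bytes 0..11 in the same pattern):

	/*
	 * state[0..3]   = "expand 32-byte k" constants (chacha_init_consts())
	 * state[4..11]  = 256-bit key, as little-endian words
	 * state[12..15] = iv bytes 0..15; in the RFC 7539 convention,
	 *                 state[12] is the block counter and state[13..15]
	 *                 hold the 96-bit nonce
	 */
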
}
/* Encrypt */
- chacha_init_arch(chacha_state, (u32*)key, iv);
+ chacha_init(chacha_state, (u32 *)key, iv);
start = ktime_get_ns();
chacha_crypt_arch(chacha_state, cipher, plain, data_size, 20);
pr_info("lib encryption took: %lld nsec", end - start);
/* Decrypt */
- chacha_init_arch(chacha_state, (u32 *)key, iv);
+ chacha_init(chacha_state, (u32 *)key, iv);
start = ktime_get_ns();
chacha_crypt_arch(chacha_state, revert, cipher, data_size, 20);
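
Worth noting in this benchmark: chacha_crypt_arch() advances the block counter inside chacha_state as it generates keystream, so the state has to be re-initialized with the same key and IV before the decrypt pass; that is what the second chacha_init() call is for.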