	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
-	struct crypto_aes_ctx aes;
+	struct aes_enckey aes;
	u32 hashkey[AES_BLOCK_SIZE >> 2];
	int ret, i;
-	ret = aes_expandkey(&aes, key, len);
-	if (ret) {
-		memzero_explicit(&aes, sizeof(aes));
+	ret = aes_prepareenckey(&aes, key, len);
+	if (ret)
		return ret;
-	}
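/*
 * The error-path wipe is dropped: key preparation fails only for an
 * invalid key length, before any key material has been written into
 * 'aes', so there is nothing sensitive to clear (an assumption about
 * aes_prepareenckey(); the old memzero_explicit() was defensive).
 */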
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
-			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
+			if (ctx->key[i] != get_unaligned((__le32 *)key + i)) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
-		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
+		ctx->key[i] = get_unaligned((__le32 *)key + i);

	ctx->key_len = len;
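/*
 * What the invalidation check above now relies on: ctx->key caches the
 * raw key bytes (as __le32 words), so a key change can be detected
 * without peeking into struct aes_enckey, whose layout the driver no
 * longer assumes. A minimal sketch of the same idea, with illustrative
 * names (not the driver's):
 */
static bool key_changed(const __le32 *cached, const u8 *key, unsigned int len)
{
	unsigned int i;

	for (i = 0; i < len / sizeof(u32); i++)
		if (cached[i] != get_unaligned((const __le32 *)key + i))
			return true;

	return false;
}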
	bool fb_init_done;
	bool fb_do_setkey;
-	struct crypto_aes_ctx *aes;
+	struct aes_enckey *aes;
	struct crypto_ahash *fback;
	struct crypto_shash *shpre;
	struct shash_desc *shdesc;
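/*
 * Context-struct change: the hash context keeps only a pointer to the
 * prepared encryption key. Presumably struct aes_enckey holds just the
 * encryption round keys, so MAC-only users such as XCBC/CMAC no longer
 * carry the decryption schedule that struct crypto_aes_ctx included.
 */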
	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
	int ret, i;

-	ret = aes_expandkey(ctx->aes, key, len);
+	ret = aes_prepareenckey(ctx->aes, key, len);
	if (ret)
		return ret;

	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.word[i] = swab32(key_tmp[i]);

-	ret = aes_expandkey(ctx->aes,
-			    (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
-			    AES_MIN_KEY_SIZE);
+	ret = aes_prepareenckey(ctx->aes,
+				(u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
+				AES_MIN_KEY_SIZE);
	if (ret)
		return ret;
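/*
 * For context: between the two key preparations above, the driver
 * derives the three XCBC-MAC keys of RFC 3566 into key_tmp (elided
 * here): K1 = E(K, 0x01..01), K2 = E(K, 0x02..02), K3 = E(K, 0x03..03).
 * K1 then becomes the cipher key for the MAC proper, which is why it is
 * re-expanded from key_tmp + 2 * AES_BLOCK_SIZE. A sketch, assuming an
 * aes_encrypt() variant that takes the prepared key (names illustrative):
 */
u8 k1[AES_BLOCK_SIZE], k2[AES_BLOCK_SIZE], k3[AES_BLOCK_SIZE];
u8 c[AES_BLOCK_SIZE];

memset(c, 0x01, sizeof(c));
aes_encrypt(ctx->aes, k1, c);	/* K1: re-expanded as the MAC cipher key */
memset(c, 0x02, sizeof(c));
aes_encrypt(ctx->aes, k2, c);	/* K2: XORed into a complete final block */
memset(c, 0x03, sizeof(c));
aes_encrypt(ctx->aes, k3, c);	/* K3: XORed into a padded final block */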
	int ret, i;

	/* precompute the CMAC key material */
-	ret = aes_expandkey(ctx->aes, key, len);
+	ret = aes_prepareenckey(ctx->aes, key, len);
	if (ret)
		return ret;

	for (i = 0; i < len / sizeof(u32); i++)
-		ctx->base.ipad.word[i + 8] = swab32(ctx->aes->key_enc[i]);
+		ctx->base.ipad.word[i + 8] = get_unaligned_be32(&key[4 * i]);
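/*
 * Why the replacement is exact: for AES, the first len / 4 words of the
 * expanded schedule are the cipher key itself, and aes_expandkey() read
 * them with get_unaligned_le32(), so swab32(key_enc[i]) was just a
 * big-endian load of the raw key. get_unaligned_be32(&key[4 * i]) reads
 * the same bytes directly, with no dependence on the schedule layout.
 */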
	/* code below borrowed from crypto/cmac.c */
	/* encrypt the zero block */
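/*
 * Sketch of the subkey step that follows (as in crypto/cmac.c): encrypt
 * an all-zero block to get L, then derive the CMAC subkeys by doubling
 * in GF(2^128). The helper below is illustrative, not the driver's code.
 */
static void gf128_double(u8 b[AES_BLOCK_SIZE])
{
	int i;
	u8 msb = b[0] & 0x80;

	/* shift the whole 128-bit block left by one bit */
	for (i = 0; i < AES_BLOCK_SIZE - 1; i++)
		b[i] = (b[i] << 1) | (b[i + 1] >> 7);
	b[AES_BLOCK_SIZE - 1] <<= 1;

	/* reduce modulo x^128 + x^7 + x^2 + x + 1 */
	if (msb)
		b[AES_BLOCK_SIZE - 1] ^= 0x87;
}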