 		tail = 0;
 	}
 
-	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
-		int nbytes = walk.nbytes;
+	scoped_ksimd() {
+		for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
+			int nbytes = walk.nbytes;
 
-		if (walk.nbytes < walk.total)
-			nbytes &= ~(AES_BLOCK_SIZE - 1);
+			if (walk.nbytes < walk.total)
+				nbytes &= ~(AES_BLOCK_SIZE - 1);
 
-		scoped_ksimd()
 			aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
					ctx->key1.key_enc, rounds, nbytes,
					ctx->key2.key_enc, walk.iv, first);
-		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
-	}
+			err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
+		}
 
-	if (err || likely(!tail))
-		return err;
+		if (err || likely(!tail))
+			return err;
 
-	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
-	if (req->dst != req->src)
-		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
+		dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
 
-	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
-				   req->iv);
+		skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
+					   req->iv);
 
-	err = skcipher_walk_virt(&walk, &subreq, false);
-	if (err)
-		return err;
+		err = skcipher_walk_virt(&walk, &subreq, false);
+		if (err)
+			return err;
 
-	scoped_ksimd()
 		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_enc, rounds, walk.nbytes,
				ctx->key2.key_enc, walk.iv, first);
-
+	}
 	return skcipher_walk_done(&walk, 0);
 }
@@ ... @@
 		tail = 0;
 	}
 
-	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
-		int nbytes = walk.nbytes;
+	scoped_ksimd() {
+		for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
+			int nbytes = walk.nbytes;
 
-		if (walk.nbytes < walk.total)
-			nbytes &= ~(AES_BLOCK_SIZE - 1);
+			if (walk.nbytes < walk.total)
+				nbytes &= ~(AES_BLOCK_SIZE - 1);
 
-		scoped_ksimd()
 			aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
					ctx->key1.key_dec, rounds, nbytes,
					ctx->key2.key_enc, walk.iv, first);
-		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
-	}
+			err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
+		}
 
-	if (err || likely(!tail))
-		return err;
+		if (err || likely(!tail))
+			return err;
 
-	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
-	if (req->dst != req->src)
-		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
+		dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
 
-	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
-				   req->iv);
+		skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
+					   req->iv);
 
-	err = skcipher_walk_virt(&walk, &subreq, false);
-	if (err)
-		return err;
+		err = skcipher_walk_virt(&walk, &subreq, false);
+		if (err)
+			return err;
 
-	scoped_ksimd()
 		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_dec, rounds, walk.nbytes,
				ctx->key2.key_enc, walk.iv, first);
-
+	}
 	return skcipher_walk_done(&walk, 0);
 }
@@ ... @@
 	if (err)
 		return err;
 
-	while (walk.nbytes >= AES_BLOCK_SIZE) {
-		int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
-		out = walk.dst.virt.addr;
-		in = walk.src.virt.addr;
-		nbytes = walk.nbytes;
+	scoped_ksimd() {
+		while (walk.nbytes >= AES_BLOCK_SIZE) {
+			int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
+			out = walk.dst.virt.addr;
+			in = walk.src.virt.addr;
+			nbytes = walk.nbytes;
 
-		scoped_ksimd() {
 			if (blocks >= 8) {
 				if (first == 1)
					neon_aes_ecb_encrypt(walk.iv, walk.iv,
@@ ... @@
							     ctx->twkey, walk.iv, first);
 				nbytes = first = 0;
 			}
+			err = skcipher_walk_done(&walk, nbytes);
 		}
-		err = skcipher_walk_done(&walk, nbytes);
-	}
 
-	if (err || likely(!tail))
-		return err;
+		if (err || likely(!tail))
+			return err;
 
-	/* handle ciphertext stealing */
-	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
-	if (req->dst != req->src)
-		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
+		/* handle ciphertext stealing */
+		dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
 
-	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
-				   req->iv);
+		skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
+					   req->iv);
 
-	err = skcipher_walk_virt(&walk, req, false);
-	if (err)
-		return err;
+		err = skcipher_walk_virt(&walk, req, false);
+		if (err)
+			return err;
 
-	out = walk.dst.virt.addr;
-	in = walk.src.virt.addr;
-	nbytes = walk.nbytes;
+		out = walk.dst.virt.addr;
+		in = walk.src.virt.addr;
+		nbytes = walk.nbytes;
 
-	scoped_ksimd() {
 		if (encrypt)
 			neon_aes_xts_encrypt(out, in, ctx->cts.key_enc,
					     ctx->key.rounds, nbytes, ctx->twkey,