From d1e92ba95f9644e3c20bb6c85132b0785f52103f Mon Sep 17 00:00:00 2001
From: Greg Kroah-Hartman
Date: Sat, 18 May 2019 09:02:04 +0200
Subject: [PATCH] 4.14-stable patches

added patches:
	crypto-arm64-aes-neonbs-don-t-access-already-freed-walk.iv.patch
	crypto-salsa20-don-t-access-already-freed-walk.iv.patch
---
 ...s-don-t-access-already-freed-walk.iv.patch | 42 +++++++++++++++++
 ...0-don-t-access-already-freed-walk.iv.patch | 45 +++++++++++++++++++
 queue-4.14/series                             |  2 +
 3 files changed, 89 insertions(+)
 create mode 100644 queue-4.14/crypto-arm64-aes-neonbs-don-t-access-already-freed-walk.iv.patch
 create mode 100644 queue-4.14/crypto-salsa20-don-t-access-already-freed-walk.iv.patch

diff --git a/queue-4.14/crypto-arm64-aes-neonbs-don-t-access-already-freed-walk.iv.patch b/queue-4.14/crypto-arm64-aes-neonbs-don-t-access-already-freed-walk.iv.patch
new file mode 100644
index 00000000000..8b92e88b7a1
--- /dev/null
+++ b/queue-4.14/crypto-arm64-aes-neonbs-don-t-access-already-freed-walk.iv.patch
@@ -0,0 +1,42 @@
+From 4a8108b70508df0b6c4ffa4a3974dab93dcbe851 Mon Sep 17 00:00:00 2001
+From: Eric Biggers
+Date: Tue, 9 Apr 2019 23:46:32 -0700
+Subject: crypto: arm64/aes-neonbs - don't access already-freed walk.iv
+
+From: Eric Biggers
+
+commit 4a8108b70508df0b6c4ffa4a3974dab93dcbe851 upstream.
+
+If the user-provided IV needs to be aligned to the algorithm's
+alignmask, then skcipher_walk_virt() copies the IV into a new aligned
+buffer walk.iv. But skcipher_walk_virt() can fail afterwards, and then
+if the caller unconditionally accesses walk.iv, it's a use-after-free.
+
+xts-aes-neonbs doesn't set an alignmask, so currently it isn't affected
+by this despite unconditionally accessing walk.iv. However this is more
+subtle than desired, and unconditionally accessing walk.iv has caused a
+real problem in other algorithms. Thus, update xts-aes-neonbs to start
+checking the return value of skcipher_walk_virt().
+
+Fixes: 1abee99eafab ("crypto: arm64/aes - reimplement bit-sliced ARM/NEON implementation for arm64")
+Cc: <stable@vger.kernel.org> # v4.11+
+Signed-off-by: Eric Biggers
+Signed-off-by: Herbert Xu
+Signed-off-by: Greg Kroah-Hartman
+
+
+---
+ arch/arm64/crypto/aes-neonbs-glue.c |    2 ++
+ 1 file changed, 2 insertions(+)
+
+--- a/arch/arm64/crypto/aes-neonbs-glue.c
++++ b/arch/arm64/crypto/aes-neonbs-glue.c
+@@ -307,6 +307,8 @@ static int __xts_crypt(struct skcipher_r
+ 	int err;
+ 
+ 	err = skcipher_walk_virt(&walk, req, true);
++	if (err)
++		return err;
+ 
+ 	kernel_neon_begin();
+ 
diff --git a/queue-4.14/crypto-salsa20-don-t-access-already-freed-walk.iv.patch b/queue-4.14/crypto-salsa20-don-t-access-already-freed-walk.iv.patch
new file mode 100644
index 00000000000..2e94806e513
--- /dev/null
+++ b/queue-4.14/crypto-salsa20-don-t-access-already-freed-walk.iv.patch
@@ -0,0 +1,45 @@
+From edaf28e996af69222b2cb40455dbb5459c2b875a Mon Sep 17 00:00:00 2001
+From: Eric Biggers
+Date: Tue, 9 Apr 2019 23:46:30 -0700
+Subject: crypto: salsa20 - don't access already-freed walk.iv
+
+From: Eric Biggers
+
+commit edaf28e996af69222b2cb40455dbb5459c2b875a upstream.
+
+If the user-provided IV needs to be aligned to the algorithm's
+alignmask, then skcipher_walk_virt() copies the IV into a new aligned
+buffer walk.iv. But skcipher_walk_virt() can fail afterwards, and then
+if the caller unconditionally accesses walk.iv, it's a use-after-free.
+
+salsa20-generic doesn't set an alignmask, so currently it isn't affected
+by this despite unconditionally accessing walk.iv. However this is more
+subtle than desired, and it was actually broken prior to the alignmask
+being removed by commit b62b3db76f73 ("crypto: salsa20-generic - cleanup
+and convert to skcipher API").
+
+Since salsa20-generic does not update the IV and does not need any IV
+alignment, update it to use req->iv instead of walk.iv.
+
+Fixes: 2407d60872dd ("[CRYPTO] salsa20: Salsa20 stream cipher")
+Cc: stable@vger.kernel.org
+Signed-off-by: Eric Biggers
+Signed-off-by: Herbert Xu
+Signed-off-by: Greg Kroah-Hartman
+
+
+---
+ crypto/salsa20_generic.c |    2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+--- a/crypto/salsa20_generic.c
++++ b/crypto/salsa20_generic.c
+@@ -186,7 +186,7 @@ static int encrypt(struct blkcipher_desc
+ 	blkcipher_walk_init(&walk, dst, src, nbytes);
+ 	err = blkcipher_walk_virt_block(desc, &walk, 64);
+ 
+-	salsa20_ivsetup(ctx, walk.iv);
++	salsa20_ivsetup(ctx, desc->info);
+ 
+ 	while (walk.nbytes >= 64) {
+ 		salsa20_encrypt_bytes(ctx, walk.dst.virt.addr,
diff --git a/queue-4.14/series b/queue-4.14/series
index a818cb1783f..5f4c323798c 100644
--- a/queue-4.14/series
+++ b/queue-4.14/series
@@ -49,3 +49,5 @@ ext4-fix-use-after-free-race-with-debug_want_extra_isize.patch
 ext4-actually-request-zeroing-of-inode-table-after-grow.patch
 ext4-fix-ext4_show_options-for-file-systems-w-o-journal.patch
 ipmi-ssif-compare-block-number-correctly-for-multi-part-return-messages.patch
+crypto-arm64-aes-neonbs-don-t-access-already-freed-walk.iv.patch
+crypto-salsa20-don-t-access-already-freed-walk.iv.patch
-- 
2.39.5