From foo@baz Thu Nov 9 18:22:51 CET 2017
From: Li Zhong <zhong@linux.vnet.ibm.com>
Date: Fri, 20 Jan 2017 16:35:33 +0800
Subject: crypto: vmx - disable preemption to enable vsx in aes_ctr.c

From: Li Zhong <zhong@linux.vnet.ibm.com>


[ Upstream commit 7dede913fc2ab9c0d3bff3a49e26fa9e858b0c13 ]

Some preemptible check warnings were reported from enable_kernel_vsx(). This
patch disables preemption in aes_ctr.c before enabling VSX, making it
consistent with the other files in the same directory.
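
A minimal sketch (not part of the patch) of the bracketing pattern the fix
applies: on powerpc, enable_kernel_vsx() hands per-CPU FP/VMX/VSX state to
the kernel and warns if called while preemptible, so each vector-accelerated
section is wrapped in preempt_disable()/preempt_enable() around the existing
pagefault_disable()/pagefault_enable() pair. The helper name below is
hypothetical; the preempt/pagefault/enable_kernel_*() calls are the real
kernel APIs.

	/* Hypothetical helper showing the section shape used throughout
	 * drivers/crypto/vmx: pin the task to this CPU and forbid page
	 * faults while the vector unit is enabled for kernel use. */
	static void p8_vsx_section_sketch(void)
	{
		preempt_disable();	/* enable_kernel_vsx() requires !preemptible() */
		pagefault_disable();	/* no faults while FP/VMX/VSX state is live */
		enable_kernel_altivec();
		enable_kernel_vsx();
		/* ... VSX-accelerated work, e.g. aes_p8_encrypt() ... */
		pagefault_enable();
		preempt_enable();
	}

Keeping the preempt_disable() outermost ensures the task cannot migrate to
another CPU (and lose the enabled vector state) between enabling the unit
and finishing the work.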

Signed-off-by: Li Zhong <zhong@linux.vnet.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: Sasha Levin <alexander.levin@verizon.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
---
 drivers/crypto/vmx/aes_ctr.c |    6 ++++++
 1 file changed, 6 insertions(+)

--- a/drivers/crypto/vmx/aes_ctr.c
+++ b/drivers/crypto/vmx/aes_ctr.c
@@ -80,11 +80,13 @@ static int p8_aes_ctr_setkey(struct cryp
 	int ret;
 	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
+	preempt_disable();
 	pagefault_disable();
 	enable_kernel_altivec();
 	enable_kernel_vsx();
 	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
 	pagefault_enable();
+	preempt_enable();
 
 	ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
 	return ret;
@@ -99,11 +101,13 @@ static void p8_aes_ctr_final(struct p8_a
 	u8 *dst = walk->dst.virt.addr;
 	unsigned int nbytes = walk->nbytes;
 
+	preempt_disable();
 	pagefault_disable();
 	enable_kernel_altivec();
 	enable_kernel_vsx();
 	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
 	pagefault_enable();
+	preempt_enable();
 
 	crypto_xor(keystream, src, nbytes);
 	memcpy(dst, keystream, nbytes);
@@ -132,6 +136,7 @@ static int p8_aes_ctr_crypt(struct blkci
 		blkcipher_walk_init(&walk, dst, src, nbytes);
 		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
 		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
+			preempt_disable();
 			pagefault_disable();
 			enable_kernel_altivec();
 			enable_kernel_vsx();
@@ -143,6 +148,7 @@ static int p8_aes_ctr_crypt(struct blkci
 						    &ctx->enc_key,
 						    walk.iv);
 			pagefault_enable();
+			preempt_enable();
 
 			/* We need to update IV mostly for last bytes/round */
 			inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;