]> git.ipfire.org Git - thirdparty/kernel/stable-queue.git/blob - releases/4.14.113/crypto-sha256-arm-fix-crash-bug-in-thumb2-build.patch
Linux 4.9.170
[thirdparty/kernel/stable-queue.git] / releases / 4.14.113 / crypto-sha256-arm-fix-crash-bug-in-thumb2-build.patch
1 From 0ac0717afa9064628736c3f5a85631353a5a2eb6 Mon Sep 17 00:00:00 2001
2 From: Ard Biesheuvel <ard.biesheuvel@linaro.org>
3 Date: Sat, 16 Feb 2019 14:51:25 +0100
4 Subject: crypto: sha256/arm - fix crash bug in Thumb2 build
5
6 [ Upstream commit 69216a545cf81b2b32d01948f7039315abaf75a0 ]
7
8 The SHA256 code we adopted from the OpenSSL project uses a rather
9 peculiar way to take the address of the round constant table: it
10 takes the address of the sha256_block_data_order() routine, and
11 subtracts a constant known quantity to arrive at the base of the
12 table, which is emitted by the same assembler code right before
13 the routine's entry point.
14
15 However, recent versions of binutils have helpfully changed the
16 behavior of references emitted via an ADR instruction when running
17 in Thumb2 mode: it now takes the Thumb execution mode bit into
18 account, which is bit 0 of the address. This means the produced
19 table address also has bit 0 set, and so we end up with an address
20 value pointing 1 byte past the start of the table, which results
21 in crashes such as
22
23 Unable to handle kernel paging request at virtual address bf825000
24 pgd = 42f44b11
25 [bf825000] *pgd=80000040206003, *pmd=5f1bd003, *pte=00000000
26 Internal error: Oops: 207 [#1] PREEMPT SMP THUMB2
27 Modules linked in: sha256_arm(+) sha1_arm_ce sha1_arm ...
28 CPU: 7 PID: 396 Comm: cryptomgr_test Not tainted 5.0.0-rc6+ #144
29 Hardware name: QEMU KVM Virtual Machine, BIOS 0.0.0 02/06/2015
30 PC is at sha256_block_data_order+0xaaa/0xb30 [sha256_arm]
31 LR is at __this_module+0x17fd/0xffffe800 [sha256_arm]
32 pc : [<bf820bca>] lr : [<bf824ffd>] psr: 800b0033
33 sp : ebc8bbe8 ip : faaabe1c fp : 2fdd3433
34 r10: 4c5f1692 r9 : e43037df r8 : b04b0a5a
35 r7 : c369d722 r6 : 39c3693e r5 : 7a013189 r4 : 1580d26b
36 r3 : 8762a9b0 r2 : eea9c2cd r1 : 3e9ab536 r0 : 1dea4ae7
37 Flags: Nzcv IRQs on FIQs on Mode SVC_32 ISA Thumb Segment user
38 Control: 70c5383d Table: 6b8467c0 DAC: dbadc0de
39 Process cryptomgr_test (pid: 396, stack limit = 0x69e1fe23)
40 Stack: (0xebc8bbe8 to 0xebc8c000)
41 ...
42 unwind: Unknown symbol address bf820bca
43 unwind: Index not found bf820bca
44 Code: 441a ea80 40f9 440a (f85e) 3b04
45 ---[ end trace e560cce92700ef8a ]---
46
47 Given that this affects older kernels as well, in case they are built
48 with a recent toolchain, apply a minimal backportable fix, which is
49 to emit another non-code label at the start of the routine, and
50 reference that instead. (This is similar to the current upstream state
51 of this file in OpenSSL)
52
53 Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
54 Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
55 Signed-off-by: Sasha Levin <sashal@kernel.org>
56 ---
57 arch/arm/crypto/sha256-armv4.pl | 3 ++-
58 arch/arm/crypto/sha256-core.S_shipped | 3 ++-
59 2 files changed, 4 insertions(+), 2 deletions(-)
60
61 diff --git a/arch/arm/crypto/sha256-armv4.pl b/arch/arm/crypto/sha256-armv4.pl
62 index fac0533ea633..f64e8413ab9a 100644
63 --- a/arch/arm/crypto/sha256-armv4.pl
64 +++ b/arch/arm/crypto/sha256-armv4.pl
65 @@ -205,10 +205,11 @@ K256:
66 .global sha256_block_data_order
67 .type sha256_block_data_order,%function
68 sha256_block_data_order:
69 +.Lsha256_block_data_order:
70 #if __ARM_ARCH__<7
71 sub r3,pc,#8 @ sha256_block_data_order
72 #else
73 - adr r3,sha256_block_data_order
74 + adr r3,.Lsha256_block_data_order
75 #endif
76 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
77 ldr r12,.LOPENSSL_armcap
78 diff --git a/arch/arm/crypto/sha256-core.S_shipped b/arch/arm/crypto/sha256-core.S_shipped
79 index 555a1a8eec90..72c248081d27 100644
80 --- a/arch/arm/crypto/sha256-core.S_shipped
81 +++ b/arch/arm/crypto/sha256-core.S_shipped
82 @@ -86,10 +86,11 @@ K256:
83 .global sha256_block_data_order
84 .type sha256_block_data_order,%function
85 sha256_block_data_order:
86 +.Lsha256_block_data_order:
87 #if __ARM_ARCH__<7
88 sub r3,pc,#8 @ sha256_block_data_order
89 #else
90 - adr r3,sha256_block_data_order
91 + adr r3,.Lsha256_block_data_order
92 #endif
93 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
94 ldr r12,.LOPENSSL_armcap
95 --
96 2.19.1
97