Source: git.ipfire.org — thirdparty/kernel/stable-queue.git (web blame view)
Path: releases/4.14.13/parisc-fix-alignment-of-pa_tlb_lock-in-assembly-on-32-bit-smp-kernel.patch
Blame commit: e5d76626 (GKH) — "Fix up backported ptrace patch"

NOTE: each patch line below is prefixed with a blame line number from the web
view (e.g. "36 #define ..."); these numbers are NOT part of the patch itself,
and original tab indentation may have been collapsed by the scraper.
1From 88776c0e70be0290f8357019d844aae15edaa967 Mon Sep 17 00:00:00 2001
2From: Helge Deller <deller@gmx.de>
3Date: Tue, 2 Jan 2018 20:36:44 +0100
4Subject: parisc: Fix alignment of pa_tlb_lock in assembly on 32-bit SMP kernel
5
6From: Helge Deller <deller@gmx.de>
7
8commit 88776c0e70be0290f8357019d844aae15edaa967 upstream.
9
10Qemu for PARISC reported on a 32bit SMP parisc kernel strange failures
11about "Not-handled unaligned insn 0x0e8011d6 and 0x0c2011c9."
12
13Those opcodes evaluate to the ldcw() assembly instruction which requires
14(on 32bit) an alignment of 16 bytes to ensure atomicity.
15
16As it turns out, qemu is correct and in our assembly code in entry.S and
17pacache.S we don't pay attention to the required alignment.
18
19This patch fixes the problem by aligning the lock offset in assembly
20code in the same manner as we do in our C-code.
21
22Signed-off-by: Helge Deller <deller@gmx.de>
23Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
24
25---
26 arch/parisc/include/asm/ldcw.h | 2 ++
27 arch/parisc/kernel/entry.S | 13 +++++++++++--
28 arch/parisc/kernel/pacache.S | 9 +++++++--
29 3 files changed, 20 insertions(+), 4 deletions(-)
30
31--- a/arch/parisc/include/asm/ldcw.h
32+++ b/arch/parisc/include/asm/ldcw.h
33@@ -12,6 +12,7 @@
34 for the semaphore. */
35
36 #define __PA_LDCW_ALIGNMENT 16
37+#define __PA_LDCW_ALIGN_ORDER 4
38 #define __ldcw_align(a) ({ \
39 unsigned long __ret = (unsigned long) &(a)->lock[0]; \
40 __ret = (__ret + __PA_LDCW_ALIGNMENT - 1) \
41@@ -29,6 +30,7 @@
42 ldcd). */
43
44 #define __PA_LDCW_ALIGNMENT 4
45+#define __PA_LDCW_ALIGN_ORDER 2
46 #define __ldcw_align(a) (&(a)->slock)
47 #define __LDCW "ldcw,co"
48
49--- a/arch/parisc/kernel/entry.S
50+++ b/arch/parisc/kernel/entry.S
51@@ -35,6 +35,7 @@
52 #include <asm/pgtable.h>
53 #include <asm/signal.h>
54 #include <asm/unistd.h>
55+#include <asm/ldcw.h>
56 #include <asm/thread_info.h>
57
58 #include <linux/linkage.h>
59@@ -46,6 +47,14 @@
60 #endif
61
62 .import pa_tlb_lock,data
63+ .macro load_pa_tlb_lock reg
64+#if __PA_LDCW_ALIGNMENT > 4
65+ load32 PA(pa_tlb_lock) + __PA_LDCW_ALIGNMENT-1, \reg
66+ depi 0,31,__PA_LDCW_ALIGN_ORDER, \reg
67+#else
68+ load32 PA(pa_tlb_lock), \reg
69+#endif
70+ .endm
71
72 /* space_to_prot macro creates a prot id from a space id */
73
74@@ -457,7 +466,7 @@
75 .macro tlb_lock spc,ptp,pte,tmp,tmp1,fault
76 #ifdef CONFIG_SMP
77 cmpib,COND(=),n 0,\spc,2f
78- load32 PA(pa_tlb_lock),\tmp
79+ load_pa_tlb_lock \tmp
80 1: LDCW 0(\tmp),\tmp1
81 cmpib,COND(=) 0,\tmp1,1b
82 nop
83@@ -480,7 +489,7 @@
84 /* Release pa_tlb_lock lock. */
85 .macro tlb_unlock1 spc,tmp
86 #ifdef CONFIG_SMP
87- load32 PA(pa_tlb_lock),\tmp
88+ load_pa_tlb_lock \tmp
89 tlb_unlock0 \spc,\tmp
90 #endif
91 .endm
92--- a/arch/parisc/kernel/pacache.S
93+++ b/arch/parisc/kernel/pacache.S
94@@ -36,6 +36,7 @@
95 #include <asm/assembly.h>
96 #include <asm/pgtable.h>
97 #include <asm/cache.h>
98+#include <asm/ldcw.h>
99 #include <linux/linkage.h>
100
101 .text
102@@ -333,8 +334,12 @@ ENDPROC_CFI(flush_data_cache_local)
103
104 .macro tlb_lock la,flags,tmp
105 #ifdef CONFIG_SMP
106- ldil L%pa_tlb_lock,%r1
107- ldo R%pa_tlb_lock(%r1),\la
108+#if __PA_LDCW_ALIGNMENT > 4
109+ load32 pa_tlb_lock + __PA_LDCW_ALIGNMENT-1, \la
110+ depi 0,31,__PA_LDCW_ALIGN_ORDER, \la
111+#else
112+ load32 pa_tlb_lock, \la
113+#endif
114 rsm PSW_SM_I,\flags
115 1: LDCW 0(\la),\tmp
116 cmpib,<>,n 0,\tmp,3f