From 113fc5a6e8c2288619ff7e8187a6f556b7e0d372 Mon Sep 17 00:00:00 2001
From: H. Peter Anvin <hpa@zytor.com>
Date: Tue, 27 Jul 2010 17:01:49 -0700
Subject: x86: Add memory modify constraints to xchg() and cmpxchg()

From: H. Peter Anvin <hpa@zytor.com>

commit 113fc5a6e8c2288619ff7e8187a6f556b7e0d372 upstream.

xchg() and cmpxchg() modify their memory operands, not merely read
them.  For some versions of gcc the "memory" clobber has apparently
dealt with the situation, but not for all.

Originally-by: Linus Torvalds <torvalds@linux-foundation.org>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Cc: Glauber Costa <glommer@redhat.com>
Cc: Avi Kivity <avi@redhat.com>
Cc: Peter Palfrader <peter@palfrader.org>
Cc: Greg KH <gregkh@suse.de>
Cc: Alan Cox <alan@lxorguk.ukuu.org.uk>
Cc: Zachary Amsden <zamsden@redhat.com>
Cc: Marcelo Tosatti <mtosatti@redhat.com>
LKML-Reference: <4C4F7277.8050306@zytor.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@suse.de>

---
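The constraint change can be seen in isolation in the following minimal
sketch (not part of the patch; the function names are hypothetical, and
a GCC-compatible compiler targeting x86 is assumed), shown here for the
4-byte xchg case:

static inline unsigned int xchg_old(volatile unsigned int *ptr,
				    unsigned int x)
{
	/*
	 * Old form: *ptr appears only as an input ("m"), so the
	 * compiler is told the asm merely reads it; only the "memory"
	 * clobber hints that something may be written.
	 */
	asm volatile("xchgl %0,%1"
		     : "=r" (x)
		     : "m" (*ptr), "0" (x)
		     : "memory");
	return x;
}

static inline unsigned int xchg_new(volatile unsigned int *ptr,
				    unsigned int x)
{
	/*
	 * New form: "+m" moves *ptr into the output list as a
	 * read-write operand, so the compiler knows the asm itself
	 * modifies the memory operand.
	 */
	asm volatile("xchgl %0,%1"
		     : "=r" (x), "+m" (*ptr)
		     : "0" (x)
		     : "memory");
	return x;
}

The cmpxchg hunks below follow the same pattern: the memory operand
moves from the input list into the output list ("+m", or "=m" in
__set_64bit), and the operand numbers in the instruction templates are
renumbered to match.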
 arch/x86/include/asm/cmpxchg_32.h |   68 +++++++++++++++++++-------------------
 arch/x86/include/asm/cmpxchg_64.h |   40 +++++++++++-----------
 2 files changed, 54 insertions(+), 54 deletions(-)

--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -27,20 +27,20 @@ struct __xchg_dummy {
 	switch (size) {						\
 	case 1:							\
 		asm volatile("xchgb %b0,%1"			\
-			     : "=q" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=q" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 2:							\
 		asm volatile("xchgw %w0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 4:							\
 		asm volatile("xchgl %0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	default:						\
@@ -70,14 +70,14 @@ static inline void __set_64bit(unsigned
 				 unsigned int low, unsigned int high)
 {
 	asm volatile("\n1:\t"
-		     "movl (%0), %%eax\n\t"
-		     "movl 4(%0), %%edx\n\t"
-		     LOCK_PREFIX "cmpxchg8b (%0)\n\t"
+		     "movl (%1), %%eax\n\t"
+		     "movl 4(%1), %%edx\n\t"
+		     LOCK_PREFIX "cmpxchg8b (%1)\n\t"
 		     "jnz 1b"
-		     : /* no outputs */
-		     : "D"(ptr),
-		       "b"(low),
-		       "c"(high)
+		     : "=m" (*ptr)
+		     : "D" (ptr),
+		       "b" (low),
+		       "c" (high)
 		     : "ax", "dx", "memory");
 }
 
@@ -121,21 +121,21 @@ extern void __cmpxchg_wrong_size(void);
 	__typeof__(*(ptr)) __new = (new);			\
 	switch (size) {						\
 	case 1:							\
-		asm volatile(lock "cmpxchgb %b1,%2"		\
-			     : "=a"(__ret)			\
-			     : "q"(__new), "m"(*__xg(ptr)), "0"(__old)	\
+		asm volatile(lock "cmpxchgb %b2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "q" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 2:							\
-		asm volatile(lock "cmpxchgw %w1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old)	\
+		asm volatile(lock "cmpxchgw %w2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 4:							\
-		asm volatile(lock "cmpxchgl %1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old)	\
+		asm volatile(lock "cmpxchgl %2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	default:						\
@@ -180,12 +180,12 @@ static inline unsigned long long __cmpxc
 					     unsigned long long new)
 {
 	unsigned long long prev;
-	asm volatile(LOCK_PREFIX "cmpxchg8b %3"
-		     : "=A"(prev)
-		     : "b"((unsigned long)new),
-		       "c"((unsigned long)(new >> 32)),
-		       "m"(*__xg(ptr)),
-		       "0"(old)
+	asm volatile(LOCK_PREFIX "cmpxchg8b %1"
+		     : "=A" (prev),
+		       "+m" (*__xg(ptr))
+		     : "b" ((unsigned long)new),
+		       "c" ((unsigned long)(new >> 32)),
+		       "0" (old)
 		     : "memory");
 	return prev;
 }
@@ -195,12 +195,12 @@ static inline unsigned long long __cmpxc
 					     unsigned long long new)
 {
 	unsigned long long prev;
-	asm volatile("cmpxchg8b %3"
-		     : "=A"(prev)
-		     : "b"((unsigned long)new),
-		       "c"((unsigned long)(new >> 32)),
-		       "m"(*__xg(ptr)),
-		       "0"(old)
+	asm volatile("cmpxchg8b %1"
+		     : "=A" (prev),
+		       "+m" (*__xg(ptr))
+		     : "b" ((unsigned long)new),
+		       "c" ((unsigned long)(new >> 32)),
+		       "0" (old)
 		     : "memory");
 	return prev;
 }
--- a/arch/x86/include/asm/cmpxchg_64.h
+++ b/arch/x86/include/asm/cmpxchg_64.h
@@ -26,26 +26,26 @@ extern void __cmpxchg_wrong_size(void);
 	switch (size) {						\
 	case 1:							\
 		asm volatile("xchgb %b0,%1"			\
-			     : "=q" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=q" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 2:							\
 		asm volatile("xchgw %w0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 4:							\
 		asm volatile("xchgl %k0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 8:							\
 		asm volatile("xchgq %0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	default:						\
@@ -71,27 +71,27 @@ extern void __cmpxchg_wrong_size(void);
 	__typeof__(*(ptr)) __new = (new);			\
 	switch (size) {						\
 	case 1:							\
-		asm volatile(lock "cmpxchgb %b1,%2"		\
-			     : "=a"(__ret)			\
-			     : "q"(__new), "m"(*__xg(ptr)), "0"(__old)	\
+		asm volatile(lock "cmpxchgb %b2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "q" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 2:							\
-		asm volatile(lock "cmpxchgw %w1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old)	\
+		asm volatile(lock "cmpxchgw %w2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 4:							\
-		asm volatile(lock "cmpxchgl %k1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old)	\
+		asm volatile(lock "cmpxchgl %k2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 8:							\
-		asm volatile(lock "cmpxchgq %1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old)	\
+		asm volatile(lock "cmpxchgq %2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	default:						\