/* Linux-specific atomic operations for PA Linux.
   Copyright (C) 2008-2021 Free Software Foundation, Inc.
   Based on code contributed by CodeSourcery for ARM EABI Linux.
   Modifications for PA Linux by Helge Deller <deller@gmx.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */

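/* Local copies of the errno values used by the LWS calls below rather
   than values pulled in from a header; note that parisc uses the
   non-standard value 251 for ENOSYS.  */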
#define EFAULT 14
#define EBUSY 16
#define ENOSYS 251

/* PA-RISC 2.0 supports out-of-order execution for loads and stores.
   Thus, we need to synchronize memory accesses.  For more info, see:
   "Advanced Performance Features of the 64-bit PA-8000" by Doug Hunt.

   We implement byte, short, int and 64-bit versions of each atomic
   operation using the kernel helpers defined below.  */

/* Determine kernel LWS function call (0=32-bit, 1=64-bit userspace).  */
#define LWS_CAS (sizeof(long) == 4 ? 0 : 1)

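/* The helpers below enter the kernel through the light-weight syscall
   (LWS) gateway: "ble 0xb0(%sr2, %r0)" branches to the LWS entry point
   on the gateway page, with the LWS function number loaded into %r20
   in the branch delay slot.  The result comes back in %r28 and an
   errno value, if any, in %r21.  */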
/* Kernel helper for compare-and-exchange a 32-bit value.  */
static inline long
__kernel_cmpxchg (volatile void *mem, int oldval, int newval)
{
  register unsigned long lws_mem asm("r26") = (unsigned long) (mem);
  register int lws_old asm("r25") = oldval;
  register int lws_new asm("r24") = newval;
  register long lws_ret asm("r28");
  register long lws_errno asm("r21");
  asm volatile ( "ble 0xb0(%%sr2, %%r0)  \n\t"
                 "ldi %2, %%r20          \n\t"
        : "=r" (lws_ret), "=r" (lws_errno)
        : "i" (LWS_CAS), "r" (lws_mem), "r" (lws_old), "r" (lws_new)
        : "r1", "r20", "r22", "r23", "r29", "r31", "memory"
  );

  /* If the kernel LWS call succeeded (lws_errno == 0), lws_ret contains
     the old value from memory.  If this value is equal to OLDVAL, the
     new value was written to memory.  If not, return -EBUSY.  */
  if (!lws_errno && lws_ret != oldval)
    return -EBUSY;

  return lws_errno;
}

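/* A minimal sketch of how this helper is used below: an unconditional
   atomic 32-bit exchange can be written as a retry loop around the
   kernel CAS, e.g.

     do
       old = __atomic_load_n (p, __ATOMIC_RELAXED);
     while (__kernel_cmpxchg (p, old, new) != 0);

   which is exactly the shape of __sync_lock_test_and_set_4 further
   down in this file.  */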
/* Kernel helper for compare-and-exchange of a variable-sized value.
   OLDVAL and NEWVAL are passed by address; VAL_SIZE selects the
   operand width.  */
static inline long
__kernel_cmpxchg2 (volatile void *mem, const void *oldval, const void *newval,
                   int val_size)
{
  register unsigned long lws_mem asm("r26") = (unsigned long) (mem);
  register unsigned long lws_old asm("r25") = (unsigned long) oldval;
  register unsigned long lws_new asm("r24") = (unsigned long) newval;
  register int lws_size asm("r23") = val_size;
  register long lws_ret asm("r28");
  register long lws_errno asm("r21");
  asm volatile ( "ble 0xb0(%%sr2, %%r0)  \n\t"
                 "ldi %6, %%r20          \n\t"
        : "=r" (lws_ret), "=r" (lws_errno), "+r" (lws_mem),
          "+r" (lws_old), "+r" (lws_new), "+r" (lws_size)
        : "i" (2)
        : "r1", "r20", "r22", "r29", "r31", "fr4", "memory"
  );

  /* If the kernel LWS call is successful, lws_ret contains 0.  */
  if (__builtin_expect (lws_ret == 0, 1))
    return 0;

  /* If the kernel LWS call fails with no error, return -EBUSY.  */
  if (__builtin_expect (!lws_errno, 0))
    return -EBUSY;

  return lws_errno;
}
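
/* The VAL_SIZE argument of __kernel_cmpxchg2 is a size index, not a
   byte count: the instantiations below pass 0 for 1-byte, 1 for 2-byte
   and 3 for 8-byte operands.  4-byte operands go through
   __kernel_cmpxchg instead.  */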
#define HIDDEN __attribute__ ((visibility ("hidden")))

/* Big endian masks  */
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16

#define MASK_1 0xffu
#define MASK_2 0xffffu

#define FETCH_AND_OP_2(OP, PFX_OP, INF_OP, TYPE, WIDTH, INDEX)          \
  TYPE HIDDEN                                                           \
  __sync_fetch_and_##OP##_##WIDTH (volatile void *ptr, TYPE val)        \
  {                                                                     \
    TYPE tmp, newval;                                                   \
    long failure;                                                       \
                                                                        \
    do {                                                                \
      tmp = __atomic_load_n ((volatile TYPE *)ptr, __ATOMIC_RELAXED);   \
      newval = PFX_OP (tmp INF_OP val);                                 \
      failure = __kernel_cmpxchg2 (ptr, &tmp, &newval, INDEX);          \
    } while (failure != 0);                                             \
                                                                        \
    return tmp;                                                         \
  }

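/* For reference, FETCH_AND_OP_2 (add, , +, short unsigned int, 2, 1)
   expands to roughly:

     short unsigned int HIDDEN
     __sync_fetch_and_add_2 (volatile void *ptr, short unsigned int val)
     {
       short unsigned int tmp, newval;
       long failure;

       do {
         tmp = __atomic_load_n ((volatile short unsigned int *)ptr,
                                __ATOMIC_RELAXED);
         newval = (tmp + val);
         failure = __kernel_cmpxchg2 (ptr, &tmp, &newval, 1);
       } while (failure != 0);

       return tmp;
     }

   i.e. the current value is reloaded and the operation recomputed until
   the kernel CAS succeeds, and the value observed before the update is
   returned.  */
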
FETCH_AND_OP_2 (add, , +, long long unsigned int, 8, 3)
FETCH_AND_OP_2 (sub, , -, long long unsigned int, 8, 3)
FETCH_AND_OP_2 (or, , |, long long unsigned int, 8, 3)
FETCH_AND_OP_2 (and, , &, long long unsigned int, 8, 3)
FETCH_AND_OP_2 (xor, , ^, long long unsigned int, 8, 3)
FETCH_AND_OP_2 (nand, ~, &, long long unsigned int, 8, 3)

FETCH_AND_OP_2 (add, , +, short unsigned int, 2, 1)
FETCH_AND_OP_2 (sub, , -, short unsigned int, 2, 1)
FETCH_AND_OP_2 (or, , |, short unsigned int, 2, 1)
FETCH_AND_OP_2 (and, , &, short unsigned int, 2, 1)
FETCH_AND_OP_2 (xor, , ^, short unsigned int, 2, 1)
FETCH_AND_OP_2 (nand, ~, &, short unsigned int, 2, 1)

FETCH_AND_OP_2 (add, , +, unsigned char, 1, 0)
FETCH_AND_OP_2 (sub, , -, unsigned char, 1, 0)
FETCH_AND_OP_2 (or, , |, unsigned char, 1, 0)
FETCH_AND_OP_2 (and, , &, unsigned char, 1, 0)
FETCH_AND_OP_2 (xor, , ^, unsigned char, 1, 0)
FETCH_AND_OP_2 (nand, ~, &, unsigned char, 1, 0)

#define OP_AND_FETCH_2(OP, PFX_OP, INF_OP, TYPE, WIDTH, INDEX)          \
  TYPE HIDDEN                                                           \
  __sync_##OP##_and_fetch_##WIDTH (volatile void *ptr, TYPE val)        \
  {                                                                     \
    TYPE tmp, newval;                                                   \
    long failure;                                                       \
                                                                        \
    do {                                                                \
      tmp = __atomic_load_n ((volatile TYPE *)ptr, __ATOMIC_RELAXED);   \
      newval = PFX_OP (tmp INF_OP val);                                 \
      failure = __kernel_cmpxchg2 (ptr, &tmp, &newval, INDEX);          \
    } while (failure != 0);                                             \
                                                                        \
    return PFX_OP (tmp INF_OP val);                                     \
  }

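/* OP_AND_FETCH_2 differs from FETCH_AND_OP_2 only in its return value:
   it recomputes and returns the new value rather than the value
   observed before the update.  */
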
OP_AND_FETCH_2 (add, , +, long long unsigned int, 8, 3)
OP_AND_FETCH_2 (sub, , -, long long unsigned int, 8, 3)
OP_AND_FETCH_2 (or, , |, long long unsigned int, 8, 3)
OP_AND_FETCH_2 (and, , &, long long unsigned int, 8, 3)
OP_AND_FETCH_2 (xor, , ^, long long unsigned int, 8, 3)
OP_AND_FETCH_2 (nand, ~, &, long long unsigned int, 8, 3)

OP_AND_FETCH_2 (add, , +, short unsigned int, 2, 1)
OP_AND_FETCH_2 (sub, , -, short unsigned int, 2, 1)
OP_AND_FETCH_2 (or, , |, short unsigned int, 2, 1)
OP_AND_FETCH_2 (and, , &, short unsigned int, 2, 1)
OP_AND_FETCH_2 (xor, , ^, short unsigned int, 2, 1)
OP_AND_FETCH_2 (nand, ~, &, short unsigned int, 2, 1)

OP_AND_FETCH_2 (add, , +, unsigned char, 1, 0)
OP_AND_FETCH_2 (sub, , -, unsigned char, 1, 0)
OP_AND_FETCH_2 (or, , |, unsigned char, 1, 0)
OP_AND_FETCH_2 (and, , &, unsigned char, 1, 0)
OP_AND_FETCH_2 (xor, , ^, unsigned char, 1, 0)
OP_AND_FETCH_2 (nand, ~, &, unsigned char, 1, 0)

#define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP)                           \
  unsigned int HIDDEN                                                   \
  __sync_fetch_and_##OP##_4 (volatile void *ptr, unsigned int val)      \
  {                                                                     \
    unsigned int tmp;                                                   \
    long failure;                                                       \
                                                                        \
    do {                                                                \
      tmp = __atomic_load_n ((volatile unsigned int *)ptr,              \
                             __ATOMIC_RELAXED);                         \
      failure = __kernel_cmpxchg (ptr, tmp, PFX_OP (tmp INF_OP val));   \
    } while (failure != 0);                                             \
                                                                        \
    return tmp;                                                         \
  }

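/* Word-sized (4-byte) operations use the 32-bit helper
   __kernel_cmpxchg, so the old and new values are passed directly in
   registers rather than by address.  */
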
FETCH_AND_OP_WORD (add, , +)
FETCH_AND_OP_WORD (sub, , -)
FETCH_AND_OP_WORD (or, , |)
FETCH_AND_OP_WORD (and, , &)
FETCH_AND_OP_WORD (xor, , ^)
FETCH_AND_OP_WORD (nand, ~, &)

#define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP)                           \
  unsigned int HIDDEN                                                   \
  __sync_##OP##_and_fetch_4 (volatile void *ptr, unsigned int val)      \
  {                                                                     \
    unsigned int tmp;                                                   \
    long failure;                                                       \
                                                                        \
    do {                                                                \
      tmp = __atomic_load_n ((volatile unsigned int *)ptr,              \
                             __ATOMIC_RELAXED);                         \
      failure = __kernel_cmpxchg (ptr, tmp, PFX_OP (tmp INF_OP val));   \
    } while (failure != 0);                                             \
                                                                        \
    return PFX_OP (tmp INF_OP val);                                     \
  }

OP_AND_FETCH_WORD (add, , +)
OP_AND_FETCH_WORD (sub, , -)
OP_AND_FETCH_WORD (or, , |)
OP_AND_FETCH_WORD (and, , &)
OP_AND_FETCH_WORD (xor, , ^)
OP_AND_FETCH_WORD (nand, ~, &)

typedef unsigned char bool;

#define COMPARE_AND_SWAP_2(TYPE, WIDTH, INDEX)                          \
  TYPE HIDDEN                                                           \
  __sync_val_compare_and_swap_##WIDTH (volatile void *ptr, TYPE oldval, \
                                       TYPE newval)                     \
  {                                                                     \
    TYPE actual_oldval;                                                 \
    long fail;                                                          \
                                                                        \
    while (1)                                                           \
      {                                                                 \
        actual_oldval = __atomic_load_n ((volatile TYPE *)ptr,          \
                                         __ATOMIC_RELAXED);             \
                                                                        \
        if (__builtin_expect (oldval != actual_oldval, 0))              \
          return actual_oldval;                                         \
                                                                        \
        fail = __kernel_cmpxchg2 (ptr, &actual_oldval, &newval, INDEX); \
                                                                        \
        if (__builtin_expect (!fail, 1))                                \
          return actual_oldval;                                         \
      }                                                                 \
  }                                                                     \
                                                                        \
  _Bool HIDDEN                                                          \
  __sync_bool_compare_and_swap_##WIDTH (volatile void *ptr,             \
                                        TYPE oldval, TYPE newval)       \
  {                                                                     \
    long failure = __kernel_cmpxchg2 (ptr, &oldval, &newval, INDEX);    \
    return (failure == 0);                                              \
  }

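/* The TYPE ("val") variant above loops until the kernel CAS either
   succeeds or the value found in memory no longer matches OLDVAL, and
   returns the value actually observed; the _Bool variant simply reports
   whether its single CAS attempt performed the swap.  */
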
COMPARE_AND_SWAP_2 (long long unsigned int, 8, 3)
COMPARE_AND_SWAP_2 (short unsigned int, 2, 1)
COMPARE_AND_SWAP_2 (unsigned char, 1, 0)

unsigned int HIDDEN
__sync_val_compare_and_swap_4 (volatile void *ptr, unsigned int oldval,
                               unsigned int newval)
{
  long fail;
  unsigned int actual_oldval;

  while (1)
    {
      actual_oldval = __atomic_load_n ((volatile unsigned int *)ptr,
                                       __ATOMIC_RELAXED);

      if (__builtin_expect (oldval != actual_oldval, 0))
        return actual_oldval;

      fail = __kernel_cmpxchg (ptr, actual_oldval, newval);

      if (__builtin_expect (!fail, 1))
        return actual_oldval;
    }
}

_Bool HIDDEN
__sync_bool_compare_and_swap_4 (volatile void *ptr, unsigned int oldval,
                                unsigned int newval)
{
  long failure = __kernel_cmpxchg (ptr, oldval, newval);
  return (failure == 0);
}

#define SYNC_LOCK_TEST_AND_SET_2(TYPE, WIDTH, INDEX)                    \
  TYPE HIDDEN                                                           \
  __sync_lock_test_and_set_##WIDTH (volatile void *ptr, TYPE val)       \
  {                                                                     \
    TYPE oldval;                                                        \
    long failure;                                                       \
                                                                        \
    do {                                                                \
      oldval = __atomic_load_n ((volatile TYPE *)ptr,                   \
                                __ATOMIC_RELAXED);                      \
      failure = __kernel_cmpxchg2 (ptr, &oldval, &val, INDEX);          \
    } while (failure != 0);                                             \
                                                                        \
    return oldval;                                                      \
  }

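/* __sync_lock_test_and_set unconditionally stores VAL and returns the
   previous contents of *PTR; the CAS loop simply retries until the
   exchange is not disturbed by a concurrent update.  */
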
SYNC_LOCK_TEST_AND_SET_2 (long long unsigned int, 8, 3)
SYNC_LOCK_TEST_AND_SET_2 (short unsigned int, 2, 1)
SYNC_LOCK_TEST_AND_SET_2 (unsigned char, 1, 0)

unsigned int HIDDEN
__sync_lock_test_and_set_4 (volatile void *ptr, unsigned int val)
{
  long failure;
  unsigned int oldval;

  do {
    oldval = __atomic_load_n ((volatile unsigned int *)ptr, __ATOMIC_RELAXED);
    failure = __kernel_cmpxchg (ptr, oldval, val);
  } while (failure != 0);

  return oldval;
}

#define SYNC_LOCK_RELEASE_1(TYPE, WIDTH, INDEX)                         \
  void HIDDEN                                                           \
  __sync_lock_release_##WIDTH (volatile void *ptr)                      \
  {                                                                     \
    TYPE oldval, val = 0;                                               \
    long failure;                                                       \
                                                                        \
    do {                                                                \
      oldval = __atomic_load_n ((volatile TYPE *)ptr,                   \
                                __ATOMIC_RELAXED);                      \
      failure = __kernel_cmpxchg2 (ptr, &oldval, &val, INDEX);          \
    } while (failure != 0);                                             \
  }

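/* Releasing a lock amounts to atomically storing zero.  It is done with
   the same CAS retry loop rather than a plain store, presumably so the
   kernel helper also provides the memory barrier the builtin needs.  */
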
SYNC_LOCK_RELEASE_1 (long long unsigned int, 8, 3)
SYNC_LOCK_RELEASE_1 (short unsigned int, 2, 1)
SYNC_LOCK_RELEASE_1 (unsigned char, 1, 0)

void HIDDEN
__sync_lock_release_4 (volatile void *ptr)
{
  long failure;
  unsigned int oldval;

  do {
    oldval = __atomic_load_n ((volatile unsigned int *)ptr, __ATOMIC_RELAXED);
    failure = __kernel_cmpxchg (ptr, oldval, 0);
  } while (failure != 0);
}