/* Linux-specific atomic operations for Nios II Linux.
   Copyright (C) 2008-2020 Free Software Foundation, Inc.

This file is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

This file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */

/* We implement byte, short and int versions of each atomic operation
   using the kernel helper defined below.  There is no support for
   64-bit operations yet.  */

/* Crash a userspace program with SIGSEGV.  */
#define ABORT_INSTRUCTION asm ("stw zero, 0(zero)")

/* Kernel helper for compare-and-exchange a 32-bit value.  */
static inline long
__kernel_cmpxchg (int oldval, int newval, int *mem)
{
  register int r2 asm ("r2");
  register int *r4 asm ("r4") = mem;
  register int r5 asm ("r5") = oldval;
  register int r6 asm ("r6") = newval;

  /* Call the kernel provided fixed address cmpxchg helper routine.  */
  asm volatile ("movi %0, %4\n\t"
                "callr %0\n"
                : "=r" (r2)
                : "r" (r4), "r" (r5), "r" (r6), "I" (0x00001004)
                : "ra", "memory");
  return r2;
}

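/* Illustrative sketch only (not compiled): the helper above follows the
   usual kernel cmpxchg contract assumed throughout this file, returning
   zero when *MEM matched OLDVAL and NEWVAL was stored, and nonzero
   otherwise, so callers retry in a loop.  The function name below is
   hypothetical.  */
#if 0
static int
example_fetch_and_increment (int *counter)
{
  int old;
  do
    old = *counter;
  while (__kernel_cmpxchg (old, old + 1, counter) != 0);
  return old;
}
#endif
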
#define HIDDEN __attribute__ ((visibility ("hidden")))

#ifdef __nios2_little_endian__
#define INVERT_MASK_1 0
#define INVERT_MASK_2 0
#else
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16
#endif

#define MASK_1 0xffu
#define MASK_2 0xffffu

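/* Worked example of the shift/mask arithmetic used by the subword
   operations below, assuming a 1-byte access at byte offset 1 within
   its aligned word: ((1 << 3) ^ INVERT_MASK_1) gives a shift of 8 on
   little-endian (INVERT_MASK_1 == 0) and 16 on big-endian
   (INVERT_MASK_1 == 24), so MASK_1 << shift selects bits 8-15 or
   bits 16-23 of the containing word, respectively.  */
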
#define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP) \
  int HIDDEN \
  __sync_fetch_and_##OP##_4 (int *ptr, int val) \
  { \
    int failure, tmp; \
 \
    do { \
      tmp = *ptr; \
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr); \
    } while (failure != 0); \
 \
    return tmp; \
  }

FETCH_AND_OP_WORD (add, , +)
FETCH_AND_OP_WORD (sub, , -)
FETCH_AND_OP_WORD (or, , |)
FETCH_AND_OP_WORD (and, , &)
FETCH_AND_OP_WORD (xor, , ^)
FETCH_AND_OP_WORD (nand, ~, &)

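/* Illustrative sketch only (not compiled): user code such as the
   hypothetical function below ends up in the out-of-line
   __sync_fetch_and_add_4 generated by the macro above when the
   compiler emits library calls for the __sync builtins.  */
#if 0
static int
example_counter_add (int *counter, int n)
{
  /* Returns the value *counter held before the addition.  */
  return __sync_fetch_and_add (counter, n);
}
#endif
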
#define NAME_oldval(OP, WIDTH) __sync_fetch_and_##OP##_##WIDTH
#define NAME_newval(OP, WIDTH) __sync_##OP##_and_fetch_##WIDTH

/* Implement both __sync_<op>_and_fetch and __sync_fetch_and_<op> for
   subword-sized quantities.  */

#define SUBWORD_SYNC_OP(OP, PFX_OP, INF_OP, TYPE, WIDTH, RETURN) \
  TYPE HIDDEN \
  NAME##_##RETURN (OP, WIDTH) (TYPE *ptr, TYPE val) \
  { \
    int *wordptr = (int *) ((unsigned long) ptr & ~3); \
    unsigned int mask, shift, oldval, newval; \
    int failure; \
 \
    shift = (((unsigned long) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
    mask = MASK_##WIDTH << shift; \
 \
    do { \
      oldval = *wordptr; \
      newval = ((PFX_OP (((oldval & mask) >> shift) \
                         INF_OP (unsigned int) val)) << shift) & mask; \
      newval |= oldval & ~mask; \
      failure = __kernel_cmpxchg (oldval, newval, wordptr); \
    } while (failure != 0); \
 \
    return (RETURN & mask) >> shift; \
  }

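/* Illustrative sketch only (not compiled) of how one expansion of the
   macro above behaves, here a 16-bit fetch-and-add: the containing
   aligned word is updated in a cmpxchg retry loop while only the
   halfword selected by the mask changes.  Names are hypothetical.  */
#if 0
static unsigned short
example_fetch_and_add_2 (unsigned short *ptr, unsigned short val)
{
  int *wordptr = (int *) ((unsigned long) ptr & ~3);
  unsigned int shift = (((unsigned long) ptr & 3) << 3) ^ INVERT_MASK_2;
  unsigned int mask = MASK_2 << shift;
  unsigned int oldval, newval;

  do
    {
      oldval = *wordptr;
      newval = ((((oldval & mask) >> shift) + val) << shift) & mask;
      newval |= oldval & ~mask;
    }
  while (__kernel_cmpxchg (oldval, newval, wordptr) != 0);

  return (oldval & mask) >> shift;
}
#endif
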
SUBWORD_SYNC_OP (add, , +, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (sub, , -, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (or, , |, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (and, , &, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (xor, , ^, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, oldval)

SUBWORD_SYNC_OP (add, , +, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (sub, , -, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (or, , |, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (and, , &, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (xor, , ^, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, oldval)

#define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP) \
  int HIDDEN \
  __sync_##OP##_and_fetch_4 (int *ptr, int val) \
  { \
    int tmp, failure; \
 \
    do { \
      tmp = *ptr; \
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr); \
    } while (failure != 0); \
 \
    return PFX_OP (tmp INF_OP val); \
  }

OP_AND_FETCH_WORD (add, , +)
OP_AND_FETCH_WORD (sub, , -)
OP_AND_FETCH_WORD (or, , |)
OP_AND_FETCH_WORD (and, , &)
OP_AND_FETCH_WORD (xor, , ^)
OP_AND_FETCH_WORD (nand, ~, &)

SUBWORD_SYNC_OP (add, , +, unsigned short, 2, newval)
SUBWORD_SYNC_OP (sub, , -, unsigned short, 2, newval)
SUBWORD_SYNC_OP (or, , |, unsigned short, 2, newval)
SUBWORD_SYNC_OP (and, , &, unsigned short, 2, newval)
SUBWORD_SYNC_OP (xor, , ^, unsigned short, 2, newval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, newval)

SUBWORD_SYNC_OP (add, , +, unsigned char, 1, newval)
SUBWORD_SYNC_OP (sub, , -, unsigned char, 1, newval)
SUBWORD_SYNC_OP (or, , |, unsigned char, 1, newval)
SUBWORD_SYNC_OP (and, , &, unsigned char, 1, newval)
SUBWORD_SYNC_OP (xor, , ^, unsigned char, 1, newval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, newval)

int HIDDEN
__sync_val_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int actual_oldval, fail;

  while (1)
    {
      actual_oldval = *ptr;

      if (oldval != actual_oldval)
        return actual_oldval;

      fail = __kernel_cmpxchg (actual_oldval, newval, ptr);

      if (!fail)
        return oldval;
    }
}

#define SUBWORD_VAL_CAS(TYPE, WIDTH) \
  TYPE HIDDEN \
  __sync_val_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
                                       TYPE newval) \
  { \
    int *wordptr = (int *)((unsigned long) ptr & ~3), fail; \
    unsigned int mask, shift, actual_oldval, actual_newval; \
 \
    shift = (((unsigned long) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
    mask = MASK_##WIDTH << shift; \
 \
    while (1) \
      { \
        actual_oldval = *wordptr; \
 \
        if (((actual_oldval & mask) >> shift) != (unsigned int) oldval) \
          return (actual_oldval & mask) >> shift; \
 \
        actual_newval = (actual_oldval & ~mask) \
                        | (((unsigned int) newval << shift) & mask); \
 \
        fail = __kernel_cmpxchg (actual_oldval, actual_newval, \
                                 wordptr); \
 \
        if (!fail) \
          return oldval; \
      } \
  }

SUBWORD_VAL_CAS (unsigned short, 2)
SUBWORD_VAL_CAS (unsigned char, 1)

typedef unsigned char bool;

bool HIDDEN
__sync_bool_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int failure = __kernel_cmpxchg (oldval, newval, ptr);
  return (failure == 0);
}

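/* Illustrative sketch only (not compiled): a typical caller of the
   word-sized boolean compare-and-swap, here a hypothetical spin until
   a flag moves from 0 to 1.  */
#if 0
static void
example_spin_acquire (int *flag)
{
  while (!__sync_bool_compare_and_swap (flag, 0, 1))
    ;  /* Retry until the flag was observed as 0 and set to 1.  */
}
#endif
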
#define SUBWORD_BOOL_CAS(TYPE, WIDTH) \
  bool HIDDEN \
  __sync_bool_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
                                        TYPE newval) \
  { \
    TYPE actual_oldval \
      = __sync_val_compare_and_swap_##WIDTH (ptr, oldval, newval); \
    return (oldval == actual_oldval); \
  }

SUBWORD_BOOL_CAS (unsigned short, 2)
SUBWORD_BOOL_CAS (unsigned char, 1)

int HIDDEN
__sync_lock_test_and_set_4 (int *ptr, int val)
{
  int failure, oldval;

  do {
    oldval = *ptr;
    failure = __kernel_cmpxchg (oldval, val, ptr);
  } while (failure != 0);

  return oldval;
}

#define SUBWORD_TEST_AND_SET(TYPE, WIDTH) \
  TYPE HIDDEN \
  __sync_lock_test_and_set_##WIDTH (TYPE *ptr, TYPE val) \
  { \
    int failure; \
    unsigned int oldval, newval, shift, mask; \
    int *wordptr = (int *) ((unsigned long) ptr & ~3); \
 \
    shift = (((unsigned long) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
    mask = MASK_##WIDTH << shift; \
 \
    do { \
      oldval = *wordptr; \
      newval = (oldval & ~mask) \
               | (((unsigned int) val << shift) & mask); \
      failure = __kernel_cmpxchg (oldval, newval, wordptr); \
    } while (failure != 0); \
 \
    return (oldval & mask) >> shift; \
  }

SUBWORD_TEST_AND_SET (unsigned short, 2)
SUBWORD_TEST_AND_SET (unsigned char, 1)

#define SYNC_LOCK_RELEASE(TYPE, WIDTH) \
  void HIDDEN \
  __sync_lock_release_##WIDTH (TYPE *ptr) \
  { \
    /* All writes before this point must be seen before we release \
       the lock itself.  */ \
    __builtin_sync (); \
    *ptr = 0; \
  }

SYNC_LOCK_RELEASE (int, 4)
SYNC_LOCK_RELEASE (short, 2)
SYNC_LOCK_RELEASE (char, 1)
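/* Illustrative sketch only (not compiled): __sync_lock_test_and_set and
   __sync_lock_release are commonly paired as an acquire/release
   spinlock; the lock variable and function below are hypothetical.  */
#if 0
static void
example_locked_increment (int *lock, int *value)
{
  while (__sync_lock_test_and_set (lock, 1) != 0)
    ;  /* Spin while the previous value was nonzero (lock already held).  */
  ++*value;
  __sync_lock_release (lock);  /* Barrier, then store zero.  */
}
#endif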