/* Linux-specific atomic operations for C-SKY.
   Copyright (C) 2018-2019 Free Software Foundation, Inc.
   Contributed by C-SKY Microsystems and Mentor Graphics.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */
26 /* Kernel helper for compare-and-exchange. */
28 __kernel_cmpxchg (int oldval
, int newval
, volatile int *ptr
)
30 register int _a0
asm ("a0") = oldval
;
31 register int _a1
asm ("a1") = newval
;
32 register volatile int *_a2
asm ("a2") = ptr
;
33 __asm__
__volatile__ ("trap 2\n" \
34 :"+r" (_a0
) :"r" (_a1
) , "r" (_a2
) \
40 /* Kernel helper for memory barrier. */
41 inline void __kernel_dmb (void)
43 asm ("sync":::"memory");
/* Note: we implement byte, short and int versions of atomic operations using
   the above kernel helpers, but there is no support for "long long" (64-bit)
   operations as yet.  */

/* All the subword helpers below are exported with hidden visibility.  */
#define HIDDEN __attribute__ ((visibility ("hidden")))

/* Bit offset of the least-addressed subword within its 32-bit word.
   The two duplicate value sets in the damaged source imply an endianness
   conditional here.  NOTE(review): guard macro name reconstructed —
   confirm the big-endian predefine for C-SKY.  */
#ifdef __CSKYBE__
#define INVERT_MASK_1 0
#define INVERT_MASK_2 0
#else
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16
#endif

/* Value masks for 1- and 2-byte quantities.  */
#define MASK_1 0xffu
#define MASK_2 0xffffu
63 #define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP) \
65 __sync_fetch_and_##OP##_4 (int *ptr, int val) \
72 failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr); \
74 while (failure != 0); \
79 FETCH_AND_OP_WORD (add
, , +)
80 FETCH_AND_OP_WORD (sub
, , -)
81 FETCH_AND_OP_WORD (or, , |)
82 FETCH_AND_OP_WORD (and, , &)
83 FETCH_AND_OP_WORD (xor, , ^)
84 FETCH_AND_OP_WORD (nand
, ~, &)
86 #define NAME_oldval(OP, WIDTH) __sync_fetch_and_##OP##_##WIDTH
87 #define NAME_newval(OP, WIDTH) __sync_##OP##_and_fetch_##WIDTH
89 /* Implement both __sync_<op>_and_fetch and __sync_fetch_and_<op> for
90 subword-sized quantities. */
92 #define SUBWORD_SYNC_OP(OP, PFX_OP, INF_OP, TYPE, WIDTH, RETURN) \
94 NAME##_##RETURN (OP, WIDTH) (TYPE *ptr, TYPE val) \
96 int *wordptr = (int *) ((unsigned int) ptr & ~3); \
97 unsigned int mask, shift, oldval, newval; \
100 shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
101 mask = MASK_##WIDTH << shift; \
106 newval = ((PFX_OP (((oldval & mask) >> shift) \
107 INF_OP (unsigned int) val)) << shift) & mask; \
108 newval |= oldval & ~mask; \
109 failure = __kernel_cmpxchg (oldval, newval, wordptr); \
111 while (failure != 0); \
113 return (RETURN & mask) >> shift; \
116 SUBWORD_SYNC_OP (add
, , +, unsigned short, 2, oldval
)
117 SUBWORD_SYNC_OP (sub
, , -, unsigned short, 2, oldval
)
118 SUBWORD_SYNC_OP (or, , |, unsigned short, 2, oldval
)
119 SUBWORD_SYNC_OP (and, , &, unsigned short, 2, oldval
)
120 SUBWORD_SYNC_OP (xor, , ^, unsigned short, 2, oldval
)
121 SUBWORD_SYNC_OP (nand
, ~, &, unsigned short, 2, oldval
)
123 SUBWORD_SYNC_OP (add
, , +, unsigned char, 1, oldval
)
124 SUBWORD_SYNC_OP (sub
, , -, unsigned char, 1, oldval
)
125 SUBWORD_SYNC_OP (or, , |, unsigned char, 1, oldval
)
126 SUBWORD_SYNC_OP (and, , &, unsigned char, 1, oldval
)
127 SUBWORD_SYNC_OP (xor, , ^, unsigned char, 1, oldval
)
128 SUBWORD_SYNC_OP (nand
, ~, &, unsigned char, 1, oldval
)
130 #define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP) \
132 __sync_##OP##_and_fetch_4 (int *ptr, int val) \
139 failure = __kernel_cmpxchg (tmp, PFX_OP tmp INF_OP val, ptr); \
141 while (failure != 0); \
143 return PFX_OP tmp INF_OP val; \
146 OP_AND_FETCH_WORD (add
, , +)
147 OP_AND_FETCH_WORD (sub
, , -)
148 OP_AND_FETCH_WORD (or, , |)
149 OP_AND_FETCH_WORD (and, , &)
150 OP_AND_FETCH_WORD (xor, , ^)
151 OP_AND_FETCH_WORD (nand
, ~, &)
153 SUBWORD_SYNC_OP (add
, , +, unsigned short, 2, newval
)
154 SUBWORD_SYNC_OP (sub
, , -, unsigned short, 2, newval
)
155 SUBWORD_SYNC_OP (or, , |, unsigned short, 2, newval
)
156 SUBWORD_SYNC_OP (and, , &, unsigned short, 2, newval
)
157 SUBWORD_SYNC_OP (xor, , ^, unsigned short, 2, newval
)
158 SUBWORD_SYNC_OP (nand
, ~, &, unsigned short, 2, newval
)
160 SUBWORD_SYNC_OP (add
, , +, unsigned char, 1, newval
)
161 SUBWORD_SYNC_OP (sub
, , -, unsigned char, 1, newval
)
162 SUBWORD_SYNC_OP (or, , |, unsigned char, 1, newval
)
163 SUBWORD_SYNC_OP (and, , &, unsigned char, 1, newval
)
164 SUBWORD_SYNC_OP (xor, , ^, unsigned char, 1, newval
)
165 SUBWORD_SYNC_OP (nand
, ~, &, unsigned char, 1, newval
)
168 __sync_val_compare_and_swap_4 (int *ptr
, int oldval
, int newval
)
170 int actual_oldval
, fail
;
174 actual_oldval
= *ptr
;
176 if (oldval
!= actual_oldval
)
177 return actual_oldval
;
179 fail
= __kernel_cmpxchg (actual_oldval
, newval
, ptr
);
186 #define SUBWORD_VAL_CAS(TYPE, WIDTH) \
188 __sync_val_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
191 int *wordptr = (int *)((unsigned int) ptr & ~3), fail; \
192 unsigned int mask, shift, actual_oldval, actual_newval; \
194 shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
195 mask = MASK_##WIDTH << shift; \
199 actual_oldval = *wordptr; \
201 if (((actual_oldval & mask) >> shift) != (unsigned int) oldval) \
202 return (actual_oldval & mask) >> shift; \
204 actual_newval = (actual_oldval & ~mask) \
205 | (((unsigned int) newval << shift) & mask); \
207 fail = __kernel_cmpxchg (actual_oldval, actual_newval, \
215 SUBWORD_VAL_CAS (unsigned short, 2)
216 SUBWORD_VAL_CAS (unsigned char, 1)
218 typedef unsigned char bool;
221 __sync_bool_compare_and_swap_4 (int *ptr
, int oldval
, int newval
)
223 int failure
= __kernel_cmpxchg (oldval
, newval
, ptr
);
224 return (failure
== 0);
227 #define SUBWORD_BOOL_CAS(TYPE, WIDTH) \
229 __sync_bool_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
233 = __sync_val_compare_and_swap_##WIDTH (ptr, oldval, newval); \
234 return (oldval == actual_oldval); \
237 SUBWORD_BOOL_CAS (unsigned short, 2)
238 SUBWORD_BOOL_CAS (unsigned char, 1)
241 __sync_synchronize (void)
247 __sync_lock_test_and_set_4 (int *ptr
, int val
)
254 failure
= __kernel_cmpxchg (oldval
, val
, ptr
);
256 while (failure
!= 0);
261 #define SUBWORD_TEST_AND_SET(TYPE, WIDTH) \
263 __sync_lock_test_and_set_##WIDTH (TYPE *ptr, TYPE val) \
266 unsigned int oldval, newval, shift, mask; \
267 int *wordptr = (int *) ((unsigned int) ptr & ~3); \
269 shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
270 mask = MASK_##WIDTH << shift; \
275 newval = ((oldval & ~mask) \
276 | (((unsigned int) val << shift) & mask)); \
277 failure = __kernel_cmpxchg (oldval, newval, wordptr); \
279 while (failure != 0); \
281 return (oldval & mask) >> shift; \
284 SUBWORD_TEST_AND_SET (unsigned short, 2)
285 SUBWORD_TEST_AND_SET (unsigned char, 1)
287 #define SYNC_LOCK_RELEASE(TYPE, WIDTH) \
289 __sync_lock_release_##WIDTH (TYPE *ptr) \
291 /* All writes before this point must be seen before we release \
292 the lock itself. */ \
297 SYNC_LOCK_RELEASE (int, 4)
298 SYNC_LOCK_RELEASE (short, 2)
299 SYNC_LOCK_RELEASE (char, 1)