]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/aarch64/atomics.md
Update copyright years.
[thirdparty/gcc.git] / gcc / config / aarch64 / atomics.md
CommitLineData
86c818c2 1;; Machine description for AArch64 processor synchronization primitives.
a945c346 2;; Copyright (C) 2009-2024 Free Software Foundation, Inc.
86c818c2
JG
3;; Contributed by ARM Ltd.
4;;
5;; This file is part of GCC.
6;;
7;; GCC is free software; you can redistribute it and/or modify it
8;; under the terms of the GNU General Public License as published by
9;; the Free Software Foundation; either version 3, or (at your option)
10;; any later version.
11;;
12;; GCC is distributed in the hope that it will be useful, but
13;; WITHOUT ANY WARRANTY; without even the implied warranty of
14;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15;; General Public License for more details.
16;;
17;; You should have received a copy of the GNU General Public License
18;; along with GCC; see the file COPYING3. If not see
19;; <http://www.gnu.org/licenses/>.
20
6380d2bc
MW
21;; Instruction patterns.
22
;; Expander for the generic compare-and-swap optab.  All code generation
;; (LL/SC loop, LSE CAS, or outline-atomics library call) is delegated to
;; aarch64_expand_compare_and_swap.
(define_expand "@atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "register_operand" "")			;; bool out
   (match_operand:ALLI_TI 1 "register_operand" "")		;; val out
   (match_operand:ALLI_TI 2 "aarch64_sync_memory_operand" "")	;; memory
   (match_operand:ALLI_TI 3 "nonmemory_operand" "")		;; expected
   (match_operand:ALLI_TI 4 "aarch64_reg_or_zero" "")		;; desired
   (match_operand:SI 5 "const_int_operand")			;; is_weak
   (match_operand:SI 6 "const_int_operand")			;; mod_s
   (match_operand:SI 7 "const_int_operand")]			;; mod_f
  ""
  {
    aarch64_expand_compare_and_swap (operands);
    DONE;
  }
)
38
;; Predicate and matching immediate-constraint letter for the "expected"
;; operand of the sub-word (QI/HI) compare-and-swap patterns below.
(define_mode_attr cas_short_expected_pred
  [(QI "aarch64_reg_or_imm") (HI "aarch64_plushi_operand")])
(define_mode_attr cas_short_expected_imm
  [(QI "n") (HI "Uih")])
;; LL/SC compare-and-swap for sub-word (QI/HI) modes.  Emitted as a single
;; insn ("#") and only split after the epilogue so that nothing can be
;; scheduled into the exclusive-load/store loop.
(define_insn_and_split "@aarch64_compare_and_swap<mode>"
  [(set (reg:CC CC_REGNUM)					;; bool out
    (unspec_volatile:CC [(const_int 0)] UNSPECV_ATOMIC_CMPSW))
   (set (match_operand:SI 0 "register_operand" "=&r")		;; val out
    (zero_extend:SI
      (match_operand:SHORT 1 "aarch64_sync_memory_operand" "+Q"))) ;; memory
   (set (match_dup 1)
    (unspec_volatile:SHORT
      [(match_operand:SHORT 2 "<cas_short_expected_pred>"
			      "r<cas_short_expected_imm>")	;; expected
       (match_operand:SHORT 3 "aarch64_reg_or_zero" "rZ")	;; desired
       (match_operand:SI 4 "const_int_operand")			;; is_weak
       (match_operand:SI 5 "const_int_operand")			;; mod_s
       (match_operand:SI 6 "const_int_operand")]		;; mod_f
      UNSPECV_ATOMIC_CMPSW))
   (clobber (match_scratch:SI 7 "=&r"))]
  ""
  "#"
  "&& epilogue_completed"
  [(const_int 0)]
  {
    aarch64_split_compare_and_swap (operands);
    DONE;
  }
)
69
;; LL/SC compare-and-swap for SImode/DImode.  Split after the epilogue, as
;; for the sub-word variant above.
(define_insn_and_split "@aarch64_compare_and_swap<mode>"
  [(set (reg:CC CC_REGNUM)					;; bool out
    (unspec_volatile:CC [(const_int 0)] UNSPECV_ATOMIC_CMPSW))
   (set (match_operand:GPI 0 "register_operand" "=&r")		;; val out
    (match_operand:GPI 1 "aarch64_sync_memory_operand" "+Q"))	;; memory
   (set (match_dup 1)
    (unspec_volatile:GPI
      [(match_operand:GPI 2 "aarch64_plus_operand" "rIJ")	;; expect
       (match_operand:GPI 3 "aarch64_reg_or_zero" "rZ")		;; desired
       (match_operand:SI 4 "const_int_operand")			;; is_weak
       (match_operand:SI 5 "const_int_operand")			;; mod_s
       (match_operand:SI 6 "const_int_operand")]		;; mod_f
      UNSPECV_ATOMIC_CMPSW))
   (clobber (match_scratch:SI 7 "=&r"))]
  ""
  "#"
  "&& epilogue_completed"
  [(const_int 0)]
  {
    aarch64_split_compare_and_swap (operands);
    DONE;
  }
)
93
;; LL/SC compare-and-swap for 128-bit (TImode) values, using the paired
;; exclusive instructions LDXP/STXP via the split helper.
(define_insn_and_split "@aarch64_compare_and_swap<mode>"
  [(set (reg:CC CC_REGNUM)					;; bool out
    (unspec_volatile:CC [(const_int 0)] UNSPECV_ATOMIC_CMPSW))
   (set (match_operand:JUST_TI 0 "register_operand" "=&r")	;; val out
    (match_operand:JUST_TI 1 "aarch64_sync_memory_operand" "+Q")) ;; memory
   (set (match_dup 1)
    (unspec_volatile:JUST_TI
      [(match_operand:JUST_TI 2 "aarch64_reg_or_zero" "rZ")	;; expect
       (match_operand:JUST_TI 3 "aarch64_reg_or_zero" "rZ")	;; desired
       (match_operand:SI 4 "const_int_operand")			;; is_weak
       (match_operand:SI 5 "const_int_operand")			;; mod_s
       (match_operand:SI 6 "const_int_operand")]		;; mod_f
      UNSPECV_ATOMIC_CMPSW))
   (clobber (match_scratch:SI 7 "=&r"))]
  ""
  "#"
  "&& epilogue_completed"
  [(const_int 0)]
  {
    aarch64_split_compare_and_swap (operands);
    DONE;
  }
)
117
;; LSE CAS instruction for sub-word modes.  Operand 0 holds the expected
;; value on input and receives the observed memory value on output.
(define_insn "@aarch64_compare_and_swap<mode>_lse"
  [(set (match_operand:SI 0 "register_operand" "+r")		;; val out
    (zero_extend:SI
      (match_operand:SHORT 1 "aarch64_sync_memory_operand" "+Q"))) ;; memory
   (set (match_dup 1)
    (unspec_volatile:SHORT
      [(match_dup 0)						;; expected
       (match_operand:SHORT 2 "aarch64_reg_or_zero" "rZ")	;; desired
       (match_operand:SI 3 "const_int_operand")]		;; mod_s
      UNSPECV_ATOMIC_CMPSW))]
  "TARGET_LSE"
{
  /* Pick the CAS variant whose acquire/release semantics match the
     requested memory model.  */
  enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
  if (is_mm_relaxed (model))
    return "cas<atomic_sfx>\t%<w>0, %<w>2, %1";
  else if (is_mm_acquire (model) || is_mm_consume (model))
    return "casa<atomic_sfx>\t%<w>0, %<w>2, %1";
  else if (is_mm_release (model))
    return "casl<atomic_sfx>\t%<w>0, %<w>2, %1";
  else
    return "casal<atomic_sfx>\t%<w>0, %<w>2, %1";
})
b0770c0f 140
;; LSE CAS instruction for SImode/DImode; same operand convention as the
;; sub-word variant above.
(define_insn "@aarch64_compare_and_swap<mode>_lse"
  [(set (match_operand:GPI 0 "register_operand" "+r")		;; val out
    (match_operand:GPI 1 "aarch64_sync_memory_operand" "+Q"))	;; memory
   (set (match_dup 1)
    (unspec_volatile:GPI
      [(match_dup 0)						;; expected
       (match_operand:GPI 2 "aarch64_reg_or_zero" "rZ")		;; desired
       (match_operand:SI 3 "const_int_operand")]		;; mod_s
      UNSPECV_ATOMIC_CMPSW))]
  "TARGET_LSE"
{
  enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
  if (is_mm_relaxed (model))
    return "cas<atomic_sfx>\t%<w>0, %<w>2, %1";
  else if (is_mm_acquire (model) || is_mm_consume (model))
    return "casa<atomic_sfx>\t%<w>0, %<w>2, %1";
  else if (is_mm_release (model))
    return "casl<atomic_sfx>\t%<w>0, %<w>2, %1";
  else
    return "casal<atomic_sfx>\t%<w>0, %<w>2, %1";
})
b0770c0f 162
;; LSE CASP instruction for 128-bit values; %R accesses the high half of
;; the TImode register pair.
(define_insn "@aarch64_compare_and_swap<mode>_lse"
  [(set (match_operand:JUST_TI 0 "register_operand" "+r")	;; val out
    (match_operand:JUST_TI 1 "aarch64_sync_memory_operand" "+Q")) ;; memory
   (set (match_dup 1)
    (unspec_volatile:JUST_TI
      [(match_dup 0)						;; expect
       (match_operand:JUST_TI 2 "register_operand" "r")		;; desired
       (match_operand:SI 3 "const_int_operand")]		;; mod_s
      UNSPECV_ATOMIC_CMPSW))]
  "TARGET_LSE"
{
  enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
  if (is_mm_relaxed (model))
    return "casp\t%0, %R0, %2, %R2, %1";
  else if (is_mm_acquire (model) || is_mm_consume (model))
    return "caspa\t%0, %R0, %2, %R2, %1";
  else if (is_mm_release (model))
    return "caspl\t%0, %R0, %2, %R2, %1";
  else
    return "caspal\t%0, %R0, %2, %R2, %1";
})
184
;; Expand an atomic exchange: LSE SWP when available, an outline-atomics
;; library call when enabled, otherwise an LL/SC loop.
(define_expand "atomic_exchange<mode>"
 [(match_operand:ALLI 0 "register_operand")
  (match_operand:ALLI 1 "aarch64_sync_memory_operand")
  (match_operand:ALLI 2 "aarch64_reg_or_zero")
  (match_operand:SI 3 "const_int_operand")]
  ""
  {
    /* Use an atomic SWP when available.  */
    if (TARGET_LSE)
      {
	emit_insn (gen_aarch64_atomic_exchange<mode>_lse
		   (operands[0], operands[1], operands[2], operands[3]));
      }
    else if (TARGET_OUTLINE_ATOMICS)
      {
	machine_mode mode = <MODE>mode;
	rtx func = aarch64_atomic_ool_func (mode, operands[3],
					    &aarch64_ool_swp_names);
	rtx rval = emit_library_call_value (func, operands[0], LCT_NORMAL,
					    mode, operands[2], mode,
					    XEXP (operands[1], 0), Pmode);
	emit_move_insn (operands[0], rval);
      }
    else
      {
	emit_insn (gen_aarch64_atomic_exchange<mode>
		   (operands[0], operands[1], operands[2], operands[3]));
      }
    DONE;
  }
)
216
;; LL/SC atomic exchange; split after the epilogue into an explicit
;; exclusive-load/store loop by aarch64_split_atomic_op.
(define_insn_and_split "aarch64_atomic_exchange<mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=&r")		;; output
    (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q"))	;; memory
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(match_operand:ALLI 2 "aarch64_reg_or_zero" "rZ")	;; input
       (match_operand:SI 3 "const_int_operand" "")]		;; model
      UNSPECV_ATOMIC_EXCHG))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  "!TARGET_LSE"
  "#"
  "&& epilogue_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (SET, operands[0], NULL, operands[1],
			     operands[2], operands[3], operands[4]);
    DONE;
  }
)
237
;; LSE SWP instruction: single-instruction atomic exchange, with the
;; acquire/release variant chosen from the memory model.
(define_insn "aarch64_atomic_exchange<mode>_lse"
  [(set (match_operand:ALLI 0 "register_operand" "=r")
    (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q"))
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(match_operand:ALLI 2 "aarch64_reg_or_zero" "rZ")
       (match_operand:SI 3 "const_int_operand" "")]
      UNSPECV_ATOMIC_EXCHG))]
  "TARGET_LSE"
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
    if (is_mm_relaxed (model))
      return "swp<atomic_sfx>\t%<w>2, %<w>0, %1";
    else if (is_mm_acquire (model) || is_mm_consume (model))
      return "swpa<atomic_sfx>\t%<w>2, %<w>0, %1";
    else if (is_mm_release (model))
      return "swpl<atomic_sfx>\t%<w>2, %<w>0, %1";
    else
      return "swpal<atomic_sfx>\t%<w>2, %<w>0, %1";
  }
)
259
;; Expand an atomic read-modify-write with no value returned.  With LSE the
;; operation maps onto LDADD/LDSET/LDEOR/LDCLR (negating the operand for
;; SUB, inverting it for AND since LSE provides bit-clear, not and);
;; otherwise use an outline-atomics library call or an LL/SC loop.
(define_expand "atomic_<atomic_optab><mode>"
 [(match_operand:ALLI 0 "aarch64_sync_memory_operand")
  (atomic_op:ALLI
   (match_operand:ALLI 1 "<atomic_op_operand>")
   (match_operand:SI 2 "const_int_operand"))]
  ""
  {
    rtx (*gen) (rtx, rtx, rtx);

    /* Use an atomic load-operate instruction when possible.  */
    if (TARGET_LSE)
      {
	switch (<CODE>)
	  {
	  case MINUS:
	    /* No atomic SUB: negate and fall through to LDADD.  */
	    operands[1] = expand_simple_unop (<MODE>mode, NEG, operands[1],
					      NULL, 1);
	    /* fallthru */
	  case PLUS:
	    gen = gen_aarch64_atomic_add<mode>_lse;
	    break;
	  case IOR:
	    gen = gen_aarch64_atomic_ior<mode>_lse;
	    break;
	  case XOR:
	    gen = gen_aarch64_atomic_xor<mode>_lse;
	    break;
	  case AND:
	    /* No atomic AND: invert and use LDCLR (bit-clear).  */
	    operands[1] = expand_simple_unop (<MODE>mode, NOT, operands[1],
					      NULL, 1);
	    gen = gen_aarch64_atomic_bic<mode>_lse;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	operands[1] = force_reg (<MODE>mode, operands[1]);
      }
    else if (TARGET_OUTLINE_ATOMICS)
      {
	const atomic_ool_names *names;
	switch (<CODE>)
	  {
	  case MINUS:
	    operands[1] = expand_simple_unop (<MODE>mode, NEG, operands[1],
					      NULL, 1);
	    /* fallthru */
	  case PLUS:
	    names = &aarch64_ool_ldadd_names;
	    break;
	  case IOR:
	    names = &aarch64_ool_ldset_names;
	    break;
	  case XOR:
	    names = &aarch64_ool_ldeor_names;
	    break;
	  case AND:
	    operands[1] = expand_simple_unop (<MODE>mode, NOT, operands[1],
					      NULL, 1);
	    names = &aarch64_ool_ldclr_names;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	machine_mode mode = <MODE>mode;
	rtx func = aarch64_atomic_ool_func (mode, operands[2], names);
	emit_library_call_value (func, NULL_RTX, LCT_NORMAL, mode,
				 operands[1], mode,
				 XEXP (operands[0], 0), Pmode);
	DONE;
      }
    else
      gen = gen_aarch64_atomic_<atomic_optab><mode>;

    emit_insn (gen (operands[0], operands[1], operands[2]));
    DONE;
  }
)
337
;; LL/SC read-modify-write with no value returned; split after the epilogue
;; into an exclusive-load/op/exclusive-store loop.
(define_insn_and_split "aarch64_atomic_<atomic_optab><mode>"
 [(set (match_operand:ALLI 0 "aarch64_sync_memory_operand" "+Q")
   (unspec_volatile:ALLI
    [(atomic_op:ALLI (match_dup 0)
      (match_operand:ALLI 1 "<atomic_op_operand>" "r<const_atomic>"))
     (match_operand:SI 2 "const_int_operand")]			;; model
    UNSPECV_ATOMIC_OP))
  (clobber (reg:CC CC_REGNUM))
  (clobber (match_scratch:ALLI 3 "=&r"))
  (clobber (match_scratch:SI 4 "=&r"))]
  ""
  "#"
  "&& epilogue_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (<CODE>, NULL, operands[3], operands[0],
			     operands[1], operands[2], operands[4]);
    DONE;
  }
)
358
53de1ea8
RH
359;; It is tempting to want to use ST<OP> for relaxed and release
360;; memory models here. However, that is incompatible with the
361;; C++ memory model for the following case:
362;;
363;; atomic_fetch_add(ptr, 1, memory_order_relaxed);
364;; atomic_thread_fence(memory_order_acquire);
365;;
366;; The problem is that the architecture says that ST<OP> (and LD<OP>
367;; insns where the destination is XZR) are not regarded as a read.
368;; However we also implement the acquire memory barrier with DMB LD,
369;; and so the ST<OP> is not blocked by the barrier.
370
;; LSE load-operate used when the result is discarded.  The loaded value
;; still goes into a scratch register rather than XZR — see the comment
;; above about ST<OP>/XZR not counting as a read for DMB LD.
(define_insn "aarch64_atomic_<atomic_ldoptab><mode>_lse"
  [(set (match_operand:ALLI 0 "aarch64_sync_memory_operand" "+Q")
	(unspec_volatile:ALLI
	  [(match_dup 0)
	   (match_operand:ALLI 1 "register_operand" "r")
	   (match_operand:SI 2 "const_int_operand")]
	  ATOMIC_LDOP))
   (clobber (match_scratch:ALLI 3 "=r"))]
  "TARGET_LSE"
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (is_mm_relaxed (model))
      return "ld<atomic_ldop><atomic_sfx>\t%<w>1, %<w>3, %0";
    else if (is_mm_release (model))
      return "ld<atomic_ldop>l<atomic_sfx>\t%<w>1, %<w>3, %0";
    else if (is_mm_acquire (model) || is_mm_consume (model))
      return "ld<atomic_ldop>a<atomic_sfx>\t%<w>1, %<w>3, %0";
    else
      return "ld<atomic_ldop>al<atomic_sfx>\t%<w>1, %<w>3, %0";
  }
)
392
;; Atomic NAND (no value returned).  NAND has no LSE equivalent, so this is
;; always an LL/SC loop, split after the epilogue.
(define_insn_and_split "atomic_nand<mode>"
  [(set (match_operand:ALLI 0 "aarch64_sync_memory_operand" "+Q")
    (unspec_volatile:ALLI
      [(not:ALLI
	(and:ALLI (match_dup 0)
	  (match_operand:ALLI 1 "aarch64_logical_operand" "r<lconst_atomic>")))
       (match_operand:SI 2 "const_int_operand")]		;; model
      UNSPECV_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:ALLI 3 "=&r"))
   (clobber (match_scratch:SI 4 "=&r"))]
  ""
  "#"
  "&& epilogue_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (NOT, NULL, operands[3], operands[0],
			     operands[1], operands[2], operands[4]);
    DONE;
  }
)
414
7803ec5e 415;; Load-operate-store, returning the original memory data.
641c2f8b
MW
416
;; Expand an atomic fetch-and-<op> (returns the ORIGINAL memory value).
;; Same operation mapping as atomic_<atomic_optab><mode> above: LSE
;; load-operate, outline-atomics call, or LL/SC loop.
(define_expand "atomic_fetch_<atomic_optab><mode>"
 [(match_operand:ALLI 0 "register_operand")
  (match_operand:ALLI 1 "aarch64_sync_memory_operand")
  (atomic_op:ALLI
   (match_operand:ALLI 2 "<atomic_op_operand>")
   (match_operand:SI 3 "const_int_operand"))]
  ""
{
  rtx (*gen) (rtx, rtx, rtx, rtx);

  /* Use an atomic load-operate instruction when possible.  */
  if (TARGET_LSE)
    {
      switch (<CODE>)
	{
	case MINUS:
	  /* No atomic SUB: negate and fall through to LDADD.  */
	  operands[2] = expand_simple_unop (<MODE>mode, NEG, operands[2],
					    NULL, 1);
	  /* fallthru */
	case PLUS:
	  gen = gen_aarch64_atomic_fetch_add<mode>_lse;
	  break;
	case IOR:
	  gen = gen_aarch64_atomic_fetch_ior<mode>_lse;
	  break;
	case XOR:
	  gen = gen_aarch64_atomic_fetch_xor<mode>_lse;
	  break;
	case AND:
	  /* No atomic AND: invert and use LDCLR (bit-clear).  */
	  operands[2] = expand_simple_unop (<MODE>mode, NOT, operands[2],
					    NULL, 1);
	  gen = gen_aarch64_atomic_fetch_bic<mode>_lse;
	  break;
	default:
	  gcc_unreachable ();
	}
      operands[2] = force_reg (<MODE>mode, operands[2]);
    }
  else if (TARGET_OUTLINE_ATOMICS)
    {
      const atomic_ool_names *names;
      switch (<CODE>)
	{
	case MINUS:
	  operands[2] = expand_simple_unop (<MODE>mode, NEG, operands[2],
					    NULL, 1);
	  /* fallthru */
	case PLUS:
	  names = &aarch64_ool_ldadd_names;
	  break;
	case IOR:
	  names = &aarch64_ool_ldset_names;
	  break;
	case XOR:
	  names = &aarch64_ool_ldeor_names;
	  break;
	case AND:
	  operands[2] = expand_simple_unop (<MODE>mode, NOT, operands[2],
					    NULL, 1);
	  names = &aarch64_ool_ldclr_names;
	  break;
	default:
	  gcc_unreachable ();
	}
      machine_mode mode = <MODE>mode;
      rtx func = aarch64_atomic_ool_func (mode, operands[3], names);
      rtx rval = emit_library_call_value (func, operands[0], LCT_NORMAL, mode,
					  operands[2], mode,
					  XEXP (operands[1], 0), Pmode);
      emit_move_insn (operands[0], rval);
      DONE;
    }
  else
    gen = gen_aarch64_atomic_fetch_<atomic_optab><mode>;

  emit_insn (gen (operands[0], operands[1], operands[2], operands[3]));
  DONE;
})
495
;; LL/SC fetch-and-<op>: operand 0 receives the old memory value; split
;; after the epilogue.
(define_insn_and_split "aarch64_atomic_fetch_<atomic_optab><mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=&r")
    (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q"))
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(atomic_op:ALLI (match_dup 1)
	(match_operand:ALLI 2 "<atomic_op_operand>" "r<const_atomic>"))
       (match_operand:SI 3 "const_int_operand")]		;; model
      UNSPECV_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:ALLI 4 "=&r"))
   (clobber (match_scratch:SI 5 "=&r"))]
  ""
  "#"
  "&& epilogue_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (<CODE>, operands[0], operands[4], operands[1],
			     operands[2], operands[3], operands[5]);
    DONE;
  }
)
518
;; LSE load-operate returning the original memory value in operand 0.
(define_insn "aarch64_atomic_fetch_<atomic_ldoptab><mode>_lse"
  [(set (match_operand:ALLI 0 "register_operand" "=r")
	(match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q"))
   (set (match_dup 1)
	(unspec_volatile:ALLI
	  [(match_dup 1)
	   (match_operand:ALLI 2 "register_operand" "r")
	   (match_operand:SI 3 "const_int_operand")]
	  ATOMIC_LDOP))]
  "TARGET_LSE"
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
    if (is_mm_relaxed (model))
      return "ld<atomic_ldop><atomic_sfx>\t%<w>2, %<w>0, %1";
    else if (is_mm_acquire (model) || is_mm_consume (model))
      return "ld<atomic_ldop>a<atomic_sfx>\t%<w>2, %<w>0, %1";
    else if (is_mm_release (model))
      return "ld<atomic_ldop>l<atomic_sfx>\t%<w>2, %<w>0, %1";
    else
      return "ld<atomic_ldop>al<atomic_sfx>\t%<w>2, %<w>0, %1";
  }
)
541
;; Atomic fetch-and-NAND (returns the old value); always LL/SC since NAND
;; has no LSE equivalent.
(define_insn_and_split "atomic_fetch_nand<mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=&r")
    (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q"))
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(not:ALLI
	 (and:ALLI (match_dup 1)
	   (match_operand:ALLI 2 "aarch64_logical_operand" "r<lconst_atomic>")))
       (match_operand:SI 3 "const_int_operand")]		;; model
      UNSPECV_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:ALLI 4 "=&r"))
   (clobber (match_scratch:SI 5 "=&r"))]
  ""
  "#"
  "&& epilogue_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (NOT, operands[0], operands[4], operands[1],
			     operands[2], operands[3], operands[5]);
    DONE;
  }
)
565
7803ec5e 566;; Load-operate-store, returning the updated memory data.
68729b06
MW
567
;; Expand atomic <op>-and-fetch (returns the UPDATED value).  With LSE or
;; outline atomics there is no direct instruction, so do a fetch-<op> and
;; recompute the new value from the returned old value.
(define_expand "atomic_<atomic_optab>_fetch<mode>"
 [(match_operand:ALLI 0 "register_operand")
  (atomic_op:ALLI
   (match_operand:ALLI 1 "aarch64_sync_memory_operand")
   (match_operand:ALLI 2 "<atomic_op_operand>"))
  (match_operand:SI 3 "const_int_operand")]
  ""
{
  /* Use an atomic load-operate instruction when possible.  In this case
     we will re-compute the result from the original mem value.  */
  if (TARGET_LSE || TARGET_OUTLINE_ATOMICS)
    {
      rtx tmp = gen_reg_rtx (<MODE>mode);
      operands[2] = force_reg (<MODE>mode, operands[2]);
      emit_insn (gen_atomic_fetch_<atomic_optab><mode>
		 (tmp, operands[1], operands[2], operands[3]));
      tmp = expand_simple_binop (<MODE>mode, <CODE>, tmp, operands[2],
				 operands[0], 1, OPTAB_WIDEN);
      emit_move_insn (operands[0], tmp);
    }
  else
    {
      emit_insn (gen_aarch64_atomic_<atomic_optab>_fetch<mode>
		 (operands[0], operands[1], operands[2], operands[3]));
    }
  DONE;
})
595
;; LL/SC <op>-and-fetch: operand 0 receives the new memory value; split
;; after the epilogue.
(define_insn_and_split "aarch64_atomic_<atomic_optab>_fetch<mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=&r")
    (atomic_op:ALLI
      (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q")
      (match_operand:ALLI 2 "<atomic_op_operand>" "r<const_atomic>")))
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(match_dup 1) (match_dup 2)
       (match_operand:SI 3 "const_int_operand")]		;; model
      UNSPECV_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  ""
  "#"
  "&& epilogue_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (<CODE>, NULL, operands[0], operands[1],
			     operands[2], operands[3], operands[4]);
    DONE;
  }
)
618
;; Atomic NAND-and-fetch (returns the new value); always LL/SC.
(define_insn_and_split "atomic_nand_fetch<mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=&r")
    (not:ALLI
      (and:ALLI
	(match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q")
	(match_operand:ALLI 2 "aarch64_logical_operand" "r<lconst_atomic>"))))
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(match_dup 1) (match_dup 2)
       (match_operand:SI 3 "const_int_operand")]		;; model
      UNSPECV_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  ""
  "#"
  "&& epilogue_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (NOT, NULL, operands[0], operands[1],
			     operands[2], operands[3], operands[4]);
    DONE;
  }
)
642
;; Atomic load zero-extended into a wider destination register.  LDR/LDAR
;; of a narrow mode already zero-fills the rest of the register, so the
;; same instructions serve for the extending form.
(define_insn "*atomic_load<ALLX:mode>_zext<SD_HSDI:mode>"
  [(set (match_operand:SD_HSDI 0 "register_operand" "=r")
    (zero_extend:SD_HSDI
      (unspec_volatile:ALLX
	[(match_operand:ALLX 1 "aarch64_sync_memory_operand" "Q")
	 (match_operand:SI 2 "const_int_operand")]		;; model
       UNSPECV_LDA)))]
  "GET_MODE_SIZE (<SD_HSDI:MODE>mode) > GET_MODE_SIZE (<ALLX:MODE>mode)"
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_release (model))
      return "ldr<ALLX:atomic_sfx>\t%<ALLX:w>0, %1";
    else
      return "ldar<ALLX:atomic_sfx>\t%<ALLX:w>0, %1";
  }
)
659
;; Expand an atomic load, preferring the weaker LDAPR (RCpc) form for
;; acquire loads when the target supports it.
(define_expand "atomic_load<mode>"
  [(match_operand:ALLI 0 "register_operand" "=r")
   (match_operand:ALLI 1 "aarch64_sync_memory_operand" "Q")
   (match_operand:SI 2 "const_int_operand")]
  ""
  {
    /* If TARGET_RCPC and this is an ACQUIRE load, then expand to a pattern
       using UNSPECV_LDAP.  */
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (TARGET_RCPC
	&& (is_mm_acquire (model)
	    || is_mm_acq_rel (model)))
      emit_insn (gen_aarch64_atomic_load<mode>_rcpc (operands[0], operands[1],
						     operands[2]));
    else
      emit_insn (gen_aarch64_atomic_load<mode> (operands[0], operands[1],
						operands[2]));
    DONE;
  }
)
680
;; RCpc acquire load (LDAPR): weaker ordering than LDAR, sufficient for
;; C++ acquire semantics on TARGET_RCPC.
(define_insn "aarch64_atomic_load<mode>_rcpc"
  [(set (match_operand:ALLI 0 "register_operand" "=r")
    (unspec_volatile:ALLI
      [(match_operand:ALLI 1 "aarch64_sync_memory_operand" "Q")
       (match_operand:SI 2 "const_int_operand")]		;; model
      UNSPECV_LDAP))]
  "TARGET_RCPC"
  "ldapr<atomic_sfx>\t%<w>0, %1"
)
690
;; Plain atomic load: LDR for relaxed/consume/release models, LDAR
;; otherwise (acquire and seq-cst).
(define_insn "aarch64_atomic_load<mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=r")
    (unspec_volatile:ALLI
      [(match_operand:ALLI 1 "aarch64_sync_memory_operand" "Q")
       (match_operand:SI 2 "const_int_operand")]		;; model
      UNSPECV_LDA))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_release (model))
      return "ldr<atomic_sfx>\t%<w>0, %1";
    else
      return "ldar<atomic_sfx>\t%<w>0, %1";
  }
)
706
;; RCpc acquire load combined with zero extension into a wider register;
;; LDAPR of the narrow mode already zero-fills the destination.
(define_insn "*aarch64_atomic_load<ALLX:mode>_rcpc_zext"
  [(set (match_operand:SD_HSDI 0 "register_operand" "=r")
    (zero_extend:SD_HSDI
      (unspec_volatile:ALLX
	[(match_operand:ALLX 1 "aarch64_sync_memory_operand" "Q")
	 (match_operand:SI 2 "const_int_operand")]		;; model
	UNSPECV_LDAP)))]
  "TARGET_RCPC && (<SD_HSDI:sizen> > <ALLX:sizen>)"
  "ldapr<ALLX:atomic_sfx>\t%w0, %1"
)
717
;; RCpc acquire load combined with sign extension, using the LDAPURS*
;; instructions; requires the FEAT_LRCPC2 extension (TARGET_RCPC2).
(define_insn "*aarch64_atomic_load<ALLX:mode>_rcpc_sext"
  [(set (match_operand:GPI 0 "register_operand" "=r")
    (sign_extend:GPI
      (unspec_volatile:ALLX
	[(match_operand:ALLX 1 "aarch64_sync_memory_operand" "Q")
	 (match_operand:SI 2 "const_int_operand")]		;; model
	UNSPECV_LDAP)))]
  "TARGET_RCPC2 && (<GPI:sizen> > <ALLX:sizen>)"
  "ldapurs<ALLX:size>\t%<GPI:w>0, %1"
)
728
;; Atomic store: STR for relaxed/consume/acquire models, otherwise STLR,
;; or STLUR (alternative 1, offset addressing) when rcpc8_4 is available.
(define_insn "atomic_store<mode>"
  [(set (match_operand:ALLI 0 "aarch64_rcpc_memory_operand" "=Q,Ust")
    (unspec_volatile:ALLI
      [(match_operand:ALLI 1 "general_operand" "rZ,rZ")
       (match_operand:SI 2 "const_int_operand")]		;; model
      UNSPECV_STL))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_acquire (model))
      return "str<atomic_sfx>\t%<w>1, %0";
    else if (which_alternative == 0)
      return "stlr<atomic_sfx>\t%<w>1, %0";
    else
      return "stlur<atomic_sfx>\t%<w>1, %0";
  }
  [(set_attr "arch" "*,rcpc8_4")]
)
747
;; Exclusive load for sub-word modes, zero-extended into an SImode
;; destination: LDXR or, for acquire-class models, LDAXR.
(define_insn "@aarch64_load_exclusive<mode>"
  [(set (match_operand:SI 0 "register_operand" "=r")
    (zero_extend:SI
      (unspec_volatile:SHORT
	[(match_operand:SHORT 1 "aarch64_sync_memory_operand" "Q")
	 (match_operand:SI 2 "const_int_operand")]
	UNSPECV_LX)))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_release (model))
      return "ldxr<atomic_sfx>\t%w0, %1";
    else
      return "ldaxr<atomic_sfx>\t%w0, %1";
  }
)
764
;; Exclusive load for SImode/DImode: LDXR or LDAXR depending on the model.
(define_insn "@aarch64_load_exclusive<mode>"
  [(set (match_operand:GPI 0 "register_operand" "=r")
    (unspec_volatile:GPI
      [(match_operand:GPI 1 "aarch64_sync_memory_operand" "Q")
       (match_operand:SI 2 "const_int_operand")]
      UNSPECV_LX))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_release (model))
      return "ldxr\t%<w>0, %1";
    else
      return "ldaxr\t%<w>0, %1";
  }
)
780
;; Paired exclusive load (LDXP/LDAXP): reads a TImode memory location into
;; two DImode registers, operands 0 (low) and 1 (high).
(define_insn "aarch64_load_exclusive_pair"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(unspec_volatile:DI
	  [(match_operand:TI 2 "aarch64_sync_memory_operand" "Q")
	   (match_operand:SI 3 "const_int_operand")]
	  UNSPECV_LX))
   (set (match_operand:DI 1 "register_operand" "=r")
	(unspec_volatile:DI [(match_dup 2) (match_dup 3)] UNSPECV_LX))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_release (model))
      return "ldxp\t%0, %1, %2";
    else
      return "ldaxp\t%0, %1, %2";
  }
)
798
;; Exclusive store (STXR/STLXR): operand 0 receives the success flag
;; (0 = store succeeded, 1 = lost the exclusive monitor).
(define_insn "@aarch64_store_exclusive<mode>"
  [(set (match_operand:SI 0 "register_operand" "=&r")
    (unspec_volatile:SI [(const_int 0)] UNSPECV_SX))
   (set (match_operand:ALLI 1 "aarch64_sync_memory_operand" "=Q")
    (unspec_volatile:ALLI
      [(match_operand:ALLI 2 "aarch64_reg_or_zero" "rZ")
       (match_operand:SI 3 "const_int_operand")]
      UNSPECV_SX))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_acquire (model))
      return "stxr<atomic_sfx>\t%w0, %<w>2, %1";
    else
      return "stlxr<atomic_sfx>\t%w0, %<w>2, %1";
  }
)
816
;; Paired exclusive store (STXP/STLXP) of two DImode registers to a TImode
;; location; operand 0 receives the success flag.
(define_insn "aarch64_store_exclusive_pair"
  [(set (match_operand:SI 0 "register_operand" "=&r")
	(unspec_volatile:SI [(const_int 0)] UNSPECV_SX))
   (set (match_operand:TI 1 "aarch64_sync_memory_operand" "=Q")
	(unspec_volatile:TI
	  [(match_operand:DI 2 "aarch64_reg_or_zero" "rZ")
	   (match_operand:DI 3 "aarch64_reg_or_zero" "rZ")
	   (match_operand:SI 4 "const_int_operand")]
	  UNSPECV_SX))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[4]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_acquire (model))
      return "stxp\t%w0, %x2, %x3, %1";
    else
      return "stlxp\t%w0, %x2, %x3, %1";
  }
)
835
;; Memory fence: relaxed and consume need no code; all stronger models
;; emit a DMB.
(define_expand "mem_thread_fence"
  [(match_operand:SI 0 "const_int_operand")]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[0]));
    if (!(is_mm_relaxed (model) || is_mm_consume (model)))
      emit_insn (gen_dmb (operands[0]));
    DONE;
  }
)
846
;; Expand a DMB barrier.  The barrier is modelled as a volatile BLKmode
;; memory reference on a scratch address so it orders against all memory.
(define_expand "dmb"
  [(set (match_dup 1)
    (unspec:BLK [(match_dup 1) (match_operand:SI 0 "const_int_operand")]
     UNSPEC_MB))]
   ""
   {
    operands[1] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
    MEM_VOLATILE_P (operands[1]) = 1;
   }
)
857
;; DMB instruction: "dmb ishld" (load barrier) suffices for acquire;
;; everything else gets the full "dmb ish".
(define_insn "*dmb"
  [(set (match_operand:BLK 0 "" "")
    (unspec:BLK [(match_dup 0) (match_operand:SI 1 "const_int_operand")]
     UNSPEC_MB))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[1]));
    if (is_mm_acquire (model))
      return "dmb\\tishld";
    else
      return "dmb\\tish";
  }
)