;; Machine description for AArch64 processor synchronization primitives.
;; Copyright (C) 2009-2019 Free Software Foundation, Inc.
;; Contributed by ARM Ltd.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; Instruction patterns.

(define_expand "@atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "register_operand" "")                   ;; bool out
   (match_operand:ALLI 1 "register_operand" "")                 ;; val out
   (match_operand:ALLI 2 "aarch64_sync_memory_operand" "")      ;; memory
   (match_operand:ALLI 3 "nonmemory_operand" "")                ;; expected
   (match_operand:ALLI 4 "aarch64_reg_or_zero" "")              ;; desired
   (match_operand:SI 5 "const_int_operand")                     ;; is_weak
   (match_operand:SI 6 "const_int_operand")                     ;; mod_s
   (match_operand:SI 7 "const_int_operand")]                    ;; mod_f
  ""
  {
    aarch64_expand_compare_and_swap (operands);
    DONE;
  }
)

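;; For illustration (a sketch added here, not part of the upstream file):
;; a source-level strong compare-exchange such as
;;
;;   bool ok = __atomic_compare_exchange_n (ptr, &expected, desired,
;;                                          /*weak=*/0, __ATOMIC_SEQ_CST,
;;                                          __ATOMIC_SEQ_CST);
;;
;; reaches the expander above with operand 5 (is_weak) equal to 0 and
;; operands 6/7 holding the success/failure memory models as const_ints.
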
(define_mode_attr cas_short_expected_pred
  [(QI "aarch64_reg_or_imm") (HI "aarch64_plushi_operand")])

(define_insn_and_split "@aarch64_compare_and_swap<mode>"
  [(set (reg:CC CC_REGNUM)                                      ;; bool out
    (unspec_volatile:CC [(const_int 0)] UNSPECV_ATOMIC_CMPSW))
   (set (match_operand:SI 0 "register_operand" "=&r")           ;; val out
    (zero_extend:SI
      (match_operand:SHORT 1 "aarch64_sync_memory_operand" "+Q"))) ;; memory
   (set (match_dup 1)
    (unspec_volatile:SHORT
      [(match_operand:SHORT 2 "<cas_short_expected_pred>" "rn") ;; expected
       (match_operand:SHORT 3 "aarch64_reg_or_zero" "rZ")       ;; desired
       (match_operand:SI 4 "const_int_operand")                 ;; is_weak
       (match_operand:SI 5 "const_int_operand")                 ;; mod_s
       (match_operand:SI 6 "const_int_operand")]                ;; mod_f
      UNSPECV_ATOMIC_CMPSW))
   (clobber (match_scratch:SI 7 "=&r"))]
  ""
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    aarch64_split_compare_and_swap (operands);
    DONE;
  }
)

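;; Rough sketch of the code aarch64_split_compare_and_swap produces for a
;; sequentially consistent strong CAS (illustrative only; the authoritative
;; logic lives in aarch64.c, and the register/label names here are invented):
;;
;;   retry:
;;     ldaxr   w0, [mem]                 // load-exclusive with acquire
;;     cmp     w0, w_expected
;;     bne     done                      // value mismatch: fail
;;     stlxr   w_tmp, w_desired, [mem]   // store-exclusive with release
;;     cbnz    w_tmp, retry              // lost exclusivity: try again
;;   done:
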
(define_insn_and_split "@aarch64_compare_and_swap<mode>"
  [(set (reg:CC CC_REGNUM)                                      ;; bool out
    (unspec_volatile:CC [(const_int 0)] UNSPECV_ATOMIC_CMPSW))
   (set (match_operand:GPI 0 "register_operand" "=&r")          ;; val out
    (match_operand:GPI 1 "aarch64_sync_memory_operand" "+Q"))   ;; memory
   (set (match_dup 1)
    (unspec_volatile:GPI
      [(match_operand:GPI 2 "aarch64_plus_operand" "rIJ")       ;; expect
       (match_operand:GPI 3 "aarch64_reg_or_zero" "rZ")         ;; desired
       (match_operand:SI 4 "const_int_operand")                 ;; is_weak
       (match_operand:SI 5 "const_int_operand")                 ;; mod_s
       (match_operand:SI 6 "const_int_operand")]                ;; mod_f
      UNSPECV_ATOMIC_CMPSW))
   (clobber (match_scratch:SI 7 "=&r"))]
  ""
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    aarch64_split_compare_and_swap (operands);
    DONE;
  }
)

(define_insn "@aarch64_compare_and_swap<mode>_lse"
  [(set (match_operand:SI 0 "register_operand" "+r")            ;; val out
    (zero_extend:SI
      (match_operand:SHORT 1 "aarch64_sync_memory_operand" "+Q"))) ;; memory
   (set (match_dup 1)
    (unspec_volatile:SHORT
      [(match_dup 0)                                            ;; expected
       (match_operand:SHORT 2 "aarch64_reg_or_zero" "rZ")       ;; desired
       (match_operand:SI 3 "const_int_operand")]                ;; mod_s
      UNSPECV_ATOMIC_CMPSW))]
  "TARGET_LSE"
{
  enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
  if (is_mm_relaxed (model))
    return "cas<atomic_sfx>\t%<w>0, %<w>2, %1";
  else if (is_mm_acquire (model) || is_mm_consume (model))
    return "casa<atomic_sfx>\t%<w>0, %<w>2, %1";
  else if (is_mm_release (model))
    return "casl<atomic_sfx>\t%<w>0, %<w>2, %1";
  else
    return "casal<atomic_sfx>\t%<w>0, %<w>2, %1";
})

(define_insn "@aarch64_compare_and_swap<mode>_lse"
  [(set (match_operand:GPI 0 "register_operand" "+r")           ;; val out
    (match_operand:GPI 1 "aarch64_sync_memory_operand" "+Q"))   ;; memory
   (set (match_dup 1)
    (unspec_volatile:GPI
      [(match_dup 0)                                            ;; expected
       (match_operand:GPI 2 "aarch64_reg_or_zero" "rZ")         ;; desired
       (match_operand:SI 3 "const_int_operand")]                ;; mod_s
      UNSPECV_ATOMIC_CMPSW))]
  "TARGET_LSE"
{
  enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
  if (is_mm_relaxed (model))
    return "cas<atomic_sfx>\t%<w>0, %<w>2, %1";
  else if (is_mm_acquire (model) || is_mm_consume (model))
    return "casa<atomic_sfx>\t%<w>0, %<w>2, %1";
  else if (is_mm_release (model))
    return "casl<atomic_sfx>\t%<w>0, %<w>2, %1";
  else
    return "casal<atomic_sfx>\t%<w>0, %<w>2, %1";
})

(define_expand "atomic_exchange<mode>"
  [(match_operand:ALLI 0 "register_operand" "")
   (match_operand:ALLI 1 "aarch64_sync_memory_operand" "")
   (match_operand:ALLI 2 "aarch64_reg_or_zero" "")
   (match_operand:SI 3 "const_int_operand" "")]
  ""
  {
    rtx (*gen) (rtx, rtx, rtx, rtx);

    /* Use an atomic SWP when available.  */
    if (TARGET_LSE)
      gen = gen_aarch64_atomic_exchange<mode>_lse;
    else
      gen = gen_aarch64_atomic_exchange<mode>;

    emit_insn (gen (operands[0], operands[1], operands[2], operands[3]));

    DONE;
  }
)

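;; Illustrative mapping (a sketch, not from the upstream file): with LSE
;; available (e.g. -march=armv8.1-a),
;;
;;   old = __atomic_exchange_n (ptr, newval, __ATOMIC_SEQ_CST);
;;
;; uses aarch64_atomic_exchange<mode>_lse and emits a single "swpal";
;; without LSE it goes through the generic pattern, whose splitter emits
;; a load/store-exclusive retry loop.
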
(define_insn_and_split "aarch64_atomic_exchange<mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=&r")         ;; output
    (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q"))  ;; memory
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(match_operand:ALLI 2 "aarch64_reg_or_zero" "rZ")        ;; input
       (match_operand:SI 3 "const_int_operand" "")]             ;; model
      UNSPECV_ATOMIC_EXCHG))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  ""
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (SET, operands[0], NULL, operands[1],
			     operands[2], operands[3], operands[4]);
    DONE;
  }
)

(define_insn "aarch64_atomic_exchange<mode>_lse"
  [(set (match_operand:ALLI 0 "register_operand" "=r")
    (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q"))
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(match_operand:ALLI 2 "aarch64_reg_or_zero" "rZ")
       (match_operand:SI 3 "const_int_operand" "")]
      UNSPECV_ATOMIC_EXCHG))]
  "TARGET_LSE"
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
    if (is_mm_relaxed (model))
      return "swp<atomic_sfx>\t%<w>2, %<w>0, %1";
    else if (is_mm_acquire (model) || is_mm_consume (model))
      return "swpa<atomic_sfx>\t%<w>2, %<w>0, %1";
    else if (is_mm_release (model))
      return "swpl<atomic_sfx>\t%<w>2, %<w>0, %1";
    else
      return "swpal<atomic_sfx>\t%<w>2, %<w>0, %1";
  }
)

(define_expand "atomic_<atomic_optab><mode>"
  [(match_operand:ALLI 0 "aarch64_sync_memory_operand" "")
   (atomic_op:ALLI
    (match_operand:ALLI 1 "<atomic_op_operand>" "")
    (match_operand:SI 2 "const_int_operand"))]
  ""
  {
    rtx (*gen) (rtx, rtx, rtx);

    /* Use an atomic load-operate instruction when possible.  */
    if (TARGET_LSE)
      {
	switch (<CODE>)
	  {
	  case MINUS:
	    operands[1] = expand_simple_unop (<MODE>mode, NEG, operands[1],
					      NULL, 1);
	    /* fallthru */
	  case PLUS:
	    gen = gen_aarch64_atomic_add<mode>_lse;
	    break;
	  case IOR:
	    gen = gen_aarch64_atomic_ior<mode>_lse;
	    break;
	  case XOR:
	    gen = gen_aarch64_atomic_xor<mode>_lse;
	    break;
	  case AND:
	    operands[1] = expand_simple_unop (<MODE>mode, NOT, operands[1],
					      NULL, 1);
	    gen = gen_aarch64_atomic_bic<mode>_lse;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	operands[1] = force_reg (<MODE>mode, operands[1]);
      }
    else
      gen = gen_aarch64_atomic_<atomic_optab><mode>;

    emit_insn (gen (operands[0], operands[1], operands[2]));
    DONE;
  }
)

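;; Worked example (added commentary, not from the upstream file): LSE has
;; no subtract or plain-AND memory operation, so the expander above maps
;; them onto the instructions that do exist.  For instance an atomic
;; subtraction whose result is unused,
;;
;;   __atomic_fetch_sub (ptr, 5, __ATOMIC_RELAXED);
;;
;; first negates the operand (5 -> -5) and then uses the add form (LDADD),
;; while AND inverts the operand (NOT) and uses the bic form (LDCLR).
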
(define_insn_and_split "aarch64_atomic_<atomic_optab><mode>"
  [(set (match_operand:ALLI 0 "aarch64_sync_memory_operand" "+Q")
    (unspec_volatile:ALLI
      [(atomic_op:ALLI (match_dup 0)
	(match_operand:ALLI 1 "<atomic_op_operand>" "r<const_atomic>"))
       (match_operand:SI 2 "const_int_operand")]
      UNSPECV_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:ALLI 3 "=&r"))
   (clobber (match_scratch:SI 4 "=&r"))]
  ""
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (<CODE>, NULL, operands[3], operands[0],
			     operands[1], operands[2], operands[4]);
    DONE;
  }
)

;; It is tempting to want to use ST<OP> for relaxed and release
;; memory models here.  However, that is incompatible with the
;; C++ memory model for the following case:
;;
;;	atomic_fetch_add(ptr, 1, memory_order_relaxed);
;;	atomic_thread_fence(memory_order_acquire);
;;
;; The problem is that the architecture says that ST<OP> (and LD<OP>
;; insns where the destination is XZR) are not regarded as a read.
;; However we also implement the acquire memory barrier with DMB LD,
;; and so the ST<OP> is not blocked by the barrier.

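;; An illustrative failure sketch (added commentary, not upstream): had
;; the sequence above been emitted as
;;
;;	stadd	w1, [x0]	// architecturally not a read
;;	dmb	ishld		// orders loads only
;;
;; the barrier would not order the STADD against later accesses.  Using
;; the LD<OP> form with a live destination register makes the operation
;; a read, which DMB LD does order.
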
(define_insn "aarch64_atomic_<atomic_ldoptab><mode>_lse"
  [(set (match_operand:ALLI 0 "aarch64_sync_memory_operand" "+Q")
    (unspec_volatile:ALLI
      [(match_dup 0)
       (match_operand:ALLI 1 "register_operand" "r")
       (match_operand:SI 2 "const_int_operand")]
      ATOMIC_LDOP))
   (clobber (match_scratch:ALLI 3 "=r"))]
  "TARGET_LSE"
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (is_mm_relaxed (model))
      return "ld<atomic_ldop><atomic_sfx>\t%<w>1, %<w>3, %0";
    else if (is_mm_release (model))
      return "ld<atomic_ldop>l<atomic_sfx>\t%<w>1, %<w>3, %0";
    else if (is_mm_acquire (model) || is_mm_consume (model))
      return "ld<atomic_ldop>a<atomic_sfx>\t%<w>1, %<w>3, %0";
    else
      return "ld<atomic_ldop>al<atomic_sfx>\t%<w>1, %<w>3, %0";
  }
)

(define_insn_and_split "atomic_nand<mode>"
  [(set (match_operand:ALLI 0 "aarch64_sync_memory_operand" "+Q")
    (unspec_volatile:ALLI
      [(not:ALLI
	(and:ALLI (match_dup 0)
	  (match_operand:ALLI 1 "aarch64_logical_operand" "r<lconst_atomic>")))
       (match_operand:SI 2 "const_int_operand")]                ;; model
      UNSPECV_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:ALLI 3 "=&r"))
   (clobber (match_scratch:SI 4 "=&r"))]
  ""
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (NOT, NULL, operands[3], operands[0],
			     operands[1], operands[2], operands[4]);
    DONE;
  }
)

;; Load-operate-store, returning the original memory data.

(define_expand "atomic_fetch_<atomic_optab><mode>"
  [(match_operand:ALLI 0 "register_operand" "")
   (match_operand:ALLI 1 "aarch64_sync_memory_operand" "")
   (atomic_op:ALLI
    (match_operand:ALLI 2 "<atomic_op_operand>" "")
    (match_operand:SI 3 "const_int_operand"))]
  ""
{
  rtx (*gen) (rtx, rtx, rtx, rtx);

  /* Use an atomic load-operate instruction when possible.  */
  if (TARGET_LSE)
    {
      switch (<CODE>)
	{
	case MINUS:
	  operands[2] = expand_simple_unop (<MODE>mode, NEG, operands[2],
					    NULL, 1);
	  /* fallthru */
	case PLUS:
	  gen = gen_aarch64_atomic_fetch_add<mode>_lse;
	  break;
	case IOR:
	  gen = gen_aarch64_atomic_fetch_ior<mode>_lse;
	  break;
	case XOR:
	  gen = gen_aarch64_atomic_fetch_xor<mode>_lse;
	  break;
	case AND:
	  operands[2] = expand_simple_unop (<MODE>mode, NOT, operands[2],
					    NULL, 1);
	  gen = gen_aarch64_atomic_fetch_bic<mode>_lse;
	  break;
	default:
	  gcc_unreachable ();
	}
      operands[2] = force_reg (<MODE>mode, operands[2]);
    }
  else
    gen = gen_aarch64_atomic_fetch_<atomic_optab><mode>;

  emit_insn (gen (operands[0], operands[1], operands[2], operands[3]));
  DONE;
})

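;; Illustrative mapping (a sketch, not from the upstream file): with
;; TARGET_LSE,
;;
;;   old = __atomic_fetch_add (ptr, 1, __ATOMIC_SEQ_CST);
;;
;; goes through aarch64_atomic_fetch_add<mode>_lse and emits a single
;; "ldaddal", leaving the pre-operation value in the destination register.
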
(define_insn_and_split "aarch64_atomic_fetch_<atomic_optab><mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=&r")
    (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q"))
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(atomic_op:ALLI (match_dup 1)
	(match_operand:ALLI 2 "<atomic_op_operand>" "r<const_atomic>"))
       (match_operand:SI 3 "const_int_operand")]                ;; model
      UNSPECV_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:ALLI 4 "=&r"))
   (clobber (match_scratch:SI 5 "=&r"))]
  ""
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (<CODE>, operands[0], operands[4], operands[1],
			     operands[2], operands[3], operands[5]);
    DONE;
  }
)

(define_insn "aarch64_atomic_fetch_<atomic_ldoptab><mode>_lse"
  [(set (match_operand:ALLI 0 "register_operand" "=r")
    (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q"))
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(match_dup 1)
       (match_operand:ALLI 2 "register_operand" "r")
       (match_operand:SI 3 "const_int_operand")]
      ATOMIC_LDOP))]
  "TARGET_LSE"
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
    if (is_mm_relaxed (model))
      return "ld<atomic_ldop><atomic_sfx>\t%<w>2, %<w>0, %1";
    else if (is_mm_acquire (model) || is_mm_consume (model))
      return "ld<atomic_ldop>a<atomic_sfx>\t%<w>2, %<w>0, %1";
    else if (is_mm_release (model))
      return "ld<atomic_ldop>l<atomic_sfx>\t%<w>2, %<w>0, %1";
    else
      return "ld<atomic_ldop>al<atomic_sfx>\t%<w>2, %<w>0, %1";
  }
)

(define_insn_and_split "atomic_fetch_nand<mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=&r")
    (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q"))
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(not:ALLI
	(and:ALLI (match_dup 1)
	  (match_operand:ALLI 2 "aarch64_logical_operand" "r<lconst_atomic>")))
       (match_operand:SI 3 "const_int_operand")]                ;; model
      UNSPECV_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:ALLI 4 "=&r"))
   (clobber (match_scratch:SI 5 "=&r"))]
  ""
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (NOT, operands[0], operands[4], operands[1],
			     operands[2], operands[3], operands[5]);
    DONE;
  }
)

;; Load-operate-store, returning the updated memory data.

(define_expand "atomic_<atomic_optab>_fetch<mode>"
  [(match_operand:ALLI 0 "register_operand" "")
   (atomic_op:ALLI
    (match_operand:ALLI 1 "aarch64_sync_memory_operand" "")
    (match_operand:ALLI 2 "<atomic_op_operand>" ""))
   (match_operand:SI 3 "const_int_operand")]
  ""
{
  /* Use an atomic load-operate instruction when possible.  In this case
     we will re-compute the result from the original mem value.  */
  if (TARGET_LSE)
    {
      rtx tmp = gen_reg_rtx (<MODE>mode);
      operands[2] = force_reg (<MODE>mode, operands[2]);
      emit_insn (gen_atomic_fetch_<atomic_optab><mode>
		 (tmp, operands[1], operands[2], operands[3]));
      tmp = expand_simple_binop (<MODE>mode, <CODE>, tmp, operands[2],
				 operands[0], 1, OPTAB_WIDEN);
      emit_move_insn (operands[0], tmp);
    }
  else
    {
      emit_insn (gen_aarch64_atomic_<atomic_optab>_fetch<mode>
		 (operands[0], operands[1], operands[2], operands[3]));
    }
  DONE;
})

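;; Illustrative sketch (added commentary, not upstream): under TARGET_LSE,
;;
;;   newval = __atomic_add_fetch (ptr, 1, __ATOMIC_RELAXED);
;;
;; becomes an "ldadd" that returns the old value, followed by an ordinary
;; "add" to re-compute the updated value; no second atomic operation is
;; needed because memory was already updated.
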
(define_insn_and_split "aarch64_atomic_<atomic_optab>_fetch<mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=&r")
    (atomic_op:ALLI
      (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q")
      (match_operand:ALLI 2 "<atomic_op_operand>" "r<const_atomic>")))
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(match_dup 1) (match_dup 2)
       (match_operand:SI 3 "const_int_operand")]                ;; model
      UNSPECV_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  ""
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (<CODE>, NULL, operands[0], operands[1],
			     operands[2], operands[3], operands[4]);
    DONE;
  }
)

(define_insn_and_split "atomic_nand_fetch<mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=&r")
    (not:ALLI
      (and:ALLI
	(match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q")
	(match_operand:ALLI 2 "aarch64_logical_operand" "r<lconst_atomic>"))))
   (set (match_dup 1)
    (unspec_volatile:ALLI
      [(match_dup 1) (match_dup 2)
       (match_operand:SI 3 "const_int_operand")]                ;; model
      UNSPECV_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  ""
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    aarch64_split_atomic_op (NOT, NULL, operands[0], operands[1],
			     operands[2], operands[3], operands[4]);
    DONE;
  }
)

(define_insn "atomic_load<mode>"
  [(set (match_operand:ALLI 0 "register_operand" "=r")
    (unspec_volatile:ALLI
      [(match_operand:ALLI 1 "aarch64_sync_memory_operand" "Q")
       (match_operand:SI 2 "const_int_operand")]                ;; model
      UNSPECV_LDA))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_release (model))
      return "ldr<atomic_sfx>\t%<w>0, %1";
    else
      return "ldar<atomic_sfx>\t%<w>0, %1";
  }
)

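;; Illustrative mapping (a sketch, not from the upstream file):
;;
;;   v = __atomic_load_n (ptr, __ATOMIC_ACQUIRE);   // -> ldar (load-acquire)
;;   v = __atomic_load_n (ptr, __ATOMIC_RELAXED);   // -> ldr  (plain load)
;;
;; Release ordering is meaningless for a load (it constrains stores), so
;; it is treated like relaxed here.
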
(define_insn "atomic_store<mode>"
  [(set (match_operand:ALLI 0 "aarch64_rcpc_memory_operand" "=Q,Ust")
    (unspec_volatile:ALLI
      [(match_operand:ALLI 1 "general_operand" "rZ,rZ")
       (match_operand:SI 2 "const_int_operand")]                ;; model
      UNSPECV_STL))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_acquire (model))
      return "str<atomic_sfx>\t%<w>1, %0";
    else if (which_alternative == 0)
      return "stlr<atomic_sfx>\t%<w>1, %0";
    else
      return "stlur<atomic_sfx>\t%<w>1, %0";
  }
  [(set_attr "arch" "*,rcpc8_4")]
)

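;; Illustrative mapping (a sketch, not from the upstream file):
;;
;;   __atomic_store_n (ptr, v, __ATOMIC_RELEASE);   // -> stlr (store-release)
;;   __atomic_store_n (ptr, v, __ATOMIC_RELAXED);   // -> str  (plain store)
;;
;; The second alternative emits STLUR (Armv8.4-a, the "rcpc8_4" arch
;; attribute), which unlike STLR accepts an offset addressing form.
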
(define_insn "@aarch64_load_exclusive<mode>"
  [(set (match_operand:SI 0 "register_operand" "=r")
    (zero_extend:SI
      (unspec_volatile:SHORT
	[(match_operand:SHORT 1 "aarch64_sync_memory_operand" "Q")
	 (match_operand:SI 2 "const_int_operand")]
	UNSPECV_LX)))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_release (model))
      return "ldxr<atomic_sfx>\t%w0, %1";
    else
      return "ldaxr<atomic_sfx>\t%w0, %1";
  }
)

(define_insn "@aarch64_load_exclusive<mode>"
  [(set (match_operand:GPI 0 "register_operand" "=r")
    (unspec_volatile:GPI
      [(match_operand:GPI 1 "aarch64_sync_memory_operand" "Q")
       (match_operand:SI 2 "const_int_operand")]
      UNSPECV_LX))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_release (model))
      return "ldxr\t%<w>0, %1";
    else
      return "ldaxr\t%<w>0, %1";
  }
)

(define_insn "@aarch64_store_exclusive<mode>"
  [(set (match_operand:SI 0 "register_operand" "=&r")
    (unspec_volatile:SI [(const_int 0)] UNSPECV_SX))
   (set (match_operand:ALLI 1 "aarch64_sync_memory_operand" "=Q")
    (unspec_volatile:ALLI
      [(match_operand:ALLI 2 "aarch64_reg_or_zero" "rZ")
       (match_operand:SI 3 "const_int_operand")]
      UNSPECV_SX))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
    if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_acquire (model))
      return "stxr<atomic_sfx>\t%w0, %<w>2, %1";
    else
      return "stlxr<atomic_sfx>\t%w0, %<w>2, %1";
  }
)

(define_expand "mem_thread_fence"
  [(match_operand:SI 0 "const_int_operand" "")]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[0]));
    if (!(is_mm_relaxed (model) || is_mm_consume (model)))
      emit_insn (gen_dmb (operands[0]));
    DONE;
  }
)

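;; Illustrative mapping (a sketch, not from the upstream file):
;;
;;   __atomic_thread_fence (__ATOMIC_RELAXED);   // -> no instruction
;;   __atomic_thread_fence (__ATOMIC_ACQUIRE);   // -> dmb ishld
;;   __atomic_thread_fence (__ATOMIC_SEQ_CST);   // -> dmb ish
;;
;; Relaxed and consume fences expand to nothing; all other models go
;; through the "dmb" patterns below.
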
(define_expand "dmb"
  [(set (match_dup 1)
    (unspec:BLK [(match_dup 1) (match_operand:SI 0 "const_int_operand")]
     UNSPEC_MB))]
  ""
  {
    operands[1] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
    MEM_VOLATILE_P (operands[1]) = 1;
  }
)

(define_insn "*dmb"
  [(set (match_operand:BLK 0 "" "")
    (unspec:BLK [(match_dup 0) (match_operand:SI 1 "const_int_operand")]
     UNSPEC_MB))]
  ""
  {
    enum memmodel model = memmodel_from_int (INTVAL (operands[1]));
    if (is_mm_acquire (model))
      return "dmb\\tishld";
    else
      return "dmb\\tish";
  }
)