;; Machine description for ARM processor synchronization primitives.
;; Copyright (C) 2010-2022 Free Software Foundation, Inc.
;; Written by Marcus Shawcroft (marcus.shawcroft@arm.com)
;; 64-bit Atomics by Dave Gilbert (david.gilbert@linaro.org)
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

(define_mode_attr sync_predtab
  [(QI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
   (HI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
   (SI "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER")
   (DI "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN
        && TARGET_HAVE_MEMORY_BARRIER")])

(define_code_iterator syncop [plus minus ior xor and])

(define_code_attr sync_optab
  [(ior "or") (xor "xor") (and "and") (plus "add") (minus "sub")])

(define_mode_attr sync_sfx
  [(QI "b") (HI "h") (SI "") (DI "d")])

(define_expand "memory_barrier"
  [(set (match_dup 0)
        (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
  "TARGET_HAVE_MEMORY_BARRIER"
{
  operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[0]) = 1;
})

(define_insn "*memory_barrier"
  [(set (match_operand:BLK 0 "" "")
        (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
  "TARGET_HAVE_MEMORY_BARRIER"
  {
    if (TARGET_HAVE_DMB)
      return "dmb\\tish";

    if (TARGET_HAVE_DMB_MCR)
      return "mcr\\tp15, 0, r0, c7, c10, 5";

    gcc_unreachable ();
  }
  [(set_attr "length" "4")
   (set_attr "conds" "unconditional")
   (set_attr "predicable" "no")])
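
;; For illustration only (not part of the machine description proper): a
;; fence such as __atomic_thread_fence (__ATOMIC_SEQ_CST) is expanded
;; through the memory_barrier pattern above, so on an ARMv7-A target it
;; becomes a single inner-shareable data memory barrier (sketch):
;;
;;      dmb     ish
;;
;; while DMB-less targets with TARGET_HAVE_DMB_MCR (ARMv6) use the CP15
;; equivalent "mcr p15, 0, r0, c7, c10, 5" shown above.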

(define_insn "atomic_load<mode>"
  [(set (match_operand:QHSI 0 "register_operand" "=r,r,l")
        (unspec_volatile:QHSI
          [(match_operand:QHSI 1 "arm_sync_memory_operand" "Q,Q,Q")
           (match_operand:SI 2 "const_int_operand" "n,Pf,n")]  ;; model
          VUNSPEC_LDA))]
  "TARGET_HAVE_LDACQ"
  {
    if (aarch_mm_needs_acquire (operands[2]))
      {
        if (TARGET_THUMB1)
          return "lda<sync_sfx>\t%0, %1";
        else
          return "lda<sync_sfx>%?\t%0, %1";
      }
    else
      {
        if (TARGET_THUMB1)
          return "ldr<sync_sfx>\t%0, %1";
        else
          return "ldr<sync_sfx>%?\t%0, %1";
      }
  }
  [(set_attr "arch" "32,v8mb,any")
   (set_attr "predicable" "yes")])

(define_insn "atomic_store<mode>"
  [(set (match_operand:QHSI 0 "memory_operand" "=Q,Q,Q")
        (unspec_volatile:QHSI
          [(match_operand:QHSI 1 "general_operand" "r,r,l")
           (match_operand:SI 2 "const_int_operand" "n,Pf,n")]  ;; model
          VUNSPEC_STL))]
  "TARGET_HAVE_LDACQ"
  {
    if (aarch_mm_needs_release (operands[2]))
      {
        if (TARGET_THUMB1)
          return "stl<sync_sfx>\t%1, %0";
        else
          return "stl<sync_sfx>%?\t%1, %0";
      }
    else
      {
        if (TARGET_THUMB1)
          return "str<sync_sfx>\t%1, %0";
        else
          return "str<sync_sfx>%?\t%1, %0";
      }
  }
  [(set_attr "arch" "32,v8mb,any")
   (set_attr "predicable" "yes")])
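
;; For illustration: with TARGET_HAVE_LDACQ the two patterns above select
;; between plain and acquire/release accesses by memory model, roughly
;; (sketch; registers are placeholders):
;;
;;   __atomic_load_n (p, __ATOMIC_RELAXED)      ->  ldr  r0, [r1]
;;   __atomic_load_n (p, __ATOMIC_ACQUIRE)      ->  lda  r0, [r1]
;;   __atomic_store_n (p, v, __ATOMIC_RELAXED)  ->  str  r2, [r1]
;;   __atomic_store_n (p, v, __ATOMIC_RELEASE)  ->  stl  r2, [r1]
;;
;; Sequentially consistent loads and stores also map to LDA/STL, with the
;; narrower LDAB/LDAH/STLB/STLH forms chosen through <sync_sfx>.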

;; An LDRD instruction usable by the atomic_loaddi expander on LPAE targets.

(define_insn "arm_atomic_loaddi2_ldrd"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (unspec_volatile:DI
          [(match_operand:DI 1 "arm_sync_memory_operand" "Q")]
          VUNSPEC_LDRD_ATOMIC))]
  "ARM_DOUBLEWORD_ALIGN && TARGET_HAVE_LPAE"
  "ldrd%?\t%0, %H0, %C1"
  [(set_attr "predicable" "yes")])

;; There are three ways to expand this depending on the architecture
;; features available.  As for the barriers, a load needs a barrier
;; after it on all non-relaxed memory models except when the load
;; has acquire semantics (for ARMv8-A); an illustrative sketch of the
;; three expansions follows the expander below.

(define_expand "atomic_loaddi"
  [(match_operand:DI 0 "s_register_operand")    ;; val out
   (match_operand:DI 1 "mem_noofs_operand")     ;; memory
   (match_operand:SI 2 "const_int_operand")]    ;; model
  "(TARGET_HAVE_LDREXD || TARGET_HAVE_LPAE || TARGET_HAVE_LDACQEXD)
   && ARM_DOUBLEWORD_ALIGN"
{
  memmodel model = memmodel_from_int (INTVAL (operands[2]));

  /* For ARMv8-A we can use an LDAEXD to atomically load two 32-bit registers
     when acquire or stronger semantics are needed.  When the relaxed model is
     used this can be relaxed to a normal LDRD.  */
  if (TARGET_HAVE_LDACQEXD)
    {
      if (is_mm_relaxed (model))
        emit_insn (gen_arm_atomic_loaddi2_ldrd (operands[0], operands[1]));
      else
        emit_insn (gen_arm_load_acquire_exclusivedi (operands[0], operands[1]));

      DONE;
    }

  /* On LPAE targets LDRD and STRD accesses to 64-bit aligned locations are
     64-bit single-copy atomic.  We still need barriers in the appropriate
     places to implement the ordering constraints.  */
  if (TARGET_HAVE_LPAE)
    emit_insn (gen_arm_atomic_loaddi2_ldrd (operands[0], operands[1]));
  else
    emit_insn (gen_arm_load_exclusivedi (operands[0], operands[1]));

  /* All non-relaxed models need a barrier after the load when load-acquire
     instructions are not available.  */
  if (!is_mm_relaxed (model))
    expand_mem_thread_fence (model);

  DONE;
})
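
;; For illustration, a 64-bit __atomic_load_n with __ATOMIC_ACQUIRE expands
;; in one of three ways depending on the target (sketch; registers are
;; placeholders):
;;
;;   ARMv8-A (TARGET_HAVE_LDACQEXD):   ldaexd  r0, r1, [r2]
;;   LPAE (TARGET_HAVE_LPAE):          ldrd    r0, r1, [r2]
;;                                     dmb     ish
;;   LDREXD (TARGET_HAVE_LDREXD):      ldrexd  r0, r1, [r2]
;;                                     dmb     ish
;;
;; A relaxed load needs no trailing barrier and uses the plain LDRD form
;; where available.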

(define_expand "atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "s_register_operand")    ;; bool out
   (match_operand:QHSD 1 "s_register_operand")  ;; val out
   (match_operand:QHSD 2 "mem_noofs_operand")   ;; memory
   (match_operand:QHSD 3 "general_operand")     ;; expected
   (match_operand:QHSD 4 "s_register_operand")  ;; desired
   (match_operand:SI 5 "const_int_operand")     ;; is_weak
   (match_operand:SI 6 "const_int_operand")     ;; mod_s
   (match_operand:SI 7 "const_int_operand")]    ;; mod_f
  "<sync_predtab>"
{
  arm_expand_compare_and_swap (operands);
  DONE;
})
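
;; For illustration: arm_expand_compare_and_swap funnels into the split
;; patterns below, which arm_split_compare_and_swap turns into an LL/SC
;; sequence after reload.  A strong SImode CAS on ARMv7-A looks roughly
;; like (sketch; registers are placeholders):
;;
;;   1: ldrex   r0, [r2]        @ load the current value
;;      cmp     r0, r3          @ compare with the expected value
;;      bne     2f              @ mismatch: fail without storing
;;      strex   r1, r4, [r2]    @ try to store the desired value
;;      cmp     r1, #0          @ 0 = store succeeded
;;      bne     1b              @ reservation lost: retry
;;   2:
;;
;; plus whatever barriers or acquire/release instructions the mod_s/mod_f
;; memory models demand; a weak CAS (is_weak) omits the retry loop.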

;; Constraints of this pattern must be at least as strict as those of the
;; cbranchsi operations in thumb1.md, while remaining as permissive as
;; possible.
(define_insn_and_split "@atomic_compare_and_swap<CCSI:arch><NARROW:mode>_1"
  [(set (match_operand:CCSI 0 "cc_register_operand" "=&c,&l,&l")       ;; bool out
        (unspec_volatile:CCSI [(const_int 0)] VUNSPEC_ATOMIC_CAS))
   (set (match_operand:SI 1 "s_register_operand" "=&r,&l,&l*h")        ;; val out
        (zero_extend:SI
          (match_operand:NARROW 2 "mem_noofs_operand" "+Ua,Ua,Ua")))   ;; memory
   (set (match_dup 2)
        (unspec_volatile:NARROW
          [(match_operand:SI 3 "arm_add_operand" "rIL,lILJ*h,*r")      ;; expected
           (match_operand:NARROW 4 "s_register_operand" "r,r,r")       ;; desired
           (match_operand:SI 5 "const_int_operand")     ;; is_weak
           (match_operand:SI 6 "const_int_operand")     ;; mod_s
           (match_operand:SI 7 "const_int_operand")]    ;; mod_f
          VUNSPEC_ATOMIC_CAS))
   (clobber (match_scratch:SI 8 "=&r,X,X"))]
  "<NARROW:sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_compare_and_swap (operands);
    DONE;
  }
  [(set_attr "arch" "32,v8mb,v8mb")])

(define_mode_attr cas_cmp_operand
  [(SI "arm_add_operand") (DI "cmpdi_operand")])
(define_mode_attr cas_cmp_str
  [(SI "rIL") (DI "rDi")])

;; Constraints of this pattern must be at least as strict as those of the
;; cbranchsi operations in thumb1.md, while remaining as permissive as
;; possible.
(define_insn_and_split "@atomic_compare_and_swap<CCSI:arch><SIDI:mode>_1"
  [(set (match_operand:CCSI 0 "cc_register_operand" "=&c,&l,&l")       ;; bool out
        (unspec_volatile:CCSI [(const_int 0)] VUNSPEC_ATOMIC_CAS))
   (set (match_operand:SIDI 1 "s_register_operand" "=&r,&l,&l*h")      ;; val out
        (match_operand:SIDI 2 "mem_noofs_operand" "+Ua,Ua,Ua"))        ;; memory
   (set (match_dup 2)
        (unspec_volatile:SIDI
          [(match_operand:SIDI 3 "<SIDI:cas_cmp_operand>" "<SIDI:cas_cmp_str>,lILJ*h,*r") ;; expected
           (match_operand:SIDI 4 "s_register_operand" "r,r,r")  ;; desired
           (match_operand:SI 5 "const_int_operand")     ;; is_weak
           (match_operand:SI 6 "const_int_operand")     ;; mod_s
           (match_operand:SI 7 "const_int_operand")]    ;; mod_f
          VUNSPEC_ATOMIC_CAS))
   (clobber (match_scratch:SI 8 "=&r,X,X"))]
  "<SIDI:sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_compare_and_swap (operands);
    DONE;
  }
  [(set_attr "arch" "32,v8mb,v8mb")])

(define_insn_and_split "atomic_exchange<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r,&r")    ;; output
        (match_operand:QHSD 1 "mem_noofs_operand" "+Ua,Ua"))    ;; memory
   (set (match_dup 1)
        (unspec_volatile:QHSD
          [(match_operand:QHSD 2 "s_register_operand" "r,r")    ;; input
           (match_operand:SI 3 "const_int_operand" "")]         ;; model
          VUNSPEC_ATOMIC_XCHG))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r,&l"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (SET, operands[0], NULL, operands[1],
                         operands[2], operands[3], operands[4]);
    DONE;
  }
  [(set_attr "arch" "32,v8mb")])

;; The following mode and code attributes are defined here because they are
;; specific to atomics and are not needed anywhere else.

(define_mode_attr atomic_op_operand
  [(QI "reg_or_int_operand")
   (HI "reg_or_int_operand")
   (SI "reg_or_int_operand")
   (DI "s_register_operand")])

(define_mode_attr atomic_op_str
  [(QI "rn") (HI "rn") (SI "rn") (DI "r")])

(define_code_attr thumb1_atomic_op_str
  [(ior "l,l") (xor "l,l") (and "l,l") (plus "lIJL,r") (minus "lPd,lPd")])

(define_code_attr thumb1_atomic_newop_str
  [(ior "&l,&l") (xor "&l,&l") (and "&l,&l") (plus "&l,&r") (minus "&l,&l")])

;; Constraints of this pattern must be at least as strict as those of the
;; non-atomic operations in thumb1.md, while remaining as permissive as
;; possible.
(define_insn_and_split "atomic_<sync_optab><mode>"
  [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua,Ua,Ua")
        (unspec_volatile:QHSD
          [(syncop:QHSD (match_dup 0)
             (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>,<thumb1_atomic_op_str>"))
           (match_operand:SI 2 "const_int_operand")]    ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 3 "=&r,<thumb1_atomic_newop_str>"))
   (clobber (match_scratch:SI 4 "=&r,&l,&l"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, NULL, operands[3], operands[0],
                         operands[1], operands[2], operands[4]);
    DONE;
  }
  [(set_attr "arch" "32,v8mb,v8mb")])
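
;; For illustration: after reload, arm_split_atomic_op expands the pattern
;; above (and its siblings below) into an LL/SC loop.  An SImode atomic
;; add on ARMv7-A looks roughly like (sketch; registers are placeholders,
;; barriers depend on the memory model):
;;
;;   1: ldrex   r3, [r0]        @ load the current value
;;      add     r3, r3, r1      @ apply <sync_optab>
;;      strex   r2, r3, [r0]    @ try to store the result
;;      cmp     r2, #0          @ 0 = store succeeded
;;      bne     1b              @ reservation lost: retry
;;
;; The atomic_fetch_<op> and atomic_<op>_fetch variants run the same loop
;; but keep the old or the new value, respectively, live in a register.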

;; Constraints of this pattern must be at least as strict as those of the
;; non-atomic NANDs in thumb1.md, while remaining as permissive as possible.
(define_insn_and_split "atomic_nand<mode>"
  [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua,Ua")
        (unspec_volatile:QHSD
          [(not:QHSD
             (and:QHSD (match_dup 0)
               (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>,l")))
           (match_operand:SI 2 "const_int_operand")]    ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 3 "=&r,&l"))
   (clobber (match_scratch:SI 4 "=&r,&l"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, NULL, operands[3], operands[0],
                         operands[1], operands[2], operands[4]);
    DONE;
  }
  [(set_attr "arch" "32,v8mb")])

;; Three alternatives are needed to represent the constraints after the split
;; from thumb1_addsi3: (i) the case where operand1 and the destination can be
;; in different registers, (ii) the case where they are in the same low
;; register, and (iii) the case where they are in the same register without
;; restriction on the register.  We slightly disparage the alternatives that
;; require copying the old value into the register for the new value (see
;; bind_old_new in arm_split_atomic_op).
(define_code_attr thumb1_atomic_fetch_op_str
  [(ior "l,l,l") (xor "l,l,l") (and "l,l,l") (plus "lL,?IJ,?r") (minus "lPd,lPd,lPd")])

(define_code_attr thumb1_atomic_fetch_newop_str
  [(ior "&l,&l,&l") (xor "&l,&l,&l") (and "&l,&l,&l") (plus "&l,&l,&r") (minus "&l,&l,&l")])

(define_code_attr thumb1_atomic_fetch_oldop_str
  [(ior "&r,&r,&r") (xor "&r,&r,&r") (and "&r,&r,&r") (plus "&l,&r,&r") (minus "&l,&l,&l")])

;; Constraints of this pattern must be at least as strict as those of the
;; non-atomic operations in thumb1.md, while remaining as permissive as
;; possible.
(define_insn_and_split "atomic_fetch_<sync_optab><mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r,<thumb1_atomic_fetch_oldop_str>")
        (match_operand:QHSD 1 "mem_noofs_operand" "+Ua,Ua,Ua,Ua"))
   (set (match_dup 1)
        (unspec_volatile:QHSD
          [(syncop:QHSD (match_dup 1)
             (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>,<thumb1_atomic_fetch_op_str>"))
           (match_operand:SI 3 "const_int_operand")]    ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 4 "=&r,<thumb1_atomic_fetch_newop_str>"))
   (clobber (match_scratch:SI 5 "=&r,&l,&l,&l"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, operands[0], operands[4], operands[1],
                         operands[2], operands[3], operands[5]);
    DONE;
  }
  [(set_attr "arch" "32,v8mb,v8mb,v8mb")])

;; Constraints of this pattern must be at least as strict as those of the
;; non-atomic NANDs in thumb1.md, while remaining as permissive as possible.
(define_insn_and_split "atomic_fetch_nand<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r,&r")
        (match_operand:QHSD 1 "mem_noofs_operand" "+Ua,Ua"))
   (set (match_dup 1)
        (unspec_volatile:QHSD
          [(not:QHSD
             (and:QHSD (match_dup 1)
               (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>,l")))
           (match_operand:SI 3 "const_int_operand")]    ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 4 "=&r,&l"))
   (clobber (match_scratch:SI 5 "=&r,&l"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, operands[0], operands[4], operands[1],
                         operands[2], operands[3], operands[5]);
    DONE;
  }
  [(set_attr "arch" "32,v8mb")])

;; Constraints of this pattern must be at least as strict as those of the
;; non-atomic operations in thumb1.md, while remaining as permissive as
;; possible.
(define_insn_and_split "atomic_<sync_optab>_fetch<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r,<thumb1_atomic_newop_str>")
        (syncop:QHSD
          (match_operand:QHSD 1 "mem_noofs_operand" "+Ua,Ua,Ua")
          (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>,<thumb1_atomic_op_str>")))
   (set (match_dup 1)
        (unspec_volatile:QHSD
          [(match_dup 1) (match_dup 2)
           (match_operand:SI 3 "const_int_operand")]    ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r,&l,&l"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, NULL, operands[0], operands[1],
                         operands[2], operands[3], operands[4]);
    DONE;
  }
  [(set_attr "arch" "32,v8mb,v8mb")])

;; Constraints of this pattern must be at least as strict as those of the
;; non-atomic NANDs in thumb1.md, while remaining as permissive as possible.
(define_insn_and_split "atomic_nand_fetch<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r,&l")
        (not:QHSD
          (and:QHSD
            (match_operand:QHSD 1 "mem_noofs_operand" "+Ua,Ua")
            (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>,l"))))
   (set (match_dup 1)
        (unspec_volatile:QHSD
          [(match_dup 1) (match_dup 2)
           (match_operand:SI 3 "const_int_operand")]    ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r,&l"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, NULL, operands[0], operands[1],
                         operands[2], operands[3], operands[4]);
    DONE;
  }
  [(set_attr "arch" "32,v8mb")])

(define_insn "arm_load_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
        (zero_extend:SI
          (unspec_volatile:NARROW
            [(match_operand:NARROW 1 "mem_noofs_operand" "Ua,Ua")]
            VUNSPEC_LL)))]
  "TARGET_HAVE_LDREXBH"
  "@
   ldrex<sync_sfx>%?\t%0, %C1
   ldrex<sync_sfx>\t%0, %C1"
  [(set_attr "arch" "32,v8mb")
   (set_attr "predicable" "yes")])

(define_insn "arm_load_acquire_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
        (zero_extend:SI
          (unspec_volatile:NARROW
            [(match_operand:NARROW 1 "mem_noofs_operand" "Ua,Ua")]
            VUNSPEC_LAX)))]
  "TARGET_HAVE_LDACQ"
  "@
   ldaex<sync_sfx>%?\\t%0, %C1
   ldaex<sync_sfx>\\t%0, %C1"
  [(set_attr "arch" "32,v8mb")
   (set_attr "predicable" "yes")])

(define_insn "arm_load_exclusivesi"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
        (unspec_volatile:SI
          [(match_operand:SI 1 "mem_noofs_operand" "Ua,Ua")]
          VUNSPEC_LL))]
  "TARGET_HAVE_LDREX"
  "@
   ldrex%?\t%0, %C1
   ldrex\t%0, %C1"
  [(set_attr "arch" "32,v8mb")
   (set_attr "predicable" "yes")])

(define_insn "arm_load_acquire_exclusivesi"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
        (unspec_volatile:SI
          [(match_operand:SI 1 "mem_noofs_operand" "Ua,Ua")]
          VUNSPEC_LAX))]
  "TARGET_HAVE_LDACQ"
  "@
   ldaex%?\t%0, %C1
   ldaex\t%0, %C1"
  [(set_attr "arch" "32,v8mb")
   (set_attr "predicable" "yes")])

(define_insn "arm_load_exclusivedi"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
        (unspec_volatile:DI
          [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
          VUNSPEC_LL))]
  "TARGET_HAVE_LDREXD"
  "ldrexd%?\t%0, %H0, %C1"
  [(set_attr "predicable" "yes")])

(define_insn "arm_load_acquire_exclusivedi"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
        (unspec_volatile:DI
          [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
          VUNSPEC_LAX))]
  "TARGET_HAVE_LDACQEXD && ARM_DOUBLEWORD_ALIGN"
  "ldaexd%?\t%0, %H0, %C1"
  [(set_attr "predicable" "yes")])

(define_insn "arm_store_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=&r")
        (unspec_volatile:SI [(const_int 0)] VUNSPEC_SC))
   (set (match_operand:QHSD 1 "mem_noofs_operand" "=Ua")
        (unspec_volatile:QHSD
          [(match_operand:QHSD 2 "s_register_operand" "r")]
          VUNSPEC_SC))]
  "<sync_predtab>"
  {
    if (<MODE>mode == DImode)
      {
        /* The restrictions on target registers in ARM mode are that the two
           registers are consecutive and the first one is even; Thumb is
           actually more flexible, but DI should give us this anyway.
           Note that the first register always gets the
           lowest word in memory.  */
        gcc_assert ((REGNO (operands[2]) & 1) == 0 || TARGET_THUMB2);
        return "strexd%?\t%0, %2, %H2, %C1";
      }
    if (TARGET_THUMB1)
      return "strex<sync_sfx>\t%0, %2, %C1";
    else
      return "strex<sync_sfx>%?\t%0, %2, %C1";
  }
  [(set_attr "predicable" "yes")])
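
;; For illustration: the even/odd-pair restriction above means a DImode
;; store-exclusive in ARM mode assembles as, e.g. (sketch):
;;
;;      strexd  r0, r2, r3, [r1]        @ r2/r3 consecutive, r2 even
;;
;; where %0 (r0 here) reads back 0 on success and 1 if the exclusive
;; monitor was lost.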

(define_insn "arm_store_release_exclusivedi"
  [(set (match_operand:SI 0 "s_register_operand" "=&r")
        (unspec_volatile:SI [(const_int 0)] VUNSPEC_SLX))
   (set (match_operand:DI 1 "mem_noofs_operand" "=Ua")
        (unspec_volatile:DI
          [(match_operand:DI 2 "s_register_operand" "r")]
          VUNSPEC_SLX))]
  "TARGET_HAVE_LDACQEXD && ARM_DOUBLEWORD_ALIGN"
  {
    /* See comment in arm_store_exclusive<mode> above.  */
    gcc_assert ((REGNO (operands[2]) & 1) == 0 || TARGET_THUMB2);
    return "stlexd%?\t%0, %2, %H2, %C1";
  }
  [(set_attr "predicable" "yes")])

(define_insn "arm_store_release_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
        (unspec_volatile:SI [(const_int 0)] VUNSPEC_SLX))
   (set (match_operand:QHSI 1 "mem_noofs_operand" "=Ua,Ua")
        (unspec_volatile:QHSI
          [(match_operand:QHSI 2 "s_register_operand" "r,r")]
          VUNSPEC_SLX))]
  "TARGET_HAVE_LDACQ"
  "@
   stlex<sync_sfx>%?\t%0, %2, %C1
   stlex<sync_sfx>\t%0, %2, %C1"
  [(set_attr "arch" "32,v8mb")
   (set_attr "predicable" "yes")])