;; Machine description for PowerPC synchronization instructions.
;; Copyright (C) 2005-2018 Free Software Foundation, Inc.
;; Contributed by Geoffrey Keating.

;; This file is part of GCC.

;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published
;; by the Free Software Foundation; either version 3, or (at your
;; option) any later version.

;; GCC is distributed in the hope that it will be useful, but WITHOUT
;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
;; License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

(define_mode_attr larx [(QI "lbarx")
                        (HI "lharx")
                        (SI "lwarx")
                        (DI "ldarx")
                        (TI "lqarx")])

(define_mode_attr stcx [(QI "stbcx.")
                        (HI "sthcx.")
                        (SI "stwcx.")
                        (DI "stdcx.")
                        (TI "stqcx.")])

(define_code_iterator FETCHOP [plus minus ior xor and])
(define_code_attr fetchop_name
  [(plus "add") (minus "sub") (ior "or") (xor "xor") (and "and")])
(define_code_attr fetchop_pred
  [(plus "add_operand") (minus "int_reg_operand")
   (ior "logical_operand") (xor "logical_operand") (and "and_operand")])

(define_expand "mem_thread_fence"
  [(match_operand:SI 0 "const_int_operand")]	;; model
  ""
{
  enum memmodel model = memmodel_base (INTVAL (operands[0]));
  switch (model)
    {
    case MEMMODEL_RELAXED:
      break;
    case MEMMODEL_CONSUME:
    case MEMMODEL_ACQUIRE:
    case MEMMODEL_RELEASE:
    case MEMMODEL_ACQ_REL:
      emit_insn (gen_lwsync ());
      break;
    case MEMMODEL_SEQ_CST:
      emit_insn (gen_hwsync ());
      break;
    default:
      gcc_unreachable ();
    }
  DONE;
})
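
;; As a quick illustration of the mem_thread_fence expander above:
;; __atomic_thread_fence with __ATOMIC_ACQUIRE, __ATOMIC_RELEASE or
;; __ATOMIC_ACQ_REL becomes a single lwsync, __ATOMIC_SEQ_CST becomes a
;; full sync, and __ATOMIC_RELAXED emits no barrier at all.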

(define_expand "hwsync"
  [(set (match_dup 0)
        (unspec:BLK [(match_dup 0)] UNSPEC_SYNC))]
  ""
{
  operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[0]) = 1;
})

(define_insn "*hwsync"
  [(set (match_operand:BLK 0 "" "")
        (unspec:BLK [(match_dup 0)] UNSPEC_SYNC))]
  ""
  "sync"
  [(set_attr "type" "sync")])

(define_expand "lwsync"
  [(set (match_dup 0)
        (unspec:BLK [(match_dup 0)] UNSPEC_LWSYNC))]
  ""
{
  operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[0]) = 1;
})

(define_insn "*lwsync"
  [(set (match_operand:BLK 0 "" "")
        (unspec:BLK [(match_dup 0)] UNSPEC_LWSYNC))]
  ""
{
  if (TARGET_NO_LWSYNC)
    return "sync";
  else
    return "lwsync";
}
  [(set_attr "type" "sync")])
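
;; Note on the two barriers above: sync (the "heavyweight" barrier) orders
;; all combinations of prior and later loads and stores, including a store
;; followed by a load; lwsync orders everything except store-then-load,
;; which is why it suffices for acquire/release fences but not for seq_cst.
;; On processors without lwsync (TARGET_NO_LWSYNC) the *lwsync pattern
;; falls back to a full sync, as shown above.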

(define_insn "isync"
  [(unspec_volatile:BLK [(const_int 0)] UNSPECV_ISYNC)]
  ""
  "isync"
  [(set_attr "type" "isync")])

;; Types that we should provide atomic instructions for.
(define_mode_iterator AINT [QI
                            HI
                            SI
                            (DI "TARGET_POWERPC64")
                            (TI "TARGET_SYNC_TI")])

;; The control dependency used for load dependency described
;; in B.2.3 of the Power ISA 2.06B.
(define_insn "loadsync_<mode>"
  [(unspec_volatile:BLK [(match_operand:AINT 0 "register_operand" "r")]
                        UNSPECV_ISYNC)
   (clobber (match_scratch:CC 1 "=y"))]
  ""
  "cmpw %1,%0,%0\;bne- %1,$+4\;isync"
  [(set_attr "type" "isync")
   (set_attr "length" "12")])
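
;; In the loadsync pattern above, the compare of the loaded value with
;; itself and the never-taken conditional branch create an artificial
;; control dependency on the load; the following isync then prevents later
;; loads from being satisfied early.  This gives acquire-style ordering
;; after a load without paying for a full sync.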

(define_insn "load_quadpti"
  [(set (match_operand:PTI 0 "quad_int_reg_operand" "=&r")
        (unspec:PTI
         [(match_operand:TI 1 "quad_memory_operand" "wQ")] UNSPEC_LSQ))]
  "TARGET_SYNC_TI
   && !reg_mentioned_p (operands[0], operands[1])"
  "lq %0,%1"
  [(set_attr "type" "load")])

(define_expand "atomic_load<mode>"
  [(set (match_operand:AINT 0 "register_operand")	;; output
        (match_operand:AINT 1 "memory_operand"))	;; memory
   (use (match_operand:SI 2 "const_int_operand"))]	;; model
  ""
{
  if (<MODE>mode == TImode && !TARGET_SYNC_TI)
    FAIL;

  enum memmodel model = memmodel_base (INTVAL (operands[2]));

  if (is_mm_seq_cst (model))
    emit_insn (gen_hwsync ());

  if (<MODE>mode != TImode)
    emit_move_insn (operands[0], operands[1]);
  else
    {
      rtx op0 = operands[0];
      rtx op1 = operands[1];
      rtx pti_reg = gen_reg_rtx (PTImode);

      if (!quad_address_p (XEXP (op1, 0), TImode, false))
        {
          rtx old_addr = XEXP (op1, 0);
          rtx new_addr = force_reg (Pmode, old_addr);
          operands[1] = op1 = replace_equiv_address (op1, new_addr);
        }

      emit_insn (gen_load_quadpti (pti_reg, op1));

      if (WORDS_BIG_ENDIAN)
        emit_move_insn (op0, gen_lowpart (TImode, pti_reg));
      else
        {
          emit_move_insn (gen_lowpart (DImode, op0), gen_highpart (DImode, pti_reg));
          emit_move_insn (gen_highpart (DImode, op0), gen_lowpart (DImode, pti_reg));
        }
    }

  switch (model)
    {
    case MEMMODEL_RELAXED:
      break;
    case MEMMODEL_CONSUME:
    case MEMMODEL_ACQUIRE:
    case MEMMODEL_SEQ_CST:
      emit_insn (gen_loadsync_<mode> (operands[0]));
      break;
    default:
      gcc_unreachable ();
    }
  DONE;
})
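
;; A minimal sketch (register numbers are illustrative, not fixed) of what
;; the atomic_load expander above produces for an SImode __atomic_load_n
;; with __ATOMIC_SEQ_CST:
;;	sync			# from gen_hwsync
;;	lwz 9,0(3)		# the plain load
;;	cmpw 7,9,9		# loadsync_si: compare the value with itself
;;	bne- 7,$+4		# never-taken branch
;;	isync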

(define_insn "store_quadpti"
  [(set (match_operand:PTI 0 "quad_memory_operand" "=wQ")
        (unspec:PTI
         [(match_operand:PTI 1 "quad_int_reg_operand" "r")] UNSPEC_LSQ))]
  "TARGET_SYNC_TI"
  "stq %1,%0"
  [(set_attr "type" "store")])

(define_expand "atomic_store<mode>"
  [(set (match_operand:AINT 0 "memory_operand")		;; memory
        (match_operand:AINT 1 "register_operand"))	;; input
   (use (match_operand:SI 2 "const_int_operand"))]	;; model
  ""
{
  if (<MODE>mode == TImode && !TARGET_SYNC_TI)
    FAIL;

  enum memmodel model = memmodel_base (INTVAL (operands[2]));
  switch (model)
    {
    case MEMMODEL_RELAXED:
      break;
    case MEMMODEL_RELEASE:
      emit_insn (gen_lwsync ());
      break;
    case MEMMODEL_SEQ_CST:
      emit_insn (gen_hwsync ());
      break;
    default:
      gcc_unreachable ();
    }
  if (<MODE>mode != TImode)
    emit_move_insn (operands[0], operands[1]);
  else
    {
      rtx op0 = operands[0];
      rtx op1 = operands[1];
      rtx pti_reg = gen_reg_rtx (PTImode);

      if (!quad_address_p (XEXP (op0, 0), TImode, false))
        {
          rtx old_addr = XEXP (op0, 0);
          rtx new_addr = force_reg (Pmode, old_addr);
          operands[0] = op0 = replace_equiv_address (op0, new_addr);
        }

      if (WORDS_BIG_ENDIAN)
        emit_move_insn (pti_reg, gen_lowpart (PTImode, op1));
      else
        {
          emit_move_insn (gen_lowpart (DImode, pti_reg), gen_highpart (DImode, op1));
          emit_move_insn (gen_highpart (DImode, pti_reg), gen_lowpart (DImode, op1));
        }

      emit_insn (gen_store_quadpti (gen_lowpart (PTImode, op0), pti_reg));
    }

  DONE;
})
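
;; A minimal sketch of the atomic_store expander above for an SImode
;; __atomic_store_n with __ATOMIC_RELEASE (register numbers illustrative):
;;	lwsync
;;	stw 4,0(3)
;; A seq_cst store uses a full sync instead, and a relaxed store emits no
;; barrier at all.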

;; Any supported integer mode that has atomic l<x>arx/st<x>cx. instructions
;; other than the quad memory operations, which have special restrictions.
;; Byte/halfword atomic instructions were added in ISA 2.06B, but were phased
;; in and did not show up until power8.  TImode atomic lqarx/stqcx. require
;; special handling due to even/odd register requirements.
(define_mode_iterator ATOMIC [(QI "TARGET_SYNC_HI_QI")
                              (HI "TARGET_SYNC_HI_QI")
                              SI
                              (DI "TARGET_POWERPC64")])

(define_insn "load_locked<mode>"
  [(set (match_operand:ATOMIC 0 "int_reg_operand" "=r")
        (unspec_volatile:ATOMIC
         [(match_operand:ATOMIC 1 "memory_operand" "Z")] UNSPECV_LL))]
  ""
  "<larx> %0,%y1"
  [(set_attr "type" "load_l")])

(define_insn "load_locked<QHI:mode>_si"
  [(set (match_operand:SI 0 "int_reg_operand" "=r")
        (unspec_volatile:SI
         [(match_operand:QHI 1 "memory_operand" "Z")] UNSPECV_LL))]
  "TARGET_SYNC_HI_QI"
  "<QHI:larx> %0,%y1"
  [(set_attr "type" "load_l")])
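
;; Note that lbarx/lharx load the reserved byte or halfword into the low
;; bits of the destination register with the remaining bits cleared, so the
;; SImode destination used by load_locked<QHI:mode>_si above holds the
;; value zero-extended to 32 bits.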

;; Use PTImode to get even/odd register pairs.
;; Use a temporary register to force getting an even register for the
;; lqarx/stqcx. instructions.  Normal optimizations will eliminate this extra
;; copy on big endian systems.

;; On little endian systems where non-atomic quad word load/store instructions
;; are not used, the address can be register+offset, so make sure the address
;; is indexed or indirect before register allocation.

(define_expand "load_lockedti"
  [(use (match_operand:TI 0 "quad_int_reg_operand"))
   (use (match_operand:TI 1 "memory_operand"))]
  "TARGET_SYNC_TI"
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx pti = gen_reg_rtx (PTImode);

  if (!indexed_or_indirect_operand (op1, TImode))
    {
      rtx old_addr = XEXP (op1, 0);
      rtx new_addr = force_reg (Pmode, old_addr);
      operands[1] = op1 = change_address (op1, TImode, new_addr);
    }

  emit_insn (gen_load_lockedpti (pti, op1));
  if (WORDS_BIG_ENDIAN)
    emit_move_insn (op0, gen_lowpart (TImode, pti));
  else
    {
      emit_move_insn (gen_lowpart (DImode, op0), gen_highpart (DImode, pti));
      emit_move_insn (gen_highpart (DImode, op0), gen_lowpart (DImode, pti));
    }
  DONE;
})

(define_insn "load_lockedpti"
  [(set (match_operand:PTI 0 "quad_int_reg_operand" "=&r")
        (unspec_volatile:PTI
         [(match_operand:TI 1 "indexed_or_indirect_operand" "Z")] UNSPECV_LL))]
  "TARGET_SYNC_TI
   && !reg_mentioned_p (operands[0], operands[1])
   && quad_int_reg_operand (operands[0], PTImode)"
  "lqarx %0,%y1"
  [(set_attr "type" "load_l")])

(define_insn "store_conditional<mode>"
  [(set (match_operand:CC 0 "cc_reg_operand" "=x")
        (unspec_volatile:CC [(const_int 0)] UNSPECV_SC))
   (set (match_operand:ATOMIC 1 "memory_operand" "=Z")
        (match_operand:ATOMIC 2 "int_reg_operand" "r"))]
  ""
  "<stcx> %2,%y1"
  [(set_attr "type" "store_c")])
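
;; A rough sketch of how the load_locked/store_conditional pairs above are
;; combined by the expanders further down, e.g. for an SImode fetch-and-add
;; (register numbers and label are illustrative):
;; .Lretry:
;;	lwarx 9,0,3		# load and reserve
;;	add 10,9,4		# compute the new value
;;	stwcx. 10,0,3		# store only if the reservation still holds
;;	bne- 0,.Lretry		# CR0.EQ clear -> reservation lost, retry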

;; Use a temporary register to force getting an even register for the
;; lqarx/stqcx. instructions.  Normal optimizations will eliminate this extra
;; copy on big endian systems.

;; On little endian systems where non-atomic quad word load/store instructions
;; are not used, the address can be register+offset, so make sure the address
;; is indexed or indirect before register allocation.

(define_expand "store_conditionalti"
  [(use (match_operand:CC 0 "cc_reg_operand"))
   (use (match_operand:TI 1 "memory_operand"))
   (use (match_operand:TI 2 "quad_int_reg_operand"))]
  "TARGET_SYNC_TI"
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx addr = XEXP (op1, 0);
  rtx pti_mem;
  rtx pti_reg;

  if (!indexed_or_indirect_operand (op1, TImode))
    {
      rtx new_addr = force_reg (Pmode, addr);
      operands[1] = op1 = change_address (op1, TImode, new_addr);
      addr = new_addr;
    }

  pti_mem = change_address (op1, PTImode, addr);
  pti_reg = gen_reg_rtx (PTImode);

  if (WORDS_BIG_ENDIAN)
    emit_move_insn (pti_reg, gen_lowpart (PTImode, op2));
  else
    {
      emit_move_insn (gen_lowpart (DImode, pti_reg), gen_highpart (DImode, op2));
      emit_move_insn (gen_highpart (DImode, pti_reg), gen_lowpart (DImode, op2));
    }

  emit_insn (gen_store_conditionalpti (op0, pti_mem, pti_reg));
  DONE;
})

(define_insn "store_conditionalpti"
  [(set (match_operand:CC 0 "cc_reg_operand" "=x")
        (unspec_volatile:CC [(const_int 0)] UNSPECV_SC))
   (set (match_operand:PTI 1 "indexed_or_indirect_operand" "=Z")
        (match_operand:PTI 2 "quad_int_reg_operand" "r"))]
  "TARGET_SYNC_TI && quad_int_reg_operand (operands[2], PTImode)"
  "stqcx. %2,%y1"
  [(set_attr "type" "store_c")])

(define_expand "atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "int_reg_operand")		;; bool out
   (match_operand:AINT 1 "int_reg_operand")		;; val out
   (match_operand:AINT 2 "memory_operand")		;; memory
   (match_operand:AINT 3 "reg_or_short_operand")	;; expected
   (match_operand:AINT 4 "int_reg_operand")		;; desired
   (match_operand:SI 5 "const_int_operand")		;; is_weak
   (match_operand:SI 6 "const_int_operand")		;; model succ
   (match_operand:SI 7 "const_int_operand")]		;; model fail
  ""
{
  rs6000_expand_atomic_compare_and_swap (operands);
  DONE;
})
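
;; rs6000_expand_atomic_compare_and_swap (in rs6000.c) builds the usual
;; reservation loop.  A rough, illustrative sketch for an SImode strong CAS
;; with acquire ordering (register numbers and labels are not literal):
;; .Lretry:
;;	lwarx 9,0,3		# load and reserve
;;	cmpw 0,9,4		# compare against the expected value
;;	bne- 0,.Ldone		# mismatch: fail without storing
;;	stwcx. 5,0,3		# try to store the desired value
;;	bne- 0,.Lretry		# reservation lost, retry
;; .Ldone:
;;	isync			# acquire ordering (cf. loadsync above)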

(define_expand "atomic_exchange<mode>"
  [(match_operand:AINT 0 "int_reg_operand")		;; output
   (match_operand:AINT 1 "memory_operand")		;; memory
   (match_operand:AINT 2 "int_reg_operand")		;; input
   (match_operand:SI 3 "const_int_operand")]		;; model
  ""
{
  rs6000_expand_atomic_exchange (operands);
  DONE;
})

(define_expand "atomic_<fetchop_name><mode>"
  [(match_operand:AINT 0 "memory_operand")		;; memory
   (FETCHOP:AINT (match_dup 0)
                 (match_operand:AINT 1 "<fetchop_pred>"))	;; operand
   (match_operand:SI 2 "const_int_operand")]		;; model
  ""
{
  rs6000_expand_atomic_op (<CODE>, operands[0], operands[1],
                           NULL_RTX, NULL_RTX, operands[2]);
  DONE;
})
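
;; The read-modify-write expanders here and below all funnel into
;; rs6000_expand_atomic_op.  Judging from the calls, the fourth rtx argument
;; receives the pre-operation value and the fifth the post-operation value,
;; with NULL_RTX meaning that result is not needed: the plain atomic_<op>
;; forms pass NULL_RTX for both, atomic_fetch_<op> passes operands[0] as the
;; fourth argument, and atomic_<op>_fetch passes it as the fifth.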

(define_expand "atomic_nand<mode>"
  [(match_operand:AINT 0 "memory_operand")		;; memory
   (match_operand:AINT 1 "int_reg_operand")		;; operand
   (match_operand:SI 2 "const_int_operand")]		;; model
  ""
{
  rs6000_expand_atomic_op (NOT, operands[0], operands[1],
                           NULL_RTX, NULL_RTX, operands[2]);
  DONE;
})

(define_expand "atomic_fetch_<fetchop_name><mode>"
  [(match_operand:AINT 0 "int_reg_operand")		;; output
   (match_operand:AINT 1 "memory_operand")		;; memory
   (FETCHOP:AINT (match_dup 1)
                 (match_operand:AINT 2 "<fetchop_pred>"))	;; operand
   (match_operand:SI 3 "const_int_operand")]		;; model
  ""
{
  rs6000_expand_atomic_op (<CODE>, operands[1], operands[2],
                           operands[0], NULL_RTX, operands[3]);
  DONE;
})

(define_expand "atomic_fetch_nand<mode>"
  [(match_operand:AINT 0 "int_reg_operand")		;; output
   (match_operand:AINT 1 "memory_operand")		;; memory
   (match_operand:AINT 2 "int_reg_operand")		;; operand
   (match_operand:SI 3 "const_int_operand")]		;; model
  ""
{
  rs6000_expand_atomic_op (NOT, operands[1], operands[2],
                           operands[0], NULL_RTX, operands[3]);
  DONE;
})

(define_expand "atomic_<fetchop_name>_fetch<mode>"
  [(match_operand:AINT 0 "int_reg_operand")		;; output
   (match_operand:AINT 1 "memory_operand")		;; memory
   (FETCHOP:AINT (match_dup 1)
                 (match_operand:AINT 2 "<fetchop_pred>"))	;; operand
   (match_operand:SI 3 "const_int_operand")]		;; model
  ""
{
  rs6000_expand_atomic_op (<CODE>, operands[1], operands[2],
                           NULL_RTX, operands[0], operands[3]);
  DONE;
})

(define_expand "atomic_nand_fetch<mode>"
  [(match_operand:AINT 0 "int_reg_operand")		;; output
   (match_operand:AINT 1 "memory_operand")		;; memory
   (match_operand:AINT 2 "int_reg_operand")		;; operand
   (match_operand:SI 3 "const_int_operand")]		;; model
  ""
{
  rs6000_expand_atomic_op (NOT, operands[1], operands[2],
                           NULL_RTX, operands[0], operands[3]);
  DONE;
})