;; Machine description for PowerPC synchronization instructions.
;; Copyright (C) 2005-2016 Free Software Foundation, Inc.
;; Contributed by Geoffrey Keating.

;; This file is part of GCC.

;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published
;; by the Free Software Foundation; either version 3, or (at your
;; option) any later version.

;; GCC is distributed in the hope that it will be useful, but WITHOUT
;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
;; License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

(define_mode_attr larx [(QI "lbarx")
                        (HI "lharx")
                        (SI "lwarx")
                        (DI "ldarx")
                        (TI "lqarx")])

(define_mode_attr stcx [(QI "stbcx.")
                        (HI "sthcx.")
                        (SI "stwcx.")
                        (DI "stdcx.")
                        (TI "stqcx.")])

(define_code_iterator FETCHOP [plus minus ior xor and])
(define_code_attr fetchop_name
  [(plus "add") (minus "sub") (ior "or") (xor "xor") (and "and")])
(define_code_attr fetchop_pred
  [(plus "add_operand") (minus "int_reg_operand")
   (ior "logical_operand") (xor "logical_operand") (and "and_operand")])

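;; Expand a memory fence for the given C11/C++11 memory model.  A relaxed
;; fence emits nothing, consume/acquire/release/acq_rel fences emit lwsync,
;; and a seq_cst fence emits the heavyweight sync.  As a rough example,
;; __atomic_thread_fence (__ATOMIC_SEQ_CST) comes through this expander and
;; ends up as a single "sync" instruction.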
(define_expand "mem_thread_fence"
  [(match_operand:SI 0 "const_int_operand" "")]		;; model
  ""
{
  enum memmodel model = memmodel_base (INTVAL (operands[0]));
  switch (model)
    {
    case MEMMODEL_RELAXED:
      break;
    case MEMMODEL_CONSUME:
    case MEMMODEL_ACQUIRE:
    case MEMMODEL_RELEASE:
    case MEMMODEL_ACQ_REL:
      emit_insn (gen_lwsync ());
      break;
    case MEMMODEL_SEQ_CST:
      emit_insn (gen_hwsync ());
      break;
    default:
      gcc_unreachable ();
    }
  DONE;
})

(define_expand "hwsync"
  [(set (match_dup 0)
	(unspec:BLK [(match_dup 0)] UNSPEC_SYNC))]
  ""
{
  operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[0]) = 1;
})

(define_insn "*hwsync"
  [(set (match_operand:BLK 0 "" "")
	(unspec:BLK [(match_dup 0)] UNSPEC_SYNC))]
  ""
  "sync"
  [(set_attr "type" "sync")])

(define_expand "lwsync"
  [(set (match_dup 0)
	(unspec:BLK [(match_dup 0)] UNSPEC_LWSYNC))]
  ""
{
  operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[0]) = 1;
})

(define_insn "*lwsync"
  [(set (match_operand:BLK 0 "" "")
	(unspec:BLK [(match_dup 0)] UNSPEC_LWSYNC))]
  ""
{
  /* Some AIX assemblers don't accept lwsync, so we use a .long.  */
  if (TARGET_NO_LWSYNC)
    return "sync";
  else if (TARGET_LWSYNC_INSTRUCTION)
    return "lwsync";
  else
    return ".long 0x7c2004ac";
}
  [(set_attr "type" "sync")])

(define_insn "isync"
  [(unspec_volatile:BLK [(const_int 0)] UNSPECV_ISYNC)]
  ""
  "isync"
  [(set_attr "type" "isync")])

;; Types that we should provide atomic instructions for.
(define_mode_iterator AINT [QI
			    HI
			    SI
			    (DI "TARGET_POWERPC64")
			    (TI "TARGET_SYNC_TI")])

;; The control dependency used for load dependency described
;; in B.2.3 of the Power ISA 2.06B.
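;; The loaded value is compared with itself, so the bne- is never taken, but
;; the branch cannot be resolved until the load has completed; the isync then
;; prevents later instructions from starting ahead of the branch.  Together
;; this orders subsequent loads after the atomic load, giving it acquire
;; semantics without a full sync.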
(define_insn "loadsync_<mode>"
  [(unspec_volatile:BLK [(match_operand:AINT 0 "register_operand" "r")]
			UNSPECV_ISYNC)
   (clobber (match_scratch:CC 1 "=y"))]
  ""
  "cmpw %1,%0,%0\;bne- %1,$+4\;isync"
  [(set_attr "type" "isync")
   (set_attr "length" "12")])

(define_insn "load_quadpti"
  [(set (match_operand:PTI 0 "quad_int_reg_operand" "=&r")
	(unspec:PTI
	 [(match_operand:TI 1 "quad_memory_operand" "wQ")] UNSPEC_LSQ))]
  "TARGET_SYNC_TI
   && !reg_mentioned_p (operands[0], operands[1])"
  "lq %0,%1"
  [(set_attr "type" "load")
   (set_attr "length" "4")])

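;; Atomic load.  A seq_cst load is preceded by a full sync; acquire, consume
;; and seq_cst loads are followed by the loadsync_<mode> sequence above.  For
;; TImode the load is done with lq into an even/odd register pair, swapping
;; the two doublewords on little-endian systems.  Roughly, a 32-bit
;; __atomic_load_n (&x, __ATOMIC_ACQUIRE) becomes "lwz; cmpw; bne-; isync".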
(define_expand "atomic_load<mode>"
  [(set (match_operand:AINT 0 "register_operand" "")		;; output
	(match_operand:AINT 1 "memory_operand" ""))		;; memory
   (use (match_operand:SI 2 "const_int_operand" ""))]		;; model
  ""
{
  if (<MODE>mode == TImode && !TARGET_SYNC_TI)
    FAIL;

  enum memmodel model = memmodel_base (INTVAL (operands[2]));

  if (is_mm_seq_cst (model))
    emit_insn (gen_hwsync ());

  if (<MODE>mode != TImode)
    emit_move_insn (operands[0], operands[1]);
  else
    {
      rtx op0 = operands[0];
      rtx op1 = operands[1];
      rtx pti_reg = gen_reg_rtx (PTImode);

      // Can't have indexed address for 'lq'
      if (indexed_address (XEXP (op1, 0), TImode))
	{
	  rtx old_addr = XEXP (op1, 0);
	  rtx new_addr = force_reg (Pmode, old_addr);
	  operands[1] = op1 = replace_equiv_address (op1, new_addr);
	}

      emit_insn (gen_load_quadpti (pti_reg, op1));

      if (WORDS_BIG_ENDIAN)
	emit_move_insn (op0, gen_lowpart (TImode, pti_reg));
      else
	{
	  emit_move_insn (gen_lowpart (DImode, op0), gen_highpart (DImode, pti_reg));
	  emit_move_insn (gen_highpart (DImode, op0), gen_lowpart (DImode, pti_reg));
	}
    }

  switch (model)
    {
    case MEMMODEL_RELAXED:
      break;
    case MEMMODEL_CONSUME:
    case MEMMODEL_ACQUIRE:
    case MEMMODEL_SEQ_CST:
      emit_insn (gen_loadsync_<mode> (operands[0]));
      break;
    default:
      gcc_unreachable ();
    }
  DONE;
})

(define_insn "store_quadpti"
  [(set (match_operand:PTI 0 "quad_memory_operand" "=wQ")
	(unspec:PTI
	 [(match_operand:PTI 1 "quad_int_reg_operand" "r")] UNSPEC_LSQ))]
  "TARGET_SYNC_TI"
  "stq %1,%0"
  [(set_attr "type" "store")
   (set_attr "length" "4")])

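;; Atomic store.  A release store is preceded by lwsync and a seq_cst store
;; by a full sync; the store itself is then a plain store (stq for TImode,
;; with the doublewords swapped on little-endian systems).  For example,
;; __atomic_store_n (&x, v, __ATOMIC_RELEASE) becomes roughly "lwsync; stw".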
(define_expand "atomic_store<mode>"
  [(set (match_operand:AINT 0 "memory_operand" "")		;; memory
	(match_operand:AINT 1 "register_operand" ""))		;; input
   (use (match_operand:SI 2 "const_int_operand" ""))]		;; model
  ""
{
  if (<MODE>mode == TImode && !TARGET_SYNC_TI)
    FAIL;

  enum memmodel model = memmodel_base (INTVAL (operands[2]));
  switch (model)
    {
    case MEMMODEL_RELAXED:
      break;
    case MEMMODEL_RELEASE:
      emit_insn (gen_lwsync ());
      break;
    case MEMMODEL_SEQ_CST:
      emit_insn (gen_hwsync ());
      break;
    default:
      gcc_unreachable ();
    }
  if (<MODE>mode != TImode)
    emit_move_insn (operands[0], operands[1]);
  else
    {
      rtx op0 = operands[0];
      rtx op1 = operands[1];
      rtx pti_reg = gen_reg_rtx (PTImode);

      // Can't have indexed address for 'stq'
      if (indexed_address (XEXP (op0, 0), TImode))
	{
	  rtx old_addr = XEXP (op0, 0);
	  rtx new_addr = force_reg (Pmode, old_addr);
	  operands[0] = op0 = replace_equiv_address (op0, new_addr);
	}

      if (WORDS_BIG_ENDIAN)
	emit_move_insn (pti_reg, gen_lowpart (PTImode, op1));
      else
	{
	  emit_move_insn (gen_lowpart (DImode, pti_reg), gen_highpart (DImode, op1));
	  emit_move_insn (gen_highpart (DImode, pti_reg), gen_lowpart (DImode, op1));
	}

      emit_insn (gen_store_quadpti (gen_lowpart (PTImode, op0), pti_reg));
    }

  DONE;
})

;; Any supported integer mode that has atomic l<x>arx/st<x>cx. instructions
;; other than the quad memory operations, which have special restrictions.
;; Byte/halfword atomic instructions were added in ISA 2.06B, but were phased
;; in and did not show up until power8.  TImode atomic lqarx/stqcx. require
;; special handling due to even/odd register requirements.
(define_mode_iterator ATOMIC [(QI "TARGET_SYNC_HI_QI")
			      (HI "TARGET_SYNC_HI_QI")
			      SI
			      (DI "TARGET_POWERPC64")])

(define_insn "load_locked<mode>"
  [(set (match_operand:ATOMIC 0 "int_reg_operand" "=r")
	(unspec_volatile:ATOMIC
	 [(match_operand:ATOMIC 1 "memory_operand" "Z")] UNSPECV_LL))]
  ""
  "<larx> %0,%y1"
  [(set_attr "type" "load_l")])

(define_insn "load_locked<QHI:mode>_si"
  [(set (match_operand:SI 0 "int_reg_operand" "=r")
	(unspec_volatile:SI
	 [(match_operand:QHI 1 "memory_operand" "Z")] UNSPECV_LL))]
  "TARGET_SYNC_HI_QI"
  "<QHI:larx> %0,%y1"
  [(set_attr "type" "load_l")])

;; Use PTImode to get even/odd register pairs.
;; Use a temporary register to force getting an even register for the
;; lqarx/stqcx. instructions.  Normal optimizations will eliminate this extra
;; copy on big endian systems.

;; On little endian systems where non-atomic quad word load/store instructions
;; are not used, the address can be register+offset, so make sure the address
;; is indexed or indirect before register allocation.

(define_expand "load_lockedti"
  [(use (match_operand:TI 0 "quad_int_reg_operand" ""))
   (use (match_operand:TI 1 "memory_operand" ""))]
  "TARGET_SYNC_TI"
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx pti = gen_reg_rtx (PTImode);

  if (!indexed_or_indirect_operand (op1, TImode))
    {
      rtx old_addr = XEXP (op1, 0);
      rtx new_addr = force_reg (Pmode, old_addr);
      operands[1] = op1 = change_address (op1, TImode, new_addr);
    }

  emit_insn (gen_load_lockedpti (pti, op1));
  if (WORDS_BIG_ENDIAN)
    emit_move_insn (op0, gen_lowpart (TImode, pti));
  else
    {
      emit_move_insn (gen_lowpart (DImode, op0), gen_highpart (DImode, pti));
      emit_move_insn (gen_highpart (DImode, op0), gen_lowpart (DImode, pti));
    }
  DONE;
})

(define_insn "load_lockedpti"
  [(set (match_operand:PTI 0 "quad_int_reg_operand" "=&r")
	(unspec_volatile:PTI
	 [(match_operand:TI 1 "indexed_or_indirect_operand" "Z")] UNSPECV_LL))]
  "TARGET_SYNC_TI
   && !reg_mentioned_p (operands[0], operands[1])
   && quad_int_reg_operand (operands[0], PTImode)"
  "lqarx %0,%y1"
  [(set_attr "type" "load_l")])

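;; The st<x>cx. instructions set the EQ bit of CR0 when the store succeeds,
;; i.e. when the reservation from the matching l<x>arx is still held.
;; Operand 0 is that CR field; the compare-and-swap and exchange loops built
;; around these patterns branch on it to decide whether to retry.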
(define_insn "store_conditional<mode>"
  [(set (match_operand:CC 0 "cc_reg_operand" "=x")
	(unspec_volatile:CC [(const_int 0)] UNSPECV_SC))
   (set (match_operand:ATOMIC 1 "memory_operand" "=Z")
	(match_operand:ATOMIC 2 "int_reg_operand" "r"))]
  ""
  "<stcx> %2,%y1"
  [(set_attr "type" "store_c")])

;; Use a temporary register to force getting an even register for the
;; lqarx/stqcx. instructions.  Normal optimizations will eliminate this extra
;; copy on big endian systems.

;; On little endian systems where non-atomic quad word load/store instructions
;; are not used, the address can be register+offset, so make sure the address
;; is indexed or indirect before register allocation.

(define_expand "store_conditionalti"
  [(use (match_operand:CC 0 "cc_reg_operand" ""))
   (use (match_operand:TI 1 "memory_operand" ""))
   (use (match_operand:TI 2 "quad_int_reg_operand" ""))]
  "TARGET_SYNC_TI"
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx addr = XEXP (op1, 0);
  rtx pti_mem;
  rtx pti_reg;

  if (!indexed_or_indirect_operand (op1, TImode))
    {
      rtx new_addr = force_reg (Pmode, addr);
      operands[1] = op1 = change_address (op1, TImode, new_addr);
      addr = new_addr;
    }

  pti_mem = change_address (op1, PTImode, addr);
  pti_reg = gen_reg_rtx (PTImode);

  if (WORDS_BIG_ENDIAN)
    emit_move_insn (pti_reg, gen_lowpart (PTImode, op2));
  else
    {
      emit_move_insn (gen_lowpart (DImode, pti_reg), gen_highpart (DImode, op2));
      emit_move_insn (gen_highpart (DImode, pti_reg), gen_lowpart (DImode, op2));
    }

  emit_insn (gen_store_conditionalpti (op0, pti_mem, pti_reg));
  DONE;
})

(define_insn "store_conditionalpti"
  [(set (match_operand:CC 0 "cc_reg_operand" "=x")
	(unspec_volatile:CC [(const_int 0)] UNSPECV_SC))
   (set (match_operand:PTI 1 "indexed_or_indirect_operand" "=Z")
	(match_operand:PTI 2 "quad_int_reg_operand" "r"))]
  "TARGET_SYNC_TI && quad_int_reg_operand (operands[2], PTImode)"
  "stqcx. %2,%y1"
  [(set_attr "type" "store_c")])

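;; Compare-and-swap.  The real work is done in
;; rs6000_expand_atomic_compare_and_swap; operand 0 receives the success flag
;; and operand 1 the value loaded from memory.  For a word-sized operand the
;; generated loop is roughly (a sketch only; the exact sequence depends on
;; the mode, on is_weak, and on the two memory models):
;;
;;	loop:	lwarx   r,0,addr
;;		cmpw    r,expected
;;		bne-    fail
;;		stwcx.  desired,0,addr
;;		bne-    loop
;;	fail:	<barriers as required by the memory model>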
(define_expand "atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "int_reg_operand" "")		;; bool out
   (match_operand:AINT 1 "int_reg_operand" "")		;; val out
   (match_operand:AINT 2 "memory_operand" "")		;; memory
   (match_operand:AINT 3 "reg_or_short_operand" "")	;; expected
   (match_operand:AINT 4 "int_reg_operand" "")		;; desired
   (match_operand:SI 5 "const_int_operand" "")		;; is_weak
   (match_operand:SI 6 "const_int_operand" "")		;; model succ
   (match_operand:SI 7 "const_int_operand" "")]		;; model fail
  ""
{
  rs6000_expand_atomic_compare_and_swap (operands);
  DONE;
})

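;; Atomic exchange.  rs6000_expand_atomic_exchange builds a
;; load-locked/store-conditional retry loop that stores the new value and
;; returns the old one, surrounded by whatever barriers the memory model
;; requires.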
(define_expand "atomic_exchange<mode>"
  [(match_operand:AINT 0 "int_reg_operand" "")		;; output
   (match_operand:AINT 1 "memory_operand" "")		;; memory
   (match_operand:AINT 2 "int_reg_operand" "")		;; input
   (match_operand:SI 3 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_exchange (operands);
  DONE;
})

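;; The atomic read-modify-write patterns below all funnel into
;; rs6000_expand_atomic_op, which builds the usual l<x>arx / operate /
;; st<x>cx. retry loop.  The two rtx arguments after the operand value select
;; what is returned: atomic_<op> discards the result, atomic_fetch_<op>
;; returns the value before the operation, and atomic_<op>_fetch the value
;; after it.  The NAND variants pass NOT as the operation code.  For example,
;; __atomic_fetch_add (&x, 1, __ATOMIC_RELAXED) is matched by
;; atomic_fetch_add<mode>.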
(define_expand "atomic_<fetchop_name><mode>"
  [(match_operand:AINT 0 "memory_operand" "")		;; memory
   (FETCHOP:AINT (match_dup 0)
     (match_operand:AINT 1 "<fetchop_pred>" ""))	;; operand
   (match_operand:SI 2 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (<CODE>, operands[0], operands[1],
			   NULL_RTX, NULL_RTX, operands[2]);
  DONE;
})

(define_expand "atomic_nand<mode>"
  [(match_operand:AINT 0 "memory_operand" "")		;; memory
   (match_operand:AINT 1 "int_reg_operand" "")		;; operand
   (match_operand:SI 2 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (NOT, operands[0], operands[1],
			   NULL_RTX, NULL_RTX, operands[2]);
  DONE;
})

(define_expand "atomic_fetch_<fetchop_name><mode>"
  [(match_operand:AINT 0 "int_reg_operand" "")		;; output
   (match_operand:AINT 1 "memory_operand" "")		;; memory
   (FETCHOP:AINT (match_dup 1)
     (match_operand:AINT 2 "<fetchop_pred>" ""))	;; operand
   (match_operand:SI 3 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (<CODE>, operands[1], operands[2],
			   operands[0], NULL_RTX, operands[3]);
  DONE;
})

(define_expand "atomic_fetch_nand<mode>"
  [(match_operand:AINT 0 "int_reg_operand" "")		;; output
   (match_operand:AINT 1 "memory_operand" "")		;; memory
   (match_operand:AINT 2 "int_reg_operand" "")		;; operand
   (match_operand:SI 3 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (NOT, operands[1], operands[2],
			   operands[0], NULL_RTX, operands[3]);
  DONE;
})

(define_expand "atomic_<fetchop_name>_fetch<mode>"
  [(match_operand:AINT 0 "int_reg_operand" "")		;; output
   (match_operand:AINT 1 "memory_operand" "")		;; memory
   (FETCHOP:AINT (match_dup 1)
     (match_operand:AINT 2 "<fetchop_pred>" ""))	;; operand
   (match_operand:SI 3 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (<CODE>, operands[1], operands[2],
			   NULL_RTX, operands[0], operands[3]);
  DONE;
})

(define_expand "atomic_nand_fetch<mode>"
  [(match_operand:AINT 0 "int_reg_operand" "")		;; output
   (match_operand:AINT 1 "memory_operand" "")		;; memory
   (match_operand:AINT 2 "int_reg_operand" "")		;; operand
   (match_operand:SI 3 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (NOT, operands[1], operands[2],
			   NULL_RTX, operands[0], operands[3]);
  DONE;
})