;;- Machine description for HP PA-RISC architecture for GCC compiler
-;; Copyright (C) 1992-2013 Free Software Foundation, Inc.
+;; Copyright (C) 1992-2020 Free Software Foundation, Inc.
;; Contributed by the Center for Software Science at the University
;; of Utah.
UNSPEC_TLSGD_PIC
UNSPEC_TLSLDM_PIC
UNSPEC_TLSIE_PIC
+ UNSPEC_MEMORY_BARRIER
])
;; UNSPEC_VOLATILE:
;; type "binary" insns have two input operands (1,2) and one output (0)
(define_attr "type"
- "move,unary,binary,shift,nullshift,compare,load,store,uncond_branch,branch,cbranch,fbranch,call,sibcall,dyncall,fpload,fpstore,fpalu,fpcc,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,multi,milli,sh_func_adrs,parallel_branch,fpstore_load,store_fpload"
+ "move,unary,binary,shift,nullshift,compare,load,store,uncond_branch,branch,cbranch,fbranch,call,sibcall,dyncall,fpload,fpstore,fpalu,fpcc,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,multi,milli,sh_func_adrs,parallel_branch,fpstore_load,store_fpload,trap"
(const_string "binary"))
(define_attr "pa_combine_type"
;; For conditional branches. Frame related instructions are not allowed
;; because they confuse the unwind support.
(define_attr "in_branch_delay" "false,true"
- (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,sibcall,dyncall,multi,milli,sh_func_adrs,parallel_branch")
+ (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,sibcall,dyncall,multi,milli,sh_func_adrs,parallel_branch,trap")
(eq_attr "length" "4")
(not (match_test "RTX_FRAME_RELATED_P (insn)")))
(const_string "true")
;; Disallow instructions which use the FPU since they will tie up the FPU
;; even if the instruction is nullified.
(define_attr "in_nullified_branch_delay" "false,true"
- (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,sibcall,dyncall,multi,milli,sh_func_adrs,fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,parallel_branch")
+ (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,sibcall,dyncall,multi,milli,sh_func_adrs,fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,parallel_branch,trap")
(eq_attr "length" "4")
(not (match_test "RTX_FRAME_RELATED_P (insn)")))
(const_string "true")
(const_string "false")))
-;; For calls and millicode calls. Allow unconditional branches in the
-;; delay slot.
+;; For calls and millicode calls.
(define_attr "in_call_delay" "false,true"
- (cond [(and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,sibcall,dyncall,multi,milli,sh_func_adrs,parallel_branch")
- (eq_attr "length" "4")
- (not (match_test "RTX_FRAME_RELATED_P (insn)")))
- (const_string "true")
- (eq_attr "type" "uncond_branch")
- (if_then_else (match_test "TARGET_JUMP_IN_DELAY")
- (const_string "true")
- (const_string "false"))]
- (const_string "false")))
-
+ (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,sibcall,dyncall,multi,milli,sh_func_adrs,parallel_branch,trap")
+ (eq_attr "length" "4")
+ (not (match_test "RTX_FRAME_RELATED_P (insn)")))
+ (const_string "true")
+ (const_string "false")))
;; Call delay slot description.
(define_delay (eq_attr "type" "call")
(and (eq_attr "in_nullified_branch_delay" "true")
(attr_flag "backward"))])
-(define_delay (and (eq_attr "type" "uncond_branch")
- (not (match_test "pa_following_call (insn)")))
+(define_delay (eq_attr "type" "uncond_branch")
[(eq_attr "in_branch_delay" "true") (nil) (nil)])
;; Memory. Disregarding Cache misses, the Mustang memory times are:
(include "predicates.md")
(include "constraints.md")
\f
-;; Atomic instructions
-
-;; All memory loads and stores access storage atomically except
-;; for one exception. The STORE BYTES, STORE DOUBLE BYTES, and
-;; doubleword loads and stores are not guaranteed to be atomic
-;; when referencing the I/O address space.
-
-;; Implement atomic DImode load using 64-bit floating point load and copy.
-
-(define_expand "atomic_loaddi"
- [(match_operand:DI 0 "register_operand") ;; val out
- (match_operand:DI 1 "memory_operand") ;; memory
- (match_operand:SI 2 "const_int_operand")] ;; model
- "!TARGET_64BIT && !TARGET_SOFT_FLOAT"
-{
- enum memmodel model = (enum memmodel) INTVAL (operands[2]);
- operands[1] = force_reg (SImode, XEXP (operands[1], 0));
- operands[2] = gen_reg_rtx (DImode);
- expand_mem_thread_fence (model);
- emit_insn (gen_atomic_loaddi_1 (operands[0], operands[1], operands[2]));
- if ((model & MEMMODEL_MASK) == MEMMODEL_SEQ_CST)
- expand_mem_thread_fence (model);
- DONE;
-})
-
-(define_insn "atomic_loaddi_1"
- [(set (match_operand:DI 0 "register_operand" "=r")
- (mem:DI (match_operand:SI 1 "register_operand" "r")))
- (clobber (match_operand:DI 2 "register_operand" "=&f"))]
- "!TARGET_64BIT && !TARGET_SOFT_FLOAT"
- "{fldds|fldd} 0(%1),%2\;{fstds|fstd} %2,-16(%%sp)\;{ldws|ldw} -16(%%sp),%0\;{ldws|ldw} -12(%%sp),%R0"
- [(set_attr "type" "move")
- (set_attr "length" "16")])
-
-;; Implement atomic DImode store using copy and 64-bit floating point store.
-
-(define_expand "atomic_storedi"
- [(match_operand:DI 0 "memory_operand") ;; memory
- (match_operand:DI 1 "register_operand") ;; val out
- (match_operand:SI 2 "const_int_operand")] ;; model
- "!TARGET_64BIT && !TARGET_SOFT_FLOAT"
-{
- enum memmodel model = (enum memmodel) INTVAL (operands[2]);
- operands[0] = force_reg (SImode, XEXP (operands[0], 0));
- operands[2] = gen_reg_rtx (DImode);
- expand_mem_thread_fence (model);
- emit_insn (gen_atomic_storedi_1 (operands[0], operands[1], operands[2]));
- if ((model & MEMMODEL_MASK) == MEMMODEL_SEQ_CST)
- expand_mem_thread_fence (model);
- DONE;
-})
-
-(define_insn "atomic_storedi_1"
- [(set (mem:DI (match_operand:SI 0 "register_operand" "r"))
- (match_operand:DI 1 "register_operand" "r"))
- (clobber (match_operand:DI 2 "register_operand" "=&f"))]
- "!TARGET_64BIT && !TARGET_SOFT_FLOAT"
- "{stws|stw} %1,-16(%%sp)\;{stws|stw} %R1,-12(%%sp)\;{fldds|fldd} -16(%%sp),%2\;{fstds|fstd} %2,0(%0)"
- [(set_attr "type" "move")
- (set_attr "length" "16")])
-
;; Compare instructions.
;; This controls RTL generation and register allocation.
[(match_operand:SI 1 "reg_or_0_operand" "rM")
(match_operand:SI 2 "arith11_operand" "rI")]))]
""
- "{com%I2clr|cmp%I2clr},%B3 %2,%1,%0\;ldi 1,%0"
+ "{com%I2clr|cmp%I2clr},%B3 %2,%r1,%0\;ldi 1,%0"
[(set_attr "type" "binary")
(set_attr "length" "8")])
[(match_operand:DI 1 "reg_or_0_operand" "rM")
(match_operand:DI 2 "arith11_operand" "rI")]))]
"TARGET_64BIT"
- "cmp%I2clr,*%B3 %2,%1,%0\;ldi 1,%0"
+ "cmp%I2clr,*%B3 %2,%r1,%0\;ldi 1,%0"
[(set_attr "type" "binary")
(set_attr "length" "8")])
[(match_operand:SI 4 "reg_or_0_operand" "rM")
(match_operand:SI 5 "arith11_operand" "rI")])))]
""
- "{com%I2clr|cmp%I2clr},%S3 %2,%1,%%r0\;{com%I5clr|cmp%I5clr},%B6 %5,%4,%0\;ldi 1,%0"
+ "{com%I2clr|cmp%I2clr},%S3 %2,%r1,%%r0\;{com%I5clr|cmp%I5clr},%B6 %5,%r4,%0\;ldi 1,%0"
[(set_attr "type" "binary")
(set_attr "length" "12")])
[(match_operand:DI 4 "reg_or_0_operand" "rM")
(match_operand:DI 5 "arith11_operand" "rI")])))]
"TARGET_64BIT"
- "cmp%I2clr,*%S3 %2,%1,%%r0\;cmp%I5clr,*%B6 %5,%4,%0\;ldi 1,%0"
+ "cmp%I2clr,*%S3 %2,%r1,%%r0\;cmp%I5clr,*%B6 %5,%r4,%0\;ldi 1,%0"
[(set_attr "type" "binary")
(set_attr "length" "12")])
[(match_operand:SI 1 "reg_or_0_operand" "rM")
(match_operand:SI 2 "arith11_operand" "rI")])))]
""
- "{com%I2clr|cmp%I2clr},%B3 %2,%1,%0\;ldi -1,%0"
+ "{com%I2clr|cmp%I2clr},%B3 %2,%r1,%0\;ldi -1,%0"
[(set_attr "type" "binary")
(set_attr "length" "8")])
[(match_operand:DI 1 "reg_or_0_operand" "rM")
(match_operand:DI 2 "arith11_operand" "rI")])))]
"TARGET_64BIT"
- "cmp%I2clr,*%B3 %2,%1,%0\;ldi -1,%0"
+ "cmp%I2clr,*%B3 %2,%r1,%0\;ldi -1,%0"
[(set_attr "type" "binary")
(set_attr "length" "8")])
[(set_attr "type" "multi,multi")
(set_attr "length" "8,8")])
+;; Absolute value for sub-word modes.  The signed extract right-justifies
+;; and sign-extends the low 8 (QI) or 16 (HI) bits of %1 into %0; the
+;; ",>=" completer nullifies the following subi when the extracted value
+;; is non-negative, so the negation (%0 = 0 - %0) only takes effect for
+;; negative inputs.
(define_insn "absqi2"
+ [(set (match_operand:QI 0 "register_operand" "=r")
+ (abs:QI (match_operand:QI 1 "register_operand" "r")))]
+ ""
+ "{extrs|extrw,s},>= %1,31,8,%0\;subi 0,%0,%0"
+ [(set_attr "type" "multi")
+ (set_attr "length" "8")])
+
+(define_insn "abshi2"
+ [(set (match_operand:HI 0 "register_operand" "=r")
+ (abs:HI (match_operand:HI 1 "register_operand" "r")))]
+ ""
+ "{extrs|extrw,s},>= %1,31,16,%0\;subi 0,%0,%0"
+ [(set_attr "type" "multi")
+ (set_attr "length" "8")])
+
(define_insn "abssi2"
[(set (match_operand:SI 0 "register_operand" "=r")
(abs:SI (match_operand:SI 1 "register_operand" "r")))]
[(set_attr "type" "multi")
(set_attr "length" "8")])
+;; Swap the two bytes of a halfword: extru right-justifies the upper byte
+;; of the halfword into %0, then dep deposits the original low byte of %1
+;; into that upper-byte position.  The "&" earlyclobber is required
+;; because %0 is written by the first instruction while %1 is still read
+;; by the second.
+(define_insn "bswaphi2"
+ [(set (match_operand:HI 0 "register_operand" "=&r")
+ (bswap:HI (match_operand:HI 1 "register_operand" "r")))]
+ ""
+ "{extru|extrw,u} %1,23,8,%0\;{dep|depw} %1,23,8,%0"
+ [(set_attr "type" "multi")
+ (set_attr "length" "8")])
+
+;; Reverse the four bytes of a word (ABCD -> DCBA) in three steps:
+;; shd %1,%1,16 rotates by 16 giving CDAB, depw plants byte B into the
+;; second byte slot giving CBAB, and the final shd shifts the 64-bit
+;; concatenation of %1 (ABCD) and %0 (CBAB) right by 8 to yield DCBA.
+;; %0 is earlyclobbered since %1 is read again after %0 is written.
+(define_insn "bswapsi2"
+ [(set (match_operand:SI 0 "register_operand" "=&r")
+ (bswap:SI (match_operand:SI 1 "register_operand" "r")))]
+ ""
+ "{shd|shrpw} %1,%1,16,%0\;{dep|depw} %0,15,8,%0\;{shd|shrpw} %1,%0,8,%0"
+ [(set_attr "type" "multi")
+ (set_attr "length" "12")])
+
+;; 64-bit byte swap, PA 2.0 (TARGET_64BIT) only: permh,3210 reverses the
+;; order of the four halfwords into the scratch, then hshl/hshr,u swap
+;; the two bytes within each halfword and the final or merges the two
+;; shifted copies.  A scratch register is needed to hold the permuted
+;; intermediate.
+(define_insn "bswapdi2"
+ [(set (match_operand:DI 0 "register_operand" "=&r")
+ (bswap:DI (match_operand:DI 1 "register_operand" "r")))
+ (clobber (match_scratch:DI 2 "=r"))]
+ "TARGET_64BIT"
+ "permh,3210 %1,%2\;hshl %2,8,%0\;hshr,u %2,8,%2\;or %0,%2,%0"
+ [(set_attr "type" "multi")
+ (set_attr "length" "16")])
+
;;; Experimental conditional move patterns
(define_expand "movsicc"
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
; computes the address to be deleted if the register it sets is dead.
(define_peephole2
[(set (match_operand:SI 0 "register_operand" "")
- (plus:SI (mult:SI (match_operand:SI 1 "register_operand" "")
- (const_int 4))
+ (plus:SI (ashift:SI (match_operand:SI 1 "register_operand" "")
+ (const_int 2))
(match_operand:SI 2 "register_operand" "")))
(set (mem:SI (match_dup 0))
(match_operand:SI 3 "register_operand" ""))]
&& FP_REGNO_P (REGNO (operands[3]))"
[(set (mem:SI (plus:SI (mult:SI (match_dup 1) (const_int 4)) (match_dup 2)))
(match_dup 3))
- (set (match_dup 0) (plus:SI (mult:SI (match_dup 1) (const_int 4))
- (match_dup 2)))]
- "")
-
-(define_peephole2
- [(set (match_operand:SI 0 "register_operand" "")
- (plus:SI (match_operand:SI 2 "register_operand" "")
- (mult:SI (match_operand:SI 1 "register_operand" "")
- (const_int 4))))
- (set (mem:SI (match_dup 0))
- (match_operand:SI 3 "register_operand" ""))]
- "!TARGET_SOFT_FLOAT
- && !TARGET_DISABLE_INDEXING
- && REG_OK_FOR_BASE_P (operands[2])
- && FP_REGNO_P (REGNO (operands[3]))"
- [(set (mem:SI (plus:SI (mult:SI (match_dup 1) (const_int 4)) (match_dup 2)))
- (match_dup 3))
- (set (match_dup 0) (plus:SI (mult:SI (match_dup 1) (const_int 4))
+ (set (match_dup 0) (plus:SI (ashift:SI (match_dup 1) (const_int 2))
(match_dup 2)))]
"")
(define_peephole2
[(set (match_operand:DI 0 "register_operand" "")
- (plus:DI (mult:DI (match_operand:DI 1 "register_operand" "")
- (const_int 4))
+ (plus:DI (ashift:DI (match_operand:DI 1 "register_operand" "")
+ (const_int 2))
(match_operand:DI 2 "register_operand" "")))
(set (mem:SI (match_dup 0))
(match_operand:SI 3 "register_operand" ""))]
&& FP_REGNO_P (REGNO (operands[3]))"
[(set (mem:SI (plus:DI (mult:DI (match_dup 1) (const_int 4)) (match_dup 2)))
(match_dup 3))
- (set (match_dup 0) (plus:DI (mult:DI (match_dup 1) (const_int 4))
- (match_dup 2)))]
- "")
-
-(define_peephole2
- [(set (match_operand:DI 0 "register_operand" "")
- (plus:DI (match_operand:DI 2 "register_operand" "")
- (mult:DI (match_operand:DI 1 "register_operand" "")
- (const_int 4))))
- (set (mem:SI (match_dup 0))
- (match_operand:SI 3 "register_operand" ""))]
- "!TARGET_SOFT_FLOAT
- && !TARGET_DISABLE_INDEXING
- && TARGET_64BIT
- && REG_OK_FOR_BASE_P (operands[2])
- && FP_REGNO_P (REGNO (operands[3]))"
- [(set (mem:SI (plus:DI (mult:DI (match_dup 1) (const_int 4)) (match_dup 2)))
- (match_dup 3))
- (set (match_dup 0) (plus:DI (mult:DI (match_dup 1) (const_int 4))
+ (set (match_dup 0) (plus:DI (ashift:DI (match_dup 1) (const_int 2))
(match_dup 2)))]
"")
xoperands[0] = operands[0];
xoperands[1] = operands[1];
- xoperands[2] = gen_label_rtx ();
- (*targetm.asm_out.internal_label) (asm_out_file, \"L\",
- CODE_LABEL_NUMBER (xoperands[2]));
- output_asm_insn (\"mfia %0\", xoperands);
-
- /* If we're trying to load the address of a label that happens to be
- close, then we can use a shorter sequence. */
if (GET_CODE (operands[1]) == LABEL_REF
- && !LABEL_REF_NONLOCAL_P (operands[1])
- && INSN_ADDRESSES_SET_P ()
- && abs (INSN_ADDRESSES (INSN_UID (XEXP (operands[1], 0)))
- - INSN_ADDRESSES (INSN_UID (insn))) < 8100)
- output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
+ && !LABEL_REF_NONLOCAL_P (operands[1]))
+ {
+ xoperands[2] = gen_label_rtx ();
+ (*targetm.asm_out.internal_label) (asm_out_file, \"L\",
+ CODE_LABEL_NUMBER (xoperands[2]));
+ output_asm_insn (\"mfia %0\", xoperands);
+
+ /* If we're trying to load the address of a label that happens to be
+ close, then we can use a shorter sequence. */
+ if (INSN_ADDRESSES_SET_P ()
+ && abs (INSN_ADDRESSES (INSN_UID (XEXP (operands[1], 0)))
+ - INSN_ADDRESSES (INSN_UID (insn))) < 8100)
+ output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
+ else
+ {
+ output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
+ output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
+ }
+ }
else
{
- output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
- output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
+ /* Load using linkage table. */
+ if (TARGET_64BIT)
+ {
+ output_asm_insn (\"addil LT%%%1,%%r27\", xoperands);
+ output_asm_insn (\"ldd RT%%%1(%0),%0\", xoperands);
+ }
+ else
+ {
+ output_asm_insn (\"addil LT%%%1,%%r19\", xoperands);
+ output_asm_insn (\"ldw RT%%%1(%0),%0\", xoperands);
+ }
}
return \"\";
}"
xoperands[0] = operands[0];
xoperands[1] = operands[1];
- xoperands[2] = gen_label_rtx ();
-
- output_asm_insn (\"bl .+8,%0\", xoperands);
- output_asm_insn (\"depi 0,31,2,%0\", xoperands);
- (*targetm.asm_out.internal_label) (asm_out_file, \"L\",
- CODE_LABEL_NUMBER (xoperands[2]));
- /* If we're trying to load the address of a label that happens to be
- close, then we can use a shorter sequence. */
if (GET_CODE (operands[1]) == LABEL_REF
- && !LABEL_REF_NONLOCAL_P (operands[1])
- && INSN_ADDRESSES_SET_P ()
- && abs (INSN_ADDRESSES (INSN_UID (XEXP (operands[1], 0)))
- - INSN_ADDRESSES (INSN_UID (insn))) < 8100)
- output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
+ && !LABEL_REF_NONLOCAL_P (operands[1]))
+ {
+ xoperands[2] = gen_label_rtx ();
+ output_asm_insn (\"bl .+8,%0\", xoperands);
+ output_asm_insn (\"depi 0,31,2,%0\", xoperands);
+ (*targetm.asm_out.internal_label) (asm_out_file, \"L\",
+ CODE_LABEL_NUMBER (xoperands[2]));
+
+ /* If we're trying to load the address of a label that happens to be
+ close, then we can use a shorter sequence. */
+ if (INSN_ADDRESSES_SET_P ()
+ && abs (INSN_ADDRESSES (INSN_UID (XEXP (operands[1], 0)))
+ - INSN_ADDRESSES (INSN_UID (insn))) < 8100)
+ output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
+ else
+ {
+ output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
+ output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
+ }
+ }
else
{
- output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
- output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
+ /* Load using linkage table. */
+ output_asm_insn (\"addil LT%%%1,%%r19\", xoperands);
+ output_asm_insn (\"ldw RT%%%1(%0),%0\", xoperands);
}
return \"\";
}"
[(set_attr "type" "binary")
(set_attr "length" "4")])
+(define_insn ""
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
+ (unspec:SI [(match_operand 2 "" "")] UNSPEC_DLTIND14R)))]
+ "symbolic_operand (operands[2], Pmode)
+ && ! function_label_operand (operands[2], Pmode)
+ && flag_pic"
+ "ldo RT'%G2(%1),%0"
+ [(set_attr "type" "binary")
+ (set_attr "length" "4")])
+
+(define_insn ""
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (lo_sum:DI (match_operand:DI 1 "register_operand" "r")
+ (unspec:DI [(match_operand 2 "" "")] UNSPEC_DLTIND14R)))]
+ "symbolic_operand (operands[2], Pmode)
+ && ! function_label_operand (operands[2], Pmode)
+ && TARGET_64BIT
+ && flag_pic"
+ "ldo RT'%G2(%1),%0"
+ [(set_attr "type" "binary")
+ (set_attr "length" "4")])
+
;; Always use addil rather than ldil;add sequences. This allows the
;; HP linker to eliminate the dp relocation if the symbolic operand
;; lives in the TEXT space.
;; a 2 insn store with some creative RTL rewriting.
(define_split
[(set (mem:SI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
- (match_operand:SI 1 "shadd_operand" ""))
+ (match_operand:SI 1 "mem_shadd_operand" ""))
(plus:SI (match_operand:SI 2 "register_operand" "")
(match_operand:SI 3 "const_int_operand" ""))))
(match_operand:SI 4 "register_operand" ""))
(clobber (match_operand:SI 5 "register_operand" ""))]
""
- [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
+ [(set (match_dup 5) (plus:SI (ashift:SI (match_dup 0) (match_dup 1))
(match_dup 2)))
(set (mem:SI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
- "")
+ "
+{
+ operands[1] = GEN_INT (exact_log2 (INTVAL (operands[1])));
+
+}")
(define_split
[(set (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
- (match_operand:SI 1 "shadd_operand" ""))
+ (match_operand:SI 1 "mem_shadd_operand" ""))
(plus:SI (match_operand:SI 2 "register_operand" "")
(match_operand:SI 3 "const_int_operand" ""))))
(match_operand:HI 4 "register_operand" ""))
(clobber (match_operand:SI 5 "register_operand" ""))]
""
- [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
+ [(set (match_dup 5) (plus:SI (ashift:SI (match_dup 0) (match_dup 1))
(match_dup 2)))
(set (mem:HI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
- "")
+ "
+{
+ operands[1] = GEN_INT (exact_log2 (INTVAL (operands[1])));
+
+}")
(define_split
[(set (mem:QI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
- (match_operand:SI 1 "shadd_operand" ""))
+ (match_operand:SI 1 "mem_shadd_operand" ""))
(plus:SI (match_operand:SI 2 "register_operand" "")
(match_operand:SI 3 "const_int_operand" ""))))
(match_operand:QI 4 "register_operand" ""))
(clobber (match_operand:SI 5 "register_operand" ""))]
""
- [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
+ [(set (match_dup 5) (plus:SI (ashift:SI (match_dup 0) (match_dup 1))
(match_dup 2)))
(set (mem:QI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
- "")
+ "
+{
+ operands[1] = GEN_INT (exact_log2 (INTVAL (operands[1])));
+
+}")
(define_expand "movhi"
[(set (match_operand:HI 0 "general_operand" "")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
;; The definition of this insn does not really explain what it does,
;; but it should suffice that anything generated as this insn will be
-;; recognized as a movmemsi operation, and that it will not successfully
+;; recognized as a cpymemsi operation, and that it will not successfully
;; combine with anything.
-(define_expand "movmemsi"
+(define_expand "cpymemsi"
[(parallel [(set (match_operand:BLK 0 "" "")
(match_operand:BLK 1 "" ""))
(clobber (match_dup 4))
;; operands 0 and 1 are both equivalent to symbolic MEMs. Thus, we are
;; forced to internally copy operands 0 and 1 to operands 7 and 8,
;; respectively. We then split or peephole optimize after reload.
-(define_insn "movmemsi_prereload"
+(define_insn "cpymemsi_prereload"
[(set (mem:BLK (match_operand:SI 0 "register_operand" "r,r"))
(mem:BLK (match_operand:SI 1 "register_operand" "r,r")))
(clobber (match_operand:SI 2 "register_operand" "=&r,&r")) ;loop cnt/tmp
operands[7] = addr;
else
{
- emit_insn (gen_rtx_SET (VOIDmode, operands[7], addr));
+ emit_insn (gen_rtx_SET (operands[7], addr));
operands[0] = replace_equiv_address (operands[0], operands[7]);
}
operands[8] = addr;
else
{
- emit_insn (gen_rtx_SET (VOIDmode, operands[8], addr));
+ emit_insn (gen_rtx_SET (operands[8], addr));
operands[1] = replace_equiv_address (operands[1], operands[8]);
}
}")
-(define_insn "movmemsi_postreload"
+(define_insn "cpymemsi_postreload"
[(set (mem:BLK (match_operand:SI 0 "register_operand" "+r,r"))
(mem:BLK (match_operand:SI 1 "register_operand" "+r,r")))
(clobber (match_operand:SI 2 "register_operand" "=&r,&r")) ;loop cnt/tmp
"* return pa_output_block_move (operands, !which_alternative);"
[(set_attr "type" "multi,multi")])
-(define_expand "movmemdi"
+(define_expand "cpymemdi"
[(parallel [(set (match_operand:BLK 0 "" "")
(match_operand:BLK 1 "" ""))
(clobber (match_dup 4))
;; operands 0 and 1 are both equivalent to symbolic MEMs. Thus, we are
;; forced to internally copy operands 0 and 1 to operands 7 and 8,
;; respectively. We then split or peephole optimize after reload.
-(define_insn "movmemdi_prereload"
+(define_insn "cpymemdi_prereload"
[(set (mem:BLK (match_operand:DI 0 "register_operand" "r,r"))
(mem:BLK (match_operand:DI 1 "register_operand" "r,r")))
(clobber (match_operand:DI 2 "register_operand" "=&r,&r")) ;loop cnt/tmp
operands[7] = addr;
else
{
- emit_insn (gen_rtx_SET (VOIDmode, operands[7], addr));
+ emit_insn (gen_rtx_SET (operands[7], addr));
operands[0] = replace_equiv_address (operands[0], operands[7]);
}
operands[8] = addr;
else
{
- emit_insn (gen_rtx_SET (VOIDmode, operands[8], addr));
+ emit_insn (gen_rtx_SET (operands[8], addr));
operands[1] = replace_equiv_address (operands[1], operands[8]);
}
}")
-(define_insn "movmemdi_postreload"
+(define_insn "cpymemdi_postreload"
[(set (mem:BLK (match_operand:DI 0 "register_operand" "+r,r"))
(mem:BLK (match_operand:DI 1 "register_operand" "+r,r")))
(clobber (match_operand:DI 2 "register_operand" "=&r,&r")) ;loop cnt/tmp
operands[4] = addr;
else
{
- emit_insn (gen_rtx_SET (VOIDmode, operands[4], addr));
+ emit_insn (gen_rtx_SET (operands[4], addr));
operands[0] = replace_equiv_address (operands[0], operands[4]);
}
}")
operands[4] = addr;
else
{
- emit_insn (gen_rtx_SET (VOIDmode, operands[4], addr));
+ emit_insn (gen_rtx_SET (operands[4], addr));
operands[0] = replace_equiv_address (operands[0], operands[4]);
}
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
(define_peephole2
[(set (match_operand:SI 0 "register_operand" "")
- (plus:SI (mult:SI (match_operand:SI 1 "register_operand" "")
- (const_int 8))
+ (plus:SI (ashift:SI (match_operand:SI 1 "register_operand" "")
+ (const_int 3))
(match_operand:SI 2 "register_operand" "")))
(set (mem:DF (match_dup 0))
(match_operand:DF 3 "register_operand" ""))]
&& FP_REGNO_P (REGNO (operands[3]))"
[(set (mem:DF (plus:SI (mult:SI (match_dup 1) (const_int 8)) (match_dup 2)))
(match_dup 3))
- (set (match_dup 0) (plus:SI (mult:SI (match_dup 1) (const_int 8))
+ (set (match_dup 0) (plus:SI (ashift:SI (match_dup 1) (const_int 3))
(match_dup 2)))]
"")
(define_peephole2
[(set (match_operand:SI 0 "register_operand" "")
(plus:SI (match_operand:SI 2 "register_operand" "")
- (mult:SI (match_operand:SI 1 "register_operand" "")
- (const_int 8))))
+ (ashift:SI (match_operand:SI 1 "register_operand" "")
+ (const_int 3))))
(set (mem:DF (match_dup 0))
(match_operand:DF 3 "register_operand" ""))]
"!TARGET_SOFT_FLOAT
&& FP_REGNO_P (REGNO (operands[3]))"
[(set (mem:DF (plus:SI (mult:SI (match_dup 1) (const_int 8)) (match_dup 2)))
(match_dup 3))
- (set (match_dup 0) (plus:SI (mult:SI (match_dup 1) (const_int 8))
+ (set (match_dup 0) (plus:SI (ashift:SI (match_dup 1) (const_int 3))
(match_dup 2)))]
"")
(define_peephole2
[(set (match_operand:DI 0 "register_operand" "")
- (plus:DI (mult:DI (match_operand:DI 1 "register_operand" "")
- (const_int 8))
+ (plus:DI (ashift:DI (match_operand:DI 1 "register_operand" "")
+ (const_int 3))
(match_operand:DI 2 "register_operand" "")))
(set (mem:DF (match_dup 0))
(match_operand:DF 3 "register_operand" ""))]
&& FP_REGNO_P (REGNO (operands[3]))"
[(set (mem:DF (plus:DI (mult:DI (match_dup 1) (const_int 8)) (match_dup 2)))
(match_dup 3))
- (set (match_dup 0) (plus:DI (mult:DI (match_dup 1) (const_int 8))
+ (set (match_dup 0) (plus:DI (ashift:DI (match_dup 1) (const_int 3))
(match_dup 2)))]
"")
(define_peephole2
[(set (match_operand:DI 0 "register_operand" "")
(plus:DI (match_operand:DI 2 "register_operand" "")
- (mult:DI (match_operand:DI 1 "register_operand" "")
- (const_int 8))))
+ (ashift:DI (match_operand:DI 1 "register_operand" "")
+ (const_int 3))))
(set (mem:DF (match_dup 0))
(match_operand:DF 3 "register_operand" ""))]
"!TARGET_SOFT_FLOAT
&& FP_REGNO_P (REGNO (operands[3]))"
[(set (mem:DF (plus:DI (mult:DI (match_dup 1) (const_int 8)) (match_dup 2)))
(match_dup 3))
- (set (match_dup 0) (plus:DI (mult:DI (match_dup 1) (const_int 8))
+ (set (match_dup 0) (plus:DI (ashift:DI (match_dup 1) (const_int 3))
(match_dup 2)))]
"")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
(define_peephole2
[(set (match_operand:DI 0 "register_operand" "")
- (plus:DI (mult:DI (match_operand:DI 1 "register_operand" "")
- (const_int 8))
+ (plus:DI (ashift:DI (match_operand:DI 1 "register_operand" "")
+ (const_int 3))
(match_operand:DI 2 "register_operand" "")))
(set (mem:DI (match_dup 0))
(match_operand:DI 3 "register_operand" ""))]
&& FP_REGNO_P (REGNO (operands[3]))"
[(set (mem:DI (plus:DI (mult:DI (match_dup 1) (const_int 8)) (match_dup 2)))
(match_dup 3))
- (set (match_dup 0) (plus:DI (mult:DI (match_dup 1) (const_int 8))
- (match_dup 2)))]
- "")
-
-(define_peephole2
- [(set (match_operand:DI 0 "register_operand" "")
- (plus:DI (match_operand:DI 2 "register_operand" "")
- (mult:DI (match_operand:DI 1 "register_operand" "")
- (const_int 8))))
- (set (mem:DI (match_dup 0))
- (match_operand:DI 3 "register_operand" ""))]
- "!TARGET_SOFT_FLOAT
- && !TARGET_DISABLE_INDEXING
- && TARGET_64BIT
- && REG_OK_FOR_BASE_P (operands[2])
- && FP_REGNO_P (REGNO (operands[3]))"
- [(set (mem:DI (plus:DI (mult:DI (match_dup 1) (const_int 8)) (match_dup 2)))
- (match_dup 3))
- (set (match_dup 0) (plus:DI (mult:DI (match_dup 1) (const_int 8))
+ (set (match_dup 0) (plus:DI (ashift:DI (match_dup 1) (const_int 3))
(match_dup 2)))]
"")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
DONE;
/* We don't want the clobber emitted, so handle this ourselves. */
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
+ emit_insn (gen_rtx_SET (operands[0], operands[1]));
DONE;
}")
(define_peephole2
[(set (match_operand:SI 0 "register_operand" "")
- (plus:SI (mult:SI (match_operand:SI 1 "register_operand" "")
- (const_int 4))
+ (plus:SI (ashift:SI (match_operand:SI 1 "register_operand" "")
+ (const_int 2))
(match_operand:SI 2 "register_operand" "")))
(set (mem:SF (match_dup 0))
(match_operand:SF 3 "register_operand" ""))]
&& FP_REGNO_P (REGNO (operands[3]))"
[(set (mem:SF (plus:SI (mult:SI (match_dup 1) (const_int 4)) (match_dup 2)))
(match_dup 3))
- (set (match_dup 0) (plus:SI (mult:SI (match_dup 1) (const_int 4))
- (match_dup 2)))]
- "")
-
-(define_peephole2
- [(set (match_operand:SI 0 "register_operand" "")
- (plus:SI (match_operand:SI 2 "register_operand" "")
- (mult:SI (match_operand:SI 1 "register_operand" "")
- (const_int 4))))
- (set (mem:SF (match_dup 0))
- (match_operand:SF 3 "register_operand" ""))]
- "!TARGET_SOFT_FLOAT
- && !TARGET_DISABLE_INDEXING
- && REG_OK_FOR_BASE_P (operands[2])
- && FP_REGNO_P (REGNO (operands[3]))"
- [(set (mem:SF (plus:SI (mult:SI (match_dup 1) (const_int 4)) (match_dup 2)))
- (match_dup 3))
- (set (match_dup 0) (plus:SI (mult:SI (match_dup 1) (const_int 4))
+ (set (match_dup 0) (plus:SI (ashift:SI (match_dup 1) (const_int 2))
(match_dup 2)))]
"")
(define_peephole2
[(set (match_operand:DI 0 "register_operand" "")
- (plus:DI (mult:DI (match_operand:DI 1 "register_operand" "")
- (const_int 4))
+ (plus:DI (ashift:DI (match_operand:DI 1 "register_operand" "")
+ (const_int 2))
(match_operand:DI 2 "register_operand" "")))
(set (mem:SF (match_dup 0))
(match_operand:SF 3 "register_operand" ""))]
&& FP_REGNO_P (REGNO (operands[3]))"
[(set (mem:SF (plus:DI (mult:DI (match_dup 1) (const_int 4)) (match_dup 2)))
(match_dup 3))
- (set (match_dup 0) (plus:DI (mult:DI (match_dup 1) (const_int 4))
- (match_dup 2)))]
- "")
-
-(define_peephole2
- [(set (match_operand:DI 0 "register_operand" "")
- (plus:DI (match_operand:DI 2 "register_operand" "")
- (mult:DI (match_operand:DI 1 "register_operand" "")
- (const_int 4))))
- (set (mem:SF (match_dup 0))
- (match_operand:SF 3 "register_operand" ""))]
- "!TARGET_SOFT_FLOAT
- && !TARGET_DISABLE_INDEXING
- && TARGET_64BIT
- && REG_OK_FOR_BASE_P (operands[2])
- && FP_REGNO_P (REGNO (operands[3]))"
- [(set (mem:SF (plus:DI (mult:DI (match_dup 1) (const_int 4)) (match_dup 2)))
- (match_dup 3))
- (set (match_dup 0) (plus:DI (mult:DI (match_dup 1) (const_int 4))
+ (set (match_dup 0) (plus:DI (ashift:DI (match_dup 1) (const_int 2))
(match_dup 2)))]
"")
(plus:SI (match_operand:SI 1 "register_operand" "")
(match_operand:SI 2 "const_int_operand" "")))
(clobber (match_operand:SI 4 "register_operand" ""))]
- "! pa_cint_ok_for_move (INTVAL (operands[2]))
+ "! pa_cint_ok_for_move (UINTVAL (operands[2]))
&& VAL_14_BITS_P (INTVAL (operands[2]) >> 1)"
[(set (match_dup 4) (plus:SI (match_dup 1) (match_dup 2)))
(set (match_dup 0) (plus:SI (match_dup 4) (match_dup 3)))]
(plus:SI (match_operand:SI 1 "register_operand" "")
(match_operand:SI 2 "const_int_operand" "")))
(clobber (match_operand:SI 4 "register_operand" ""))]
- "! pa_cint_ok_for_move (INTVAL (operands[2]))"
+ "! pa_cint_ok_for_move (UINTVAL (operands[2]))"
[(set (match_dup 4) (match_dup 2))
- (set (match_dup 0) (plus:SI (mult:SI (match_dup 4) (match_dup 3))
+ (set (match_dup 0) (plus:SI (ashift:SI (match_dup 4) (match_dup 3))
(match_dup 1)))]
"
{
- HOST_WIDE_INT intval = INTVAL (operands[2]);
+ unsigned HOST_WIDE_INT intval = UINTVAL (operands[2]);
/* Try dividing the constant by 2, then 4, and finally 8 to see
if we can get a constant which can be loaded into a register
if (intval % 2 == 0 && pa_cint_ok_for_move (intval / 2))
{
operands[2] = GEN_INT (intval / 2);
- operands[3] = const2_rtx;
+ operands[3] = const1_rtx;
}
else if (intval % 4 == 0 && pa_cint_ok_for_move (intval / 4))
{
operands[2] = GEN_INT (intval / 4);
- operands[3] = GEN_INT (4);
+ operands[3] = const2_rtx;
}
else if (intval % 8 == 0 && pa_cint_ok_for_move (intval / 8))
{
operands[2] = GEN_INT (intval / 8);
- operands[3] = GEN_INT (8);
+ operands[3] = GEN_INT (3);
}
else if (pa_cint_ok_for_move (-intval))
{
- emit_insn (gen_rtx_SET (VOIDmode, operands[4], GEN_INT (-intval)));
+ emit_insn (gen_rtx_SET (operands[4], GEN_INT (-intval)));
emit_insn (gen_subsi3 (operands[0], operands[1], operands[4]));
DONE;
}
[(set_attr "type" "binary,binary")
(set_attr "length" "4,4")])
+;; Trap instructions.
+
+;; Unconditional trap (used for __builtin_trap).  The add-immediate-and-
+;; trap-on-condition form computes 1 + %r0 = 1, which is always nonzero,
+;; so the "<>" (not-equal) condition holds and the instruction always
+;; traps -- TODO confirm completer semantics against the PA-RISC manual.
+(define_insn "trap"
+ [(trap_if (const_int 1) (const_int 0))]
+ ""
+ "{addit|addi,tc},<> 1,%%r0,%%r0"
+ [(set_attr "type" "trap")
+ (set_attr "length" "4")])
+
;; Clobbering a "register_operand" instead of a match_scratch
;; in operand3 of millicode calls avoids spilling %r1 and
;; produces better code.
(define_insn "umulsidi3"
[(set (match_operand:DI 0 "register_operand" "=f")
- (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
- (zero_extend:DI (match_operand:SI 2 "nonimmediate_operand" "f"))))]
+ (mult:DI (zero_extend:DI (match_operand:SI 1 "register_operand" "f"))
+ (zero_extend:DI (match_operand:SI 2 "register_operand" "f"))))]
"TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
"xmpyu %1,%2,%0"
[(set_attr "type" "fpmuldbl")
(define_insn ""
[(set (match_operand:DI 0 "register_operand" "=f")
- (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
+ (mult:DI (zero_extend:DI (match_operand:SI 1 "register_operand" "f"))
(match_operand:DI 2 "uint32_operand" "f")))]
"TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT && !TARGET_64BIT"
"xmpyu %1,%R2,%0"
(define_insn ""
[(set (match_operand:DI 0 "register_operand" "=f")
- (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
+ (mult:DI (zero_extend:DI (match_operand:SI 1 "register_operand" "f"))
(match_operand:DI 2 "uint32_operand" "f")))]
"TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT && TARGET_64BIT"
"xmpyu %1,%2R,%0"
[(set_attr "type" "load")
(set_attr "length" "4")])
+;; Shift-and-add, canonical ashift form: %0 = (%2 << %3) + %1.
+;; Operand 3 (shadd_operand) is the shift count.
+(define_insn ""
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (plus:SI (ashift:SI (match_operand:SI 2 "register_operand" "r")
+ (match_operand:SI 3 "shadd_operand" ""))
+ (match_operand:SI 1 "register_operand" "r")))]
+ ""
+ "{sh%o3addl %2,%1,%0|shladd,l %2,%o3,%1,%0} "
+ [(set_attr "type" "binary")
+ (set_attr "length" "4")])
+
+;; Shift-and-add, mult form as it appears inside memory addresses:
+;; operand 3 (mem_shadd_operand) is a power-of-two multiplier that is
+;; converted to the shift count expected by sh*addl/shladd at output time.
(define_insn ""
 [(set (match_operand:SI 0 "register_operand" "=r")
 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
- (match_operand:SI 3 "shadd_operand" ""))
+ (match_operand:SI 3 "mem_shadd_operand" ""))
 (match_operand:SI 1 "register_operand" "r")))]
 ""
- "{sh%O3addl %2,%1,%0|shladd,l %2,%O3,%1,%0} "
+ "*
+{
+ /* Convert the power-of-two multiplier to a shift count for output.  */
+ int shift_val = exact_log2 (INTVAL (operands[3]));
+ operands[3] = GEN_INT (shift_val);
+ return \"{sh%o3addl %2,%1,%0|shladd,l %2,%o3,%1,%0}\";
+}"
+ [(set_attr "type" "binary")
+ (set_attr "length" "4")])
+
+;; 64-bit shift-and-add, canonical ashift form: %0 = (%2 << %3) + %1.
+(define_insn ""
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (plus:DI (ashift:DI (match_operand:DI 2 "register_operand" "r")
+ (match_operand:DI 3 "shadd_operand" ""))
+ (match_operand:DI 1 "register_operand" "r")))]
+ "TARGET_64BIT"
+ "shladd,l %2,%o3,%1,%0"
 [(set_attr "type" "binary")
 (set_attr "length" "4")])
+;; 64-bit shift-and-add, mult form (memory-address shape): the
+;; power-of-two multiplier (mem_shadd_operand) is turned into the
+;; shift count for shladd when the insn is output.
(define_insn ""
 [(set (match_operand:DI 0 "register_operand" "=r")
 (plus:DI (mult:DI (match_operand:DI 2 "register_operand" "r")
- (match_operand:DI 3 "shadd_operand" ""))
+ (match_operand:DI 3 "mem_shadd_operand" ""))
 (match_operand:DI 1 "register_operand" "r")))]
 "TARGET_64BIT"
- "shladd,l %2,%O3,%1,%0"
+ "*
+{
+ /* Convert the power-of-two multiplier to a shift count for output.  */
+ int shift_val = exact_log2 (INTVAL (operands[3]));
+ operands[3] = GEN_INT (shift_val);
+ return \"shladd,l %2,%o3,%1,%0\";
+}"
 [(set_attr "type" "binary")
 (set_attr "length" "4")])
[(set_attr "type" "uncond_branch")
(set_attr "pa_combine_type" "uncond_branch")
(set (attr "length")
- (cond [(match_test "pa_jump_in_call_delay (insn)")
- (if_then_else (lt (abs (minus (match_dup 0)
- (plus (pc) (const_int 8))))
- (const_int MAX_12BIT_OFFSET))
- (const_int 4)
- (const_int 8))
- (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
+ (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
(const_int MAX_17BIT_OFFSET))
(const_int 4)
(match_test "TARGET_PORTABLE_RUNTIME")
;;; Hope this is only within a function...
+;; Indirect jump through a register (branch vectored, bv).  The operand's
+;; mode is now enforced by the pmode_register_operand predicate rather
+;; than an explicit word_mode condition.
(define_insn "indirect_jump"
- [(set (pc) (match_operand 0 "register_operand" "r"))]
- "GET_MODE (operands[0]) == word_mode"
+ [(set (pc) (match_operand 0 "pmode_register_operand" "r"))]
+ ""
 "bv%* %%r0(%0)"
 [(set_attr "type" "branch")
 (set_attr "length" "4")])
rtx stack = operands[2];
rtx fp = operands[3];
- lab = copy_to_reg (lab);
-
emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
- /* Restore the frame pointer. The virtual_stack_vars_rtx is saved
- instead of the hard_frame_pointer_rtx in the save area. As a
- result, an extra instruction is needed to adjust for the offset
- of the virtual stack variables and the hard frame pointer. */
- if (GET_CODE (fp) != REG)
- fp = force_reg (Pmode, fp);
- emit_move_insn (hard_frame_pointer_rtx, plus_constant (Pmode, fp, -8));
+ lab = copy_to_reg (lab);
+ /* Restore the stack and frame pointers. */
+ fp = copy_to_reg (fp);
emit_stack_restore (SAVE_NONLOCAL, stack);
+ /* Ensure the frame pointer move is not optimized. */
+ emit_insn (gen_blockage ());
+ emit_clobber (hard_frame_pointer_rtx);
+ emit_clobber (frame_pointer_rtx);
+ emit_move_insn (hard_frame_pointer_rtx, fp);
+
emit_use (hard_frame_pointer_rtx);
emit_use (stack_pointer_rtx);
if (TARGET_PORTABLE_RUNTIME)
op = force_reg (SImode, XEXP (operands[0], 0));
else
- op = XEXP (operands[0], 0);
+ {
+ op = XEXP (operands[0], 0);
+
+ /* Generate indirect long calls to non-local functions. */
+ if (TARGET_LONG_CALLS && GET_CODE (op) == SYMBOL_REF)
+ {
+ tree call_decl = SYMBOL_REF_DECL (op);
+ if (!(call_decl && targetm.binds_local_p (call_decl)))
+ op = force_reg (word_mode, op);
+ }
+ }
if (TARGET_64BIT)
{
"TARGET_64BIT"
"*
{
- pa_output_arg_descriptor (insn);
return pa_output_call (insn, operands[0], 0);
}"
[(set_attr "type" "call")
(define_insn "call_reg_64bit"
[(call (mem:SI (match_operand:DI 0 "register_operand" "r"))
(match_operand 1 "" "i"))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(clobber (match_operand 2))
(use (reg:DI 27))
(define_split
[(parallel [(call (mem:SI (match_operand 0 "register_operand" ""))
(match_operand 1 "" ""))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(clobber (match_operand 2))
(use (reg:DI 27))
[(set (match_dup 2) (reg:DI 27))
(parallel [(call (mem:SI (match_dup 0))
(match_dup 1))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(use (reg:DI 27))
(use (reg:DI 29))
(define_split
[(parallel [(call (mem:SI (match_operand 0 "register_operand" ""))
(match_operand 1 "" ""))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(clobber (match_operand 2))
(use (reg:DI 27))
[(set (match_dup 2) (reg:DI 27))
(parallel [(call (mem:SI (match_dup 0))
(match_dup 1))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(use (reg:DI 27))
(use (reg:DI 29))
(define_insn "*call_reg_64bit_post_reload"
[(call (mem:SI (match_operand:DI 0 "register_operand" "r"))
(match_operand 1 "" "i"))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(use (reg:DI 27))
(use (reg:DI 29))
rtx op;
rtx dst = operands[0];
rtx nb = operands[2];
+ bool call_powf = false;
if (TARGET_PORTABLE_RUNTIME)
op = force_reg (SImode, XEXP (operands[1], 0));
else
- op = XEXP (operands[1], 0);
+ {
+ op = XEXP (operands[1], 0);
+ if (GET_CODE (op) == SYMBOL_REF)
+ {
+ /* Handle special call to buggy powf function. */
+ if (TARGET_HPUX && !TARGET_DISABLE_FPREGS && !TARGET_SOFT_FLOAT
+ && !strcmp (targetm.strip_name_encoding (XSTR (op, 0)), "powf"))
+ call_powf = true;
+
+ /* Generate indirect long calls to non-local functions. */
+ else if (TARGET_LONG_CALLS)
+ {
+ tree call_decl = SYMBOL_REF_DECL (op);
+ if (!(call_decl && targetm.binds_local_p (call_decl)))
+ op = force_reg (word_mode, op);
+ }
+ }
+ }
if (TARGET_64BIT)
{
rtx r4 = gen_rtx_REG (word_mode, 4);
if (GET_CODE (op) == SYMBOL_REF)
{
- if (TARGET_HPUX && !TARGET_DISABLE_FPREGS && !TARGET_SOFT_FLOAT
- && !strcmp (targetm.strip_name_encoding (XSTR (op, 0)), "powf"))
+ if (call_powf)
emit_call_insn (gen_call_val_powf_64bit (dst, op, nb, r4));
else
emit_call_insn (gen_call_val_symref_64bit (dst, op, nb, r4));
{
rtx r4 = gen_rtx_REG (word_mode, 4);
- if (TARGET_HPUX && !TARGET_DISABLE_FPREGS && !TARGET_SOFT_FLOAT
- && !strcmp (targetm.strip_name_encoding (XSTR (op, 0)),
- "powf"))
+ if (call_powf)
emit_call_insn (gen_call_val_powf_pic (dst, op, nb, r4));
else
emit_call_insn (gen_call_val_symref_pic (dst, op, nb, r4));
}
else
{
- if (TARGET_HPUX && !TARGET_DISABLE_FPREGS && !TARGET_SOFT_FLOAT
- && !strcmp (targetm.strip_name_encoding (XSTR (op, 0)),
- "powf"))
+ if (call_powf)
emit_call_insn (gen_call_val_powf (dst, op, nb));
else
emit_call_insn (gen_call_val_symref (dst, op, nb));
"TARGET_64BIT"
"*
{
- pa_output_arg_descriptor (insn);
return pa_output_call (insn, operands[1], 0);
}"
[(set_attr "type" "call")
"TARGET_64BIT && TARGET_HPUX"
"*
{
- pa_output_arg_descriptor (insn);
return pa_output_call (insn, operands[1], 0);
}"
[(set_attr "type" "call")
[(set (match_operand 0 "" "")
(call (mem:SI (match_operand:DI 1 "register_operand" "r"))
(match_operand 2 "" "i")))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(clobber (match_operand 3))
(use (reg:DI 27))
[(parallel [(set (match_operand 0 "" "")
(call (mem:SI (match_operand:DI 1 "register_operand" ""))
(match_operand 2 "" "")))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(clobber (match_operand 3))
(use (reg:DI 27))
(parallel [(set (match_dup 0)
(call (mem:SI (match_dup 1))
(match_dup 2)))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(use (reg:DI 27))
(use (reg:DI 29))
[(parallel [(set (match_operand 0 "" "")
(call (mem:SI (match_operand:DI 1 "register_operand" ""))
(match_operand 2 "" "")))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(clobber (match_operand 3))
(use (reg:DI 27))
(parallel [(set (match_dup 0)
(call (mem:SI (match_dup 1))
(match_dup 2)))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(use (reg:DI 27))
(use (reg:DI 29))
[(set (match_operand 0 "" "")
(call (mem:SI (match_operand:DI 1 "register_operand" "r"))
(match_operand 2 "" "i")))
- (clobber (reg:DI 1))
(clobber (reg:DI 2))
(use (reg:DI 27))
(use (reg:DI 29))
(cond [(and (const_int 0) (eq (const_int 0) (pc))) (const_int 12)]
(symbol_ref "pa_attr_length_indirect_call (insn)")))])
+;; Expand special pc-relative call to _mcount.  In addition to the call,
+;; the pattern sets %r25 to the return point (%r2) plus the pc-relative
+;; offset of label operand 2 -- presumably so _mcount can locate the
+;; per-call-site profiling data; confirm against the _mcount ABI.
+
+(define_expand "call_mcount"
+ [(parallel [(call (match_operand:SI 0 "" "")
+ (match_operand 1 "" ""))
+ (set (reg:SI 25)
+ (plus:SI (reg:SI 2)
+ (minus:SI (match_operand 2 "" "")
+ (plus:SI (pc) (const_int 4)))))
+ (clobber (reg:SI 2))])]
+ "!TARGET_PORTABLE_RUNTIME"
+ "
+{
+ rtx op = XEXP (operands[0], 0);
+ rtx nb = operands[1];
+ rtx lab = operands[2];
+
+ if (TARGET_64BIT)
+ {
+ rtx r4 = gen_rtx_REG (word_mode, 4);
+ /* Point the argument pointer 64 bytes into the outgoing argument
+ area -- presumably the 64-bit runtime's frame-marker offset;
+ TODO confirm against the PA64 runtime documentation.  */
+ emit_move_insn (arg_pointer_rtx,
+ gen_rtx_PLUS (word_mode, virtual_outgoing_args_rtx,
+ GEN_INT (64)));
+ emit_call_insn (gen_call_mcount_64bit (op, nb, lab, r4));
+ }
+ else
+ {
+ if (flag_pic)
+ {
+ /* The PIC pattern carries a scratch (here %r4) used by its
+ post-reload split to save/restore %r19 around the call.  */
+ rtx r4 = gen_rtx_REG (word_mode, 4);
+ emit_call_insn (gen_call_mcount_pic (op, nb, lab, r4));
+ }
+ else
+ emit_call_insn (gen_call_mcount_nonpic (op, nb, lab));
+ }
+
+ DONE;
+}")
+
+;; 32-bit non-PIC _mcount call: branch-and-link, then an LDO computing
+;; the offset of label operand 2 from the return point into %r25.  An
+;; argument-relocation descriptor is emitted before the call.
+(define_insn "call_mcount_nonpic"
+ [(call (mem:SI (match_operand 0 "call_operand_address" ""))
+ (match_operand 1 "" "i"))
+ (set (reg:SI 25)
+ (plus:SI (reg:SI 2)
+ (minus:SI (match_operand 2 "" "")
+ (plus:SI (pc) (const_int 4)))))
+ (clobber (reg:SI 2))]
+ "!TARGET_PORTABLE_RUNTIME && !TARGET_64BIT"
+ "*
+{
+ pa_output_arg_descriptor (insn);
+ return \"{bl|b,l} %0,%%r2\;ldo %2-.-4(%%r2),%%r25\";
+}"
+ [(set_attr "type" "multi")
+ (set_attr "length" "8")])
+
+;; 32-bit PIC _mcount call.  Emitted as "#" and split after reload so
+;; the PIC register (%r19) can be saved to scratch operand 3 and
+;; restored around the call.
+(define_insn "call_mcount_pic"
+ [(call (mem:SI (match_operand 0 "call_operand_address" ""))
+ (match_operand 1 "" "i"))
+ (set (reg:SI 25)
+ (plus:SI (reg:SI 2)
+ (minus:SI (match_operand 2 "" "")
+ (plus:SI (pc) (const_int 4)))))
+ (clobber (reg:SI 2))
+ (clobber (match_operand 3))
+ (use (reg:SI 19))]
+ "!TARGET_PORTABLE_RUNTIME && !TARGET_64BIT"
+ "#")
+
+;; Post-reload split for call_mcount_pic: save %r19 into scratch
+;; operand 3, emit the call (matched by the post-reload pattern below,
+;; which still uses %r19), then restore %r19.
+(define_split
+ [(parallel [(call (mem:SI (match_operand 0 "call_operand_address" ""))
+ (match_operand 1 "" ""))
+ (set (reg:SI 25)
+ (plus:SI (reg:SI 2)
+ (minus:SI (match_operand 2 "" "")
+ (plus:SI (pc) (const_int 4)))))
+ (clobber (reg:SI 2))
+ (clobber (match_operand 3))
+ (use (reg:SI 19))])]
+ "!TARGET_PORTABLE_RUNTIME && !TARGET_64BIT && reload_completed"
+ [(set (match_dup 3) (reg:SI 19))
+ (parallel [(call (mem:SI (match_dup 0))
+ (match_dup 1))
+ (set (reg:SI 25)
+ (plus:SI (reg:SI 2)
+ (minus:SI (match_dup 2)
+ (plus:SI (pc) (const_int 4)))))
+ (clobber (reg:SI 2))
+ (use (reg:SI 19))])
+ (set (reg:SI 19) (match_dup 3))]
+ "")
+
+;; The PIC _mcount call after the %r19 save/restore has been made
+;; explicit by the split above; output is the same branch-and-link plus
+;; LDO sequence as the non-PIC form.
+(define_insn "*call_mcount_pic_post_reload"
+ [(call (mem:SI (match_operand 0 "call_operand_address" ""))
+ (match_operand 1 "" "i"))
+ (set (reg:SI 25)
+ (plus:SI (reg:SI 2)
+ (minus:SI (match_operand 2 "" "")
+ (plus:SI (pc) (const_int 4)))))
+ (clobber (reg:SI 2))
+ (use (reg:SI 19))]
+ "!TARGET_PORTABLE_RUNTIME && !TARGET_64BIT"
+ "*
+{
+ pa_output_arg_descriptor (insn);
+ return \"{bl|b,l} %0,%%r2\;ldo %2-.-4(%%r2),%%r25\";
+}"
+ [(set_attr "type" "multi")
+ (set_attr "length" "8")])
+
+;; 64-bit _mcount call.  Emitted as "#" and split after reload so the
+;; global pointer (%r27) can be saved to scratch operand 3 and restored
+;; around the call.
+(define_insn "call_mcount_64bit"
+ [(call (mem:SI (match_operand 0 "call_operand_address" ""))
+ (match_operand 1 "" "i"))
+ (set (reg:SI 25)
+ (plus:SI (reg:SI 2)
+ (minus:SI (match_operand 2 "" "")
+ (plus:SI (pc) (const_int 4)))))
+ (clobber (reg:DI 2))
+ (clobber (match_operand 3))
+ (use (reg:DI 27))
+ (use (reg:DI 29))]
+ "TARGET_64BIT"
+ "#")
+
+;; Post-reload split for call_mcount_64bit: save %r27 into scratch
+;; operand 3, emit the call, then restore %r27.
+(define_split
+ [(parallel [(call (mem:SI (match_operand 0 "call_operand_address" ""))
+ (match_operand 1 "" ""))
+ (set (reg:SI 25)
+ (plus:SI (reg:SI 2)
+ (minus:SI (match_operand 2 "" "")
+ (plus:SI (pc) (const_int 4)))))
+ (clobber (reg:DI 2))
+ (clobber (match_operand 3))
+ (use (reg:DI 27))
+ (use (reg:DI 29))])]
+ "TARGET_64BIT && reload_completed"
+ [(set (match_dup 3) (reg:DI 27))
+ (parallel [(call (mem:SI (match_dup 0))
+ (match_dup 1))
+ (set (reg:SI 25)
+ (plus:SI (reg:SI 2)
+ (minus:SI (match_dup 2)
+ (plus:SI (pc) (const_int 4)))))
+ (clobber (reg:DI 2))
+ (use (reg:DI 27))
+ (use (reg:DI 29))])
+ (set (reg:DI 27) (match_dup 3))]
+ "")
+
+;; The 64-bit _mcount call after the %r27 save/restore has been made
+;; explicit.  Unlike the 32-bit forms, no argument descriptor is
+;; emitted before the branch-and-link/LDO sequence.
+(define_insn "*call_mcount_64bit_post_reload"
+ [(call (mem:SI (match_operand 0 "call_operand_address" ""))
+ (match_operand 1 "" "i"))
+ (set (reg:SI 25)
+ (plus:SI (reg:SI 2)
+ (minus:SI (match_operand 2 "" "")
+ (plus:SI (pc) (const_int 4)))))
+ (clobber (reg:DI 2))
+ (use (reg:DI 27))
+ (use (reg:DI 29))]
+ "TARGET_64BIT"
+ "{bl|b,l} %0,%%r2\;ldo %2-.-4(%%r2),%%r25"
+ [(set_attr "type" "multi")
+ (set_attr "length" "8")])
+
;; Call subroutine returning any type.
(define_expand "untyped_call"
{
int i;
- emit_call_insn (GEN_CALL (operands[0], const0_rtx, NULL, const0_rtx));
+ emit_call_insn (gen_call (operands[0], const0_rtx));
for (i = 0; i < XVECLEN (operands[2], 0); i++)
{
"TARGET_64BIT"
"*
{
- pa_output_arg_descriptor (insn);
return pa_output_call (insn, operands[0], 1);
}"
[(set_attr "type" "sibcall")
"TARGET_64BIT"
"*
{
- pa_output_arg_descriptor (insn);
return pa_output_call (insn, operands[1], 1);
}"
[(set_attr "type" "sibcall")
[(set_attr "type" "move")
(set_attr "length" "4")])
-;; These are just placeholders so we know where branch tables
-;; begin and end.
-(define_insn "begin_brtab"
- [(const_int 1)]
- ""
- "*
-{
- /* Only GAS actually supports this pseudo-op. */
- if (TARGET_GAS)
- return \".begin_brtab\";
- else
- return \"\";
-}"
- [(set_attr "type" "move")
- (set_attr "length" "0")])
-
-(define_insn "end_brtab"
- [(const_int 2)]
- ""
- "*
-{
- /* Only GAS actually supports this pseudo-op. */
- if (TARGET_GAS)
- return \".end_brtab\";
- else
- return \"\";
-}"
- [(set_attr "type" "move")
- (set_attr "length" "0")])
-
;;; EH does longjmp's from and within the data section. Thus,
;;; an interspace branch is required for the longjmp implementation.
;;; Registers r1 and r2 are used as scratch registers for the jump
emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
- /* Restore the frame pointer. The virtual_stack_vars_rtx is saved
- instead of the hard_frame_pointer_rtx in the save area. We need
- to adjust for the offset between these two values. */
- if (GET_CODE (fp) != REG)
- fp = force_reg (Pmode, fp);
- emit_move_insn (hard_frame_pointer_rtx, plus_constant (Pmode, fp, -8));
-
- /* This bit is the same as expand_builtin_longjmp. */
- emit_stack_restore (SAVE_NONLOCAL, stack);
- emit_use (hard_frame_pointer_rtx);
- emit_use (stack_pointer_rtx);
-
/* Load the label we are jumping through into r1 so that we know
where to look for it when we get back to setjmp's function for
restoring the gp. */
emit_move_insn (pv, lab);
+ /* Restore the stack and frame pointers. */
+ fp = copy_to_reg (fp);
+ emit_stack_restore (SAVE_NONLOCAL, stack);
+
+ /* Ensure the frame pointer move is not optimized. */
+ emit_insn (gen_blockage ());
+ emit_clobber (hard_frame_pointer_rtx);
+ emit_clobber (frame_pointer_rtx);
+ emit_move_insn (hard_frame_pointer_rtx, fp);
+
+ emit_use (hard_frame_pointer_rtx);
+ emit_use (stack_pointer_rtx);
+
/* Prevent the insns above from being scheduled into the delay slot
of the interspace jump because the space register could change. */
emit_insn (gen_blockage ());
;; strength reduction is used. It is actually created when the instruction
;; combination phase combines the special loop test. Since this insn
;; is both a jump insn and has an output, it must deal with its own
-;; reloads, hence the `m' constraints. The `!' constraints direct reload
+;; reloads, hence the `Q' constraints. The `!' constraints direct reload
;; to not choose the register alternatives in the event a reload is needed.
(define_insn "decrement_and_branch_until_zero"
[(set (pc)
(if_then_else
(match_operator 2 "comparison_operator"
[(plus:SI
- (match_operand:SI 0 "reg_before_reload_operand" "+!r,!*f,*m")
+ (match_operand:SI 0 "reg_before_reload_operand" "+!r,!*f,*Q")
(match_operand:SI 1 "int5_operand" "L,L,L"))
(const_int 0)])
(label_ref (match_operand 3 "" ""))
[(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
(label_ref (match_operand 3 "" ""))
(pc)))
- (set (match_operand:SI 0 "reg_before_reload_operand" "=!r,!*f,*m,!*q")
+ (set (match_operand:SI 0 "reg_before_reload_operand" "=!r,!*f,*Q,!*q")
(match_dup 1))]
""
"* return pa_output_movb (operands, insn, which_alternative, 0); "
[(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
(pc)
(label_ref (match_operand 3 "" ""))))
- (set (match_operand:SI 0 "reg_before_reload_operand" "=!r,!*f,*m,!*q")
+ (set (match_operand:SI 0 "reg_before_reload_operand" "=!r,!*f,*Q,!*q")
(match_dup 1))]
""
"* return pa_output_movb (operands, insn, which_alternative, 1); "
emit_library_call_value (canonicalize_funcptr_for_compare_libfunc,
operands[0], LCT_NORMAL, Pmode,
- 1, operands[1], Pmode);
+ operands[1], Pmode);
DONE;
}
"addil LR'%1-$tls_leoff$,%2\;ldo RR'%1-$tls_leoff$(%%r1),%0"
[(set_attr "type" "multi")
(set_attr "length" "8")])
+
+;; Atomic instructions
+
+;; All memory loads and stores access storage atomically, with one
+;; exception: the STORE BYTES and STORE DOUBLE BYTES instructions, and
+;; doubleword loads and stores, are not guaranteed to be atomic
+;; when referencing the I/O address space.
+
+;; These patterns are at the bottom so the non atomic versions are preferred.
+
+;; Implement atomic QImode store.  With -msync-libcall, use a
+;; compare-and-swap exchange loop; otherwise FAIL so the normal store
+;; insn (atomic on PA per the section comment above) is used.
+(define_expand "atomic_storeqi"
+ [(match_operand:QI 0 "memory_operand") ;; memory
+ (match_operand:QI 1 "register_operand") ;; value to store
+ (match_operand:SI 2 "const_int_operand")] ;; model
+ ""
+{
+ if (TARGET_SYNC_LIBCALL)
+ {
+ rtx mem = operands[0];
+ rtx val = operands[1];
+ if (pa_maybe_emit_compare_and_swap_exchange_loop (NULL_RTX, mem, val))
+ DONE;
+ }
+ FAIL;
+})
+
+;; Implement atomic HImode stores using exchange.  With -msync-libcall,
+;; use a compare-and-swap exchange loop; otherwise FAIL so the normal
+;; (atomic) store insn is used.
+
+(define_expand "atomic_storehi"
+ [(match_operand:HI 0 "memory_operand") ;; memory
+ (match_operand:HI 1 "register_operand") ;; value to store
+ (match_operand:SI 2 "const_int_operand")] ;; model
+ ""
+{
+ if (TARGET_SYNC_LIBCALL)
+ {
+ rtx mem = operands[0];
+ rtx val = operands[1];
+ if (pa_maybe_emit_compare_and_swap_exchange_loop (NULL_RTX, mem, val))
+ DONE;
+ }
+ FAIL;
+})
+
+;; Implement atomic SImode store using exchange.  With -msync-libcall,
+;; use a compare-and-swap exchange loop; otherwise FAIL so the normal
+;; (atomic) store insn is used.
+
+(define_expand "atomic_storesi"
+ [(match_operand:SI 0 "memory_operand") ;; memory
+ (match_operand:SI 1 "register_operand") ;; value to store
+ (match_operand:SI 2 "const_int_operand")] ;; model
+ ""
+{
+ if (TARGET_SYNC_LIBCALL)
+ {
+ rtx mem = operands[0];
+ rtx val = operands[1];
+ if (pa_maybe_emit_compare_and_swap_exchange_loop (NULL_RTX, mem, val))
+ DONE;
+ }
+ FAIL;
+})
+
+;; Implement atomic DImode load.  Only needed for 32-bit targets with
+;; an FPU: the value is loaded through a floating-point doubleword load
+;; (see atomic_loaddi_1).  On 64-bit or soft-float targets FAIL so the
+;; generic handling applies.  A seq-cst load is fenced on both sides;
+;; other models only fence after the load.
+
+(define_expand "atomic_loaddi"
+ [(match_operand:DI 0 "register_operand") ;; val out
+ (match_operand:DI 1 "memory_operand") ;; memory
+ (match_operand:SI 2 "const_int_operand")] ;; model
+ ""
+{
+ enum memmodel model;
+
+ if (TARGET_64BIT || TARGET_DISABLE_FPREGS || TARGET_SOFT_FLOAT)
+ FAIL;
+
+ model = memmodel_from_int (INTVAL (operands[2]));
+ /* The insn pattern takes the address in a register.  */
+ operands[1] = force_reg (SImode, XEXP (operands[1], 0));
+ if (is_mm_seq_cst (model))
+ expand_mem_thread_fence (model);
+ emit_insn (gen_atomic_loaddi_1 (operands[0], operands[1]));
+ expand_mem_thread_fence (model);
+ DONE;
+})
+
+;; Atomic DImode load for 32-bit targets: fldd loads the doubleword
+;; into an FP scratch in one access (relied on to be atomic), then the
+;; value is spilled to the stack and reloaded as two word loads into
+;; the general register pair %0/%R0.
+(define_insn "atomic_loaddi_1"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (mem:DI (match_operand:SI 1 "register_operand" "r")))
+ (clobber (match_scratch:DI 2 "=f"))]
+ "!TARGET_64BIT && !TARGET_DISABLE_FPREGS && !TARGET_SOFT_FLOAT"
+ "{fldds|fldd} 0(%1),%2\n\t{fstds|fstd} %2,-16(%%sp)\n\t{ldws|ldw} -16(%%sp),%0\n\t{ldws|ldw} -12(%%sp),%R0"
+ [(set_attr "type" "move")
+ (set_attr "length" "16")])
+
+;; Implement atomic DImode store.  With -msync-libcall, try a
+;; compare-and-swap exchange loop first.  Otherwise, on 32-bit FPU
+;; targets, store through a floating-point doubleword store (see
+;; atomic_storedi_1); on 64-bit or soft-float targets FAIL.  The store
+;; is fenced before; seq-cst also fences after.
+
+(define_expand "atomic_storedi"
+ [(match_operand:DI 0 "memory_operand") ;; memory
+ (match_operand:DI 1 "reg_or_cint_move_operand") ;; value to store
+ (match_operand:SI 2 "const_int_operand")] ;; model
+ ""
+{
+ enum memmodel model;
+
+ if (TARGET_SYNC_LIBCALL)
+ {
+ rtx mem = operands[0];
+ rtx val = operands[1];
+ if (pa_maybe_emit_compare_and_swap_exchange_loop (NULL_RTX, mem, val))
+ DONE;
+ }
+
+ if (TARGET_64BIT || TARGET_DISABLE_FPREGS || TARGET_SOFT_FLOAT)
+ FAIL;
+
+ model = memmodel_from_int (INTVAL (operands[2]));
+ /* The insn pattern takes the address in a register; zero stays as a
+ constant so the cheap %fr0 alternative can be used.  */
+ operands[0] = force_reg (SImode, XEXP (operands[0], 0));
+ if (operands[1] != CONST0_RTX (DImode))
+ operands[1] = force_reg (DImode, operands[1]);
+ expand_mem_thread_fence (model);
+ emit_insn (gen_atomic_storedi_1 (operands[0], operands[1]));
+ if (is_mm_seq_cst (model))
+ expand_mem_thread_fence (model);
+ DONE;
+})
+
+;; Atomic DImode store for 32-bit targets.  Alternative 0 stores zero
+;; with a single fstd from %fr0 (presumably reads as zero here -- TODO
+;; confirm); alternative 1 moves the general register pair through the
+;; stack into an FP scratch and stores it with one (atomic) fstd.
+(define_insn "atomic_storedi_1"
+ [(set (mem:DI (match_operand:SI 0 "register_operand" "r,r"))
+ (match_operand:DI 1 "reg_or_0_operand" "M,r"))
+ (clobber (match_scratch:DI 2 "=X,f"))]
+ "!TARGET_64BIT && !TARGET_DISABLE_FPREGS && !TARGET_SOFT_FLOAT"
+ "@
+ {fstds|fstd} %%fr0,0(%0)
+ {stws|stw} %1,-16(%%sp)\n\t{stws|stw} %R1,-12(%%sp)\n\t{fldds|fldd} -16(%%sp),%2\n\t{fstds|fstd} %2,0(%0)"
+ [(set_attr "type" "move,move")
+ (set_attr "length" "4,16")])
+
+;; PA 2.0 hardware supports out-of-order execution of loads and stores, so
+;; we need memory barriers to enforce program order for memory references
+;; when the TLB and PSW O bits are not set. We assume all PA 2.0 systems
+;; are weakly ordered since neither HP-UX nor Linux sets the PSW O bit. Since
+;; we want PA 1.x code to be PA 2.0 compatible, we also need barriers when
+;; generating PA 1.x code even though all PA 1.x systems are strongly ordered.
+
+;; When barriers are needed, we use a strongly ordered ldcw instruction as
+;; the barrier. Most PA 2.0 targets are cache coherent. In that case, we
+;; can use the coherent cache control hint and avoid aligning the ldcw
+;; address. In spite of its description, it is not clear that the sync
+;; instruction works as a barrier.
+
+;; Expand a full memory barrier as a volatile BLKmode access through a
+;; fresh pseudo (the address register is clobbered by the ldcw used as
+;; the barrier).  FAIL on TARGET_ORDERED, where no barrier is needed.
+(define_expand "memory_barrier"
+ [(parallel
+ [(set (match_dup 0) (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))
+ (clobber (match_dup 1))])]
+ ""
+{
+ /* We don't need a barrier if the target uses ordered memory references. */
+ if (TARGET_ORDERED)
+ FAIL;
+ operands[1] = gen_reg_rtx (Pmode);
+ operands[0] = gen_rtx_MEM (BLKmode, operands[1]);
+ MEM_VOLATILE_P (operands[0]) = 1;
+})
+
+;; PA 2.0 with coherent caches: a single ldcw,co on the stack pointer
+;; serves as the barrier; the coherent hint avoids having to align the
+;; ldcw address (see section comment above).
+(define_insn "*memory_barrier_coherent"
+ [(set (match_operand:BLK 0 "" "")
+ (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))
+ (clobber (match_operand 1 "pmode_register_operand" "=r"))]
+ "TARGET_PA_20 && TARGET_COHERENT_LDCW"
+ "ldcw,co 0(%%sp),%1"
+ [(set_attr "type" "binary")
+ (set_attr "length" "4")])
+
+;; 64-bit barrier without the coherent hint: build an aligned scratch
+;; address (sp+15 with low bits cleared by depd -- TODO confirm this
+;; satisfies ldcw's alignment requirement) and use ldcw as the barrier.
+(define_insn "*memory_barrier_64"
+ [(set (match_operand:BLK 0 "" "")
+ (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))
+ (clobber (match_operand 1 "pmode_register_operand" "=&r"))]
+ "TARGET_64BIT"
+ "ldo 15(%%sp),%1\n\tdepd %%r0,63,3,%1\n\tldcw 0(%1),%1"
+ [(set_attr "type" "binary")
+ (set_attr "length" "12")])
+
+;; 32-bit barrier: same scheme as *memory_barrier_64 but using the
+;; 32-bit deposit (dep/depw) to clear the low address bits.
+(define_insn "*memory_barrier_32"
+ [(set (match_operand:BLK 0 "" "")
+ (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))
+ (clobber (match_operand 1 "pmode_register_operand" "=&r"))]
+ ""
+ "ldo 15(%%sp),%1\n\t{dep|depw} %%r0,31,3,%1\n\tldcw 0(%1),%1"
+ [(set_attr "type" "binary")
+ (set_attr "length" "12")])