extern bool s390_hard_regno_rename_ok (unsigned int, unsigned int);
extern int s390_class_max_nregs (enum reg_class, machine_mode);
extern bool s390_return_addr_from_memory(void);
-extern rtx s390_gen_lowpart_subreg (machine_mode, rtx);
extern bool s390_fma_allowed_p (machine_mode);
#if S390_USE_TARGET_ATTRIBUTE
extern tree s390_valid_target_attribute_tree (tree args,
return cfun_gpr_save_slot(RETURN_REGNUM) == SAVE_SLOT_STACK;
}
-/* Generate a SUBREG for the MODE lowpart of EXPR.
-
-   In contrast to gen_lowpart it will always return a SUBREG
-   expression.  This is useful to generate STRICT_LOW_PART
-   expressions.  */
-rtx
-s390_gen_lowpart_subreg (machine_mode mode, rtx expr)
-{
-  rtx lowpart = gen_lowpart (mode, expr);
-
-  /* There might be no SUBREG in case it could be applied to the hard
-     REG rtx or it could be folded with a paradoxical subreg.  Bring
-     it back.  */
-  if (!SUBREG_P (lowpart))
-    {
-      machine_mode reg_mode = TARGET_ZARCH ? DImode : SImode;
-      gcc_assert (REG_P (lowpart));
-      lowpart = gen_lowpart_SUBREG (mode,
-                                    gen_rtx_REG (reg_mode,
-                                                 REGNO (lowpart)));
-    }
-
-  return lowpart;
-}
-
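For context: gen_lowpart only sometimes returns a SUBREG, and that is the case the removed helper papered over. A minimal sketch of the difference, using GCC's internal RTL API, with made-up register numbers and assuming a 64-bit (TARGET_ZARCH) target:

  /* Sketch only, not part of the patch.  */
  rtx pseudo = gen_reg_rtx (DImode);     /* e.g. (reg:DI 60) */
  rtx hard = gen_rtx_REG (DImode, 2);    /* hard GPR %r2, picked arbitrarily */

  /* For a pseudo, gen_lowpart already yields the SUBREG a strict_low_part
     destination needs: (subreg:SI (reg:DI 60) 4) on this big-endian target.  */
  rtx low_pseudo = gen_lowpart (SImode, pseudo);

  /* For the hard register it folds to a plain (reg:SI 2); the removed helper
     re-wrapped that as (subreg:SI (reg:DI 2) 4).  After this patch the
     s390_expand_insv caller checks SUBREG_P itself, and the splitters call
     gen_lowpart directly.  */
  rtx low_hard = gen_lowpart (SImode, hard);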
/* Return nonzero if it's OK to use fused multiply-add for MODE. */
bool
s390_fma_allowed_p (machine_mode mode)
      /* Emit a strict_low_part pattern if possible.  */
      if (smode_bsize == bitsize && bitpos == mode_bsize - smode_bsize)
        {
-          rtx low_dest = s390_gen_lowpart_subreg (smode, dest);
-          rtx low_src = gen_lowpart (smode, src);
-
-          switch (smode)
+          rtx low_dest = gen_lowpart (smode, dest);
+          if (SUBREG_P (low_dest) && !paradoxical_subreg_p (low_dest))
            {
-            case E_QImode: emit_insn (gen_movstrictqi (low_dest, low_src)); return true;
-            case E_HImode: emit_insn (gen_movstricthi (low_dest, low_src)); return true;
-            case E_SImode: emit_insn (gen_movstrictsi (low_dest, low_src)); return true;
-            default: break;
+              poly_int64 offset = GET_MODE_SIZE (mode) - GET_MODE_SIZE (smode);
+              rtx low_src = adjust_address (src, smode, offset);
+              switch (smode)
+                {
+                case E_QImode: emit_insn (gen_movstrictqi (low_dest, low_src));
+                  return true;
+                case E_HImode: emit_insn (gen_movstricthi (low_dest, low_src));
+                  return true;
+                case E_SImode: emit_insn (gen_movstrictsi (low_dest, low_src));
+                  return true;
+                default: break;
+                }
            }
        }
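To make the new guard concrete: assuming the insertion source is a memory operand (as the use of adjust_address above implies) and that dest is a DImode pseudo, a 16-bit insertion into its low bits would now go through gen_movstricthi roughly as sketched below. Register numbers are made up and the snippet is illustrative only, written for a 64-bit target.

  /* Sketch only, not part of the patch: the HImode case at expand time.  */
  machine_mode mode = DImode, smode = HImode;
  rtx dest = gen_reg_rtx (mode);                      /* (reg:DI 60) */
  rtx src = gen_rtx_MEM (mode, gen_reg_rtx (Pmode));  /* (mem:DI (reg:DI 61)) */

  /* A pseudo gives a non-paradoxical SUBREG, so the guard above passes.  */
  rtx low_dest = gen_lowpart (smode, dest);           /* (subreg:HI (reg:DI 60) 6) */
  /* On this big-endian target the low half sits at the end, so both offsets
     are GET_MODE_SIZE (DImode) - GET_MODE_SIZE (HImode) = 6.  */
  poly_int64 offset = GET_MODE_SIZE (mode) - GET_MODE_SIZE (smode);
  rtx low_src = adjust_address (src, smode, offset);  /* (mem:HI (plus:DI (reg:DI 61) (const_int 6))) */
  emit_insn (gen_movstricthi (low_dest, low_src));
  /* Emits: (set (strict_low_part (subreg:HI (reg:DI 60) 6))
                 (mem:HI (plus:DI (reg:DI 61) (const_int 6))))  */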
"TARGET_ZARCH"
"#"
"&& reload_completed"
- [(set (match_dup 2) (match_dup 4))
+ [(set (match_dup 2) (match_dup 3))
(set (match_dup 0) (ashift:DI (match_dup 0) (const_int 32)))
- (set (strict_low_part (match_dup 3)) (match_dup 5))]
+ (set (strict_low_part (match_dup 2)) (match_dup 4))]
"operands[2] = gen_lowpart (SImode, operands[0]);
- operands[3] = s390_gen_lowpart_subreg (SImode, operands[0]);
- s390_split_access_reg (operands[1], &operands[5], &operands[4]);")
+ s390_split_access_reg (operands[1], &operands[4], &operands[3]);")
; Splitters for storing TLS pointer to %a0:DI.
   (parallel
    [(set (strict_low_part (match_dup 2)) (match_dup 1))
     (clobber (reg:CC CC_REGNUM))])]
-  "operands[2] = s390_gen_lowpart_subreg (HImode, operands[0]);")
+  "operands[2] = gen_lowpart (HImode, operands[0]);")
(define_insn_and_split "*zero_extendqisi2_31"
  [(set (match_operand:SI 0 "register_operand" "=&d")
  "&& reload_completed"
  [(set (match_dup 0) (const_int 0))
   (set (strict_low_part (match_dup 2)) (match_dup 1))]
-  "operands[2] = s390_gen_lowpart_subreg (QImode, operands[0]);")
+  "operands[2] = gen_lowpart (QImode, operands[0]);")
;
; zero_extendqihi2 instruction pattern(s).
"&& reload_completed"
[(set (match_dup 0) (const_int 0))
(set (strict_low_part (match_dup 2)) (match_dup 1))]
- "operands[2] = s390_gen_lowpart_subreg (QImode, operands[0]);")
+ "operands[2] = gen_lowpart (QImode, operands[0]);")
;
; fixuns_trunc(dd|td|sf|df|tf)(si|di)2 expander