(ior (and (match_code "const_int,const_double,const,symbol_ref,label_ref")
(match_test "TARGET_AUTO_LITPOOLS"))
(and (match_code "const_int")
- (match_test "can_create_pseudo_p ()"))))
+ (match_test "! xtensa_split1_finished_p ()"))))
;; Memory constraints. Do not use define_memory_constraint here. Doing so
;; causes reload to force some constants into the constant pool, but since
(ior (and (match_code "const_int")
(match_test "(GET_MODE_CLASS (mode) == MODE_INT
&& xtensa_simm12b (INTVAL (op)))
- || can_create_pseudo_p ()"))
+ || ! xtensa_split1_finished_p ()"))
(and (match_code "const_int,const_double,const,symbol_ref,label_ref")
(match_test "(TARGET_CONST16 || TARGET_AUTO_LITPOOLS)
&& CONSTANT_P (op)
extern char *xtensa_emit_sibcall (int, rtx *);
extern bool xtensa_tls_referenced_p (rtx);
extern enum rtx_code xtensa_shlrd_which_direction (rtx, rtx);
+extern bool xtensa_split1_finished_p (void);
+extern void xtensa_split_DI_reg_imm (rtx *);
#ifdef TREE_CODE
extern void init_cumulative_args (CUMULATIVE_ARGS *, int);
#include "hw-doloop.h"
#include "rtl-iter.h"
#include "insn-attr.h"
+#include "tree-pass.h"
/* This file should be included last. */
#include "target-def.h"
HOST_WIDE_INT delta,
HOST_WIDE_INT vcall_offset,
tree function);
+static bool xtensa_lra_p (void);
static rtx xtensa_delegitimize_address (rtx);
#define TARGET_CANNOT_FORCE_CONST_MEM xtensa_cannot_force_const_mem
#undef TARGET_LRA_P
-#define TARGET_LRA_P hook_bool_void_false
+#define TARGET_LRA_P xtensa_lra_p
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P xtensa_legitimate_address_p
int
xt_true_regnum (rtx x)
{
- if (GET_CODE (x) == REG)
+ if (REG_P (x))
{
- if (reg_renumber
- && REGNO (x) >= FIRST_PSEUDO_REGISTER
- && reg_renumber[REGNO (x)] >= 0)
+ if (! HARD_REGISTER_P (x)
+ && reg_renumber
+ && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
return reg_renumber[REGNO (x)];
return REGNO (x);
}
- if (GET_CODE (x) == SUBREG)
+ if (SUBREG_P (x))
{
int base = xt_true_regnum (SUBREG_REG (x));
- if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
- return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
- GET_MODE (SUBREG_REG (x)),
- SUBREG_BYTE (x), GET_MODE (x));
+
+ if (base >= 0
+ && HARD_REGISTER_NUM_P (base))
+ {
+ struct subreg_info info;
+
+ subreg_get_info (lra_in_progress
+ ? (unsigned) base : REGNO (SUBREG_REG (x)),
+ GET_MODE (SUBREG_REG (x)),
+ SUBREG_BYTE (x), GET_MODE (x), &info);
+ if (info.representable_p)
+ return base + info.offset;
+ }
}
return -1;
}
}
+/* Return true after the "split1" pass has finished.  */
+
+bool
+xtensa_split1_finished_p (void)
+{
+ return cfun && (cfun->curr_properties & PROP_rtl_split_insns);
+}
+
+
+/* Split a DImode pair of reg (operands[0]) and const_int (operands[1])
+   into two SImode pairs, the low-part (operands[0] and [1]) and the
+   high-part (operands[2] and [3]).  */
+
+void
+xtensa_split_DI_reg_imm (rtx *operands)
+{
+ rtx lowpart, highpart;
+
+ if (WORDS_BIG_ENDIAN)
+ split_double (operands[1], &highpart, &lowpart);
+ else
+ split_double (operands[1], &lowpart, &highpart);
+
+ operands[3] = highpart;
+ operands[2] = gen_highpart (SImode, operands[0]);
+ operands[1] = lowpart;
+ operands[0] = gen_lowpart (SImode, operands[0]);
+}
+
+
/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
static bool
return op;
}
+/* Implement TARGET_LRA_P. */
+
+static bool
+xtensa_lra_p (void)
+{
+ return TARGET_LRA;
+}
+
#include "gt-xtensa.h"
}
/* 1 for registers not available across function calls.
- These must include the FIXED_REGISTERS and also any
+   These need not include the FIXED_REGISTERS but must include any
registers that can be used without being saved.
The latter must include the registers where values are returned
and the register where structure-value addresses are passed.
Proper values are computed in TARGET_CONDITIONAL_REGISTER_USAGE. */
-#define CALL_USED_REGISTERS \
+#define CALL_REALLY_USED_REGISTERS \
{ \
- 1, 1, 4, 4, 4, 4, 4, 4, 1, 1, 1, 1, 2, 2, 2, 2, \
- 1, 1, 1, \
+ 1, 0, 4, 4, 4, 4, 4, 4, 1, 1, 1, 1, 2, 2, 2, 2, \
+ 0, 0, 1, \
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, \
1, \
}
because of offering further optimization opportunities. */
if (register_operand (operands[0], DImode))
{
- rtx lowpart, highpart;
-
- if (TARGET_BIG_ENDIAN)
- split_double (operands[1], &highpart, &lowpart);
- else
- split_double (operands[1], &lowpart, &highpart);
- emit_insn (gen_movsi (gen_lowpart (SImode, operands[0]), lowpart));
- emit_insn (gen_movsi (gen_highpart (SImode, operands[0]), highpart));
+ xtensa_split_DI_reg_imm (operands);
+ emit_move_insn (operands[0], operands[1]);
+ emit_move_insn (operands[2], operands[3]);
DONE;
}
}
})
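+;; Until the split1 pass has finished, lower a DImode constant-to-register
+;; move into two SImode moves so that each word can be optimized separately.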
+(define_split
+ [(set (match_operand:DI 0 "register_operand")
+ (match_operand:DI 1 "const_int_operand"))]
+ "!TARGET_CONST16 && !TARGET_AUTO_LITPOOLS
+ && ! xtensa_split1_finished_p ()"
+ [(set (match_dup 0)
+ (match_dup 1))
+ (set (match_dup 2)
+ (match_dup 3))]
+{
+ xtensa_split_DI_reg_imm (operands);
+})
+
;; 32-bit Integer moves
(define_expand "movsi"
(set_attr "mode" "SI")
(set_attr "length" "2,2,2,2,2,2,3,3,3,3,6,3,3,3,3,3")])
+(define_split
+ [(set (match_operand:SI 0 "register_operand")
+ (match_operand:SI 1 "const_int_operand"))]
+ "!TARGET_CONST16 && !TARGET_AUTO_LITPOOLS
+ && ! xtensa_split1_finished_p ()
+ && ! xtensa_simm12b (INTVAL (operands[1]))"
+ [(set (match_dup 0)
+ (match_dup 1))]
+{
+ operands[1] = force_const_mem (SImode, operands[1]);
+})
+
(define_split
[(set (match_operand:SI 0 "register_operand")
(match_operand:SI 1 "constantpool_operand"))]
Target RejectNegative Joined UInteger Var(xtensa_extra_l32r_costs) Init(0)
Set extra memory access cost for L32R instruction, in clock-cycle units.
+mlra
+Target Mask(LRA)
+Use LRA instead of reload (transitional).
+
mtarget-align
Target
Automatically align branch targets to reduce branch penalties.