m_fully_unmasked_p (false), m_use_real_merge_p (false),
m_needs_avl_p (false), m_vlmax_p (false), m_has_tail_policy_p (false),
m_has_mask_policy_p (false), m_tail_policy (TAIL_ANY),
- m_mask_policy (MASK_ANY), m_dest_mode (VOIDmode), m_mask_mode (VOIDmode)
+ m_mask_policy (MASK_ANY), m_dest_mode (VOIDmode), m_mask_mode (VOIDmode),
+ m_vl_op (NULL_RTX)
{}
/* Initializer for various configurations. */
m_use_real_merge_p (use_real_merge_p), m_needs_avl_p (needs_avl_p),
m_vlmax_p (vlmax_p), m_has_tail_policy_p (false),
m_has_mask_policy_p (false), m_tail_policy (TAIL_ANY),
- m_mask_policy (MASK_ANY), m_dest_mode (dest_mode), m_mask_mode (mask_mode)
+ m_mask_policy (MASK_ANY), m_dest_mode (dest_mode),
+ m_mask_mode (mask_mode), m_vl_op (NULL_RTX)
{}
void set_policy (enum tail_policy ta)
m_has_mask_policy_p = true;
m_mask_policy = ma;
}
+ void set_vl (rtx vl) { m_vl_op = vl; }
void add_output_operand (rtx x, machine_mode mode)
{
if (m_needs_avl_p)
{
- rtx len = ops[m_op_num];
+ rtx len = m_vl_op;
if (m_vlmax_p)
{
if (const_vlmax_p (m_dest_mode))
enum mask_policy m_mask_policy;
machine_mode m_dest_mode;
machine_mode m_mask_mode;
+ rtx m_vl_op;
expand_operand m_ops[MAX_OPERANDS];
};
/* This function emits a {VLMAX, TAIL_ANY, MASK_ANY} vsetvli followed by the
* actual operation. */
void
-emit_vlmax_insn (unsigned icode, int op_num, rtx *ops)
+emit_vlmax_insn (unsigned icode, int op_num, rtx *ops, rtx vl)
{
machine_mode data_mode = GET_MODE (ops[0]);
machine_mode mask_mode = get_mask_mode (data_mode).require ();
/*DEST_MODE*/ data_mode, /*MASK_MODE*/ mask_mode);
e.set_policy (TAIL_ANY);
e.set_policy (MASK_ANY);
+ /* According to the LRA mov pattern in vector.md, we have a clobber
+ operand to be used as the VL operand.  */
+ e.set_vl (vl);
e.emit_insn ((enum insn_code) icode, ops);
}
/* This function emits a {NONVLMAX, TAIL_ANY, MASK_ANY} vsetvli followed by the
* actual operation. */
void
-emit_nonvlmax_insn (unsigned icode, int op_num, rtx *ops)
+emit_nonvlmax_insn (unsigned icode, int op_num, rtx *ops, rtx avl)
{
machine_mode data_mode = GET_MODE (ops[0]);
machine_mode mask_mode = get_mask_mode (data_mode).require ();
/*DEST_MODE*/ data_mode, /*MASK_MODE*/ mask_mode);
e.set_policy (TAIL_ANY);
e.set_policy (MASK_ANY);
+ e.set_vl (avl);
e.emit_insn ((enum insn_code) icode, ops);
}
*scalar_op = force_reg (scalar_mode, *scalar_op);
rtx tmp = gen_reg_rtx (vector_mode);
- rtx ops[] = {tmp, *scalar_op, vl};
+ rtx ops[] = {tmp, *scalar_op};
riscv_vector::emit_nonvlmax_insn (code_for_pred_broadcast (vector_mode),
- riscv_vector::RVV_UNOP, ops);
+ riscv_vector::RVV_UNOP, ops, vl);
emit_vector_func (operands, tmp);
return true;
if (fractional_p)
{
- rtx operands[] = {subreg, mem, ops[4]};
+ rtx operands[] = {subreg, mem};
emit_vlmax_insn (code_for_pred_mov (subpart_mode), RVV_UNOP,
- operands);
+ operands, ops[4]);
}
else
emit_move_insn (subreg, mem);
if (fractional_p)
{
- rtx operands[] = {mem, subreg, ops[4]};
+ rtx operands[] = {mem, subreg};
emit_vlmax_insn (code_for_pred_mov (subpart_mode), RVV_UNOP,
- operands);
+ operands, ops[4]);
}
else
emit_move_insn (mem, subreg);