/* Convert tree expression to rtl instructions, for GNU compiler.
- Copyright (C) 1988-2018 Free Software Foundation, Inc.
+ Copyright (C) 1988-2019 Free Software Foundation, Inc.
This file is part of GCC.
: op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
{
}
- rtx finish_endp (int);
+ rtx finish_retmode (memop_ret);
};
/* Return true if MODE can be used for a set of copies, given an
}
/* Perform the final adjustment at the end of a string to obtain the
- correct return value for the block operation. If ENDP is 1 return
- memory at the end ala mempcpy, and if ENDP is 2 return memory the
- end minus one byte ala stpcpy. */
+ correct return value for the block operation.
+ The return value is based on the RETMODE argument. */
rtx
-move_by_pieces_d::finish_endp (int endp)
+move_by_pieces_d::finish_retmode (memop_ret retmode)
{
gcc_assert (!m_reverse);
- if (endp == 2)
+ if (retmode == RETURN_END_MINUS_ONE)
{
m_to.maybe_postinc (-1);
--m_offset;
ALIGN is the maximum stack alignment we can assume.
- If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
- mempcpy, and if ENDP is 2 return memory the end minus one byte ala
- stpcpy. */
+ The return value is based on the RETMODE argument. */
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
- unsigned int align, int endp)
+ unsigned int align, memop_ret retmode)
{
#ifndef PUSH_ROUNDING
if (to == NULL)
data.run ();
- if (endp)
- return data.finish_endp (endp);
+ if (retmode != RETURN_BEGIN)
+ return data.finish_retmode (retmode);
else
return to;
}
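
The RETMODE parameter replaces the old ENDP integer throughout. For orientation, a sketch of the memop_ret enum this change introduces in expr.h (names as in the patch; the comments paraphrase the old ENDP documentation):

enum memop_ret
{
  RETURN_BEGIN,          /* ENDP == 0: return the destination address.  */
  RETURN_END,            /* ENDP == 1: return destination + len, ala mempcpy.  */
  RETURN_END_MINUS_ONE   /* ENDP == 2: return destination + len - 1, ala stpcpy.  */
};
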
: op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
{
}
- rtx finish_endp (int);
+ rtx finish_retmode (memop_ret);
};
/* Return true if MODE can be used for a set of stores, given an
}
/* Perform the final adjustment at the end of a string to obtain the
- correct return value for the block operation. If ENDP is 1 return
- memory at the end ala mempcpy, and if ENDP is 2 return memory the
- end minus one byte ala stpcpy. */
+ correct return value for the block operation.
+ The return value is based on the RETMODE argument. */
rtx
-store_by_pieces_d::finish_endp (int endp)
+store_by_pieces_d::finish_retmode (memop_ret retmode)
{
gcc_assert (!m_reverse);
- if (endp == 2)
+ if (retmode == RETURN_END_MINUS_ONE)
{
m_to.maybe_postinc (-1);
--m_offset;
pointer which will be passed as argument in every CONSTFUN call.
ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
a memset operation and false if it's a copy of a constant string.
- If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
- mempcpy, and if ENDP is 2 return memory the end minus one byte ala
- stpcpy. */
+ The return value is based on the RETMODE argument. */
rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
- void *constfundata, unsigned int align, bool memsetp, int endp)
+ void *constfundata, unsigned int align, bool memsetp,
+ memop_ret retmode)
{
if (len == 0)
{
- gcc_assert (endp != 2);
+ gcc_assert (retmode != RETURN_END_MINUS_ONE);
return to;
}
store_by_pieces_d data (to, constfun, constfundata, len, align);
data.run ();
- if (endp)
- return data.finish_endp (endp);
+ if (retmode != RETURN_BEGIN)
+ return data.finish_retmode (retmode);
else
return to;
}
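
The two non-trivial return modes mirror the libc conventions the old comments cited. A minimal standalone illustration in plain C (mempcpy is a GNU extension, hence _GNU_SOURCE):

#define _GNU_SOURCE
#include <assert.h>
#include <string.h>

int
main (void)
{
  char buf[8];
  /* RETURN_END: like mempcpy, the result points one past the block.  */
  void *end = mempcpy (buf, "abc", 4);
  assert ((char *) end == buf + 4);
  /* RETURN_END_MINUS_ONE: like stpcpy, the result points at the last
     byte written, here the copied NUL terminator.  */
  char *last = stpcpy (buf, "abc");
  assert (last == buf + 3 && *last == '\0');
  return 0;
}
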
ALIGN is the maximum alignment we can assume they have.
METHOD describes what kind of copy this is, and what mechanisms may be used.
MIN_SIZE is the minimal size of the block to move.
- MAX_SIZE is the maximal size of block to move, if it can not be represented
+ MAX_SIZE is the maximal size of the block to move; if it cannot be represented
in unsigned HOST_WIDE_INT, then it is a mask of all ones.
Return the address of the new block, if memcpy is called and returns it,
unsigned int expected_align, HOST_WIDE_INT expected_size,
unsigned HOST_WIDE_INT min_size,
unsigned HOST_WIDE_INT max_size,
- unsigned HOST_WIDE_INT probable_max_size)
+ unsigned HOST_WIDE_INT probable_max_size,
+ bool bail_out_libcall, bool *is_move_done)
{
int may_use_call;
rtx retval = 0;
unsigned int align;
+ if (is_move_done)
+ *is_move_done = true;
+
gcc_assert (size);
if (CONST_INT_P (size) && INTVAL (size) == 0)
return 0;
}
if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
- move_by_pieces (x, y, INTVAL (size), align, 0);
+ move_by_pieces (x, y, INTVAL (size), align, RETURN_BEGIN);
else if (emit_block_move_via_movmem (x, y, size, align,
expected_align, expected_size,
min_size, max_size, probable_max_size))
&& ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
&& ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
{
+ if (bail_out_libcall)
+ {
+ if (is_move_done)
+ *is_move_done = false;
+ return retval;
+ }
+
if (may_use_call < 0)
return pc_rtx;
- /* Since x and y are passed to a libcall, mark the corresponding
- tree EXPR as addressable. */
- tree y_expr = MEM_EXPR (y);
- tree x_expr = MEM_EXPR (x);
- if (y_expr)
- mark_addressable (y_expr);
- if (x_expr)
- mark_addressable (x_expr);
retval = emit_block_copy_via_libcall (x, y, size,
method == BLOCK_OP_TAILCALL);
}
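
A self-contained sketch of the control flow the new BAIL_OUT_LIBCALL / IS_MOVE_DONE pair enables, with stand-in names and a stand-in size threshold rather than the GCC API:

#include <stdbool.h>
#include <string.h>

/* Illustrative only: copy helper that refuses the "libcall" path when
   asked, reporting failure through *IS_MOVE_DONE like the expander.  */
static void
try_block_copy (char *dst, const char *src, size_t len,
                bool bail_out_libcall, bool *is_move_done)
{
  if (is_move_done)
    *is_move_done = true;           /* assume success, as the expander does */
  if (len > 16)                     /* stand-in for "too big to move by pieces" */
    {
      if (bail_out_libcall)
        {
          if (is_move_done)
            *is_move_done = false;  /* caller must expand some other way */
          return;
        }
      memcpy (dst, src, len);       /* stand-in for the emitted libcall */
      return;
    }
  for (size_t i = 0; i < len; i++)  /* stand-in for move_by_pieces */
    dst[i] = src[i];
}
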
if (nops >= 8)
{
create_integer_operand (&ops[6], min_size);
- /* If we can not represent the maximal size,
+ /* If we cannot represent the maximal size,
make parameter NULL. */
if ((HOST_WIDE_INT) max_size != -1)
create_integer_operand (&ops[7], max_size);
}
if (nops == 9)
{
- /* If we can not represent the maximal size,
+ /* If we cannot represent the maximal size,
make parameter NULL. */
if ((HOST_WIDE_INT) probable_max_size != -1)
create_integer_operand (&ops[8], probable_max_size);
tree call_expr, dst_tree, src_tree, size_tree;
machine_mode size_mode;
+ /* Since dst and src are passed to a libcall, mark the corresponding
+ tree EXPR as addressable. */
+ tree dst_expr = MEM_EXPR (dst);
+ tree src_expr = MEM_EXPR (src);
+ if (dst_expr)
+ mark_addressable (dst_expr);
+ if (src_expr)
+ mark_addressable (src_expr);
+
dst_addr = copy_addr_to_reg (XEXP (dst, 0));
dst_addr = convert_memory_address (ptr_mode, dst_addr);
dst_tree = make_tree (ptr_type_node, dst_addr);
if (nops >= 8)
{
create_integer_operand (&ops[6], min_size);
- /* If we can not represent the maximal size,
+ /* If we cannot represent the maximal size,
make parameter NULL. */
if ((HOST_WIDE_INT) max_size != -1)
create_integer_operand (&ops[7], max_size);
}
if (nops == 9)
{
- /* If we can not represent the maximal size,
+ /* If we cannot represent the maximal size,
make parameter NULL. */
if ((HOST_WIDE_INT) probable_max_size != -1)
create_integer_operand (&ops[8], probable_max_size);
&& where_pad != stack_direction)
anti_adjust_stack (gen_int_mode (extra, Pmode));
- move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
+ move_by_pieces (NULL, xinner, INTVAL (size) - used, align,
+ RETURN_BEGIN);
}
else
#endif /* PUSH_ROUNDING */
}
else
{
- rtx from_rtx
- = simplify_gen_subreg (to_mode, result,
- TYPE_MODE (TREE_TYPE (from)), 0);
+ rtx from_rtx;
+ if (MEM_P (result))
+ from_rtx = change_address (result, to_mode, NULL_RTX);
+ else
+ from_rtx
+ = simplify_gen_subreg (to_mode, result,
+ TYPE_MODE (TREE_TYPE (from)), 0);
if (from_rtx)
{
emit_move_insn (XEXP (to_rtx, 0),
emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
}
}
+ /* For calls to functions returning variable length structures, if TO_RTX
+ is not a MEM, go through a MEM because we must not create temporaries
+ of the VLA type. */
+ else if (!MEM_P (to_rtx)
+ && TREE_CODE (from) == CALL_EXPR
+ && COMPLETE_TYPE_P (TREE_TYPE (from))
+ && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) != INTEGER_CST)
+ {
+ rtx temp = assign_stack_temp (GET_MODE (to_rtx),
+ GET_MODE_SIZE (GET_MODE (to_rtx)));
+ result = store_field (temp, bitsize, bitpos, bitregion_start,
+ bitregion_end, mode1, from, get_alias_set (to),
+ nontemporal, reversep);
+ emit_move_insn (to_rtx, temp);
+ }
else
{
if (MEM_P (to_rtx))
MEM_VOLATILE_P (to_rtx) = 1;
}
+ gcc_checking_assert (known_ge (bitpos, 0));
if (optimize_bitfield_assignment_op (bitsize, bitpos,
bitregion_start, bitregion_end,
mode1, to_rtx, to, from,
return maybe_expand_insn (code, 2, ops);
}
+/* Helper function for store_expr: return the piece of STRING_CST DATA
+ at byte OFFSET in MODE, zero-padding past the end of the string. */
+
+static rtx
+string_cst_read_str (void *data, HOST_WIDE_INT offset, scalar_int_mode mode)
+{
+ tree str = (tree) data;
+
+ gcc_assert (offset >= 0);
+ if (offset >= TREE_STRING_LENGTH (str))
+ return const0_rtx;
+
+ if ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
+ > (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (str))
+ {
+ char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
+ size_t l = TREE_STRING_LENGTH (str) - offset;
+ memcpy (p, TREE_STRING_POINTER (str) + offset, l);
+ memset (p + l, '\0', GET_MODE_SIZE (mode) - l);
+ return c_readstr (p, mode, false);
+ }
+
+ return c_readstr (TREE_STRING_POINTER (str) + offset, mode, false);
+}
+
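
The delicate case in string_cst_read_str is a chunk that straddles the end of the string and must be zero-padded. A standalone illustration of that boundary logic in plain C, mirroring the XALLOCAVEC/memcpy/memset steps:

#include <stdio.h>
#include <string.h>

int
main (void)
{
  const char str[] = "abcdefg";     /* TREE_STRING_LENGTH would be 8 (incl. NUL) */
  size_t len = sizeof str;          /* 8 */
  size_t offset = 6, modesize = 4;  /* 4-byte chunk starting 2 bytes before the end */
  char p[4];
  size_t l = len - offset;          /* only 2 bytes actually available */
  memcpy (p, str + offset, l);
  memset (p + l, '\0', modesize - l);
  printf ("%02x %02x %02x %02x\n", p[0], p[1], p[2], p[3]);  /* 67 00 00 00 */
  return 0;
}
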
/* Generate code for computing expression EXP,
and storing the value into TARGET.
rtx
store_expr (tree exp, rtx target, int call_param_p,
- bool nontemporal, bool reverse)
+ bool nontemporal, bool reverse)
{
rtx temp;
rtx alt_rtl = NULL_RTX;
if (TREE_STRING_LENGTH (str) <= 0)
goto normal_expr;
- str_copy_len = strlen (TREE_STRING_POINTER (str));
- if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
- goto normal_expr;
+ if (can_store_by_pieces (exp_len, string_cst_read_str, (void *) str,
+ MEM_ALIGN (target), false))
+ {
+ store_by_pieces (target, exp_len, string_cst_read_str, (void *) str,
+ MEM_ALIGN (target), false, RETURN_BEGIN);
+ return NULL_RTX;
+ }
str_copy_len = TREE_STRING_LENGTH (str);
- if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
- && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
+ if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
{
str_copy_len += STORE_MAX_PIECES - 1;
str_copy_len &= ~(STORE_MAX_PIECES - 1);
}
- str_copy_len = MIN (str_copy_len, exp_len);
- if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
- CONST_CAST (char *, TREE_STRING_POINTER (str)),
- MEM_ALIGN (target), false))
+ if (str_copy_len >= exp_len)
goto normal_expr;
- dest_mem = target;
-
- dest_mem = store_by_pieces (dest_mem,
- str_copy_len, builtin_strncpy_read_str,
- CONST_CAST (char *,
- TREE_STRING_POINTER (str)),
- MEM_ALIGN (target), false,
- exp_len > str_copy_len ? 1 : 0);
- if (exp_len > str_copy_len)
- clear_storage (adjust_address (dest_mem, BLKmode, 0),
- GEN_INT (exp_len - str_copy_len),
- BLOCK_OP_NORMAL);
+ if (!can_store_by_pieces (str_copy_len, string_cst_read_str,
+ (void *) str, MEM_ALIGN (target), false))
+ goto normal_expr;
+
+ dest_mem = store_by_pieces (target, str_copy_len, string_cst_read_str,
+ (void *) str, MEM_ALIGN (target), false,
+ RETURN_END);
+ clear_storage (adjust_address_1 (dest_mem, BLKmode, 0, 1, 1, 0,
+ exp_len - str_copy_len),
+ GEN_INT (exp_len - str_copy_len), BLOCK_OP_NORMAL);
return NULL_RTX;
}
else
static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
+ HOST_WIDE_INT *p_unique_nz_elts,
HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
unsigned HOST_WIDE_INT idx;
- HOST_WIDE_INT nz_elts, init_elts, num_fields;
+ HOST_WIDE_INT nz_elts, unique_nz_elts, init_elts, num_fields;
tree value, purpose, elt_type;
/* Whether CTOR is a valid constant initializer, in accordance with what
bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
nz_elts = 0;
+ unique_nz_elts = 0;
init_elts = 0;
num_fields = 0;
elt_type = NULL_TREE;
{
case CONSTRUCTOR:
{
- HOST_WIDE_INT nz = 0, ic = 0;
+ HOST_WIDE_INT nz = 0, unz = 0, ic = 0;
- bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
- p_complete);
+ bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &unz,
+ &ic, p_complete);
nz_elts += mult * nz;
+ unique_nz_elts += unz;
init_elts += mult * ic;
if (const_from_elts_p && const_p)
case REAL_CST:
case FIXED_CST:
if (!initializer_zerop (value))
- nz_elts += mult;
+ {
+ nz_elts += mult;
+ unique_nz_elts++;
+ }
init_elts += mult;
break;
case STRING_CST:
nz_elts += mult * TREE_STRING_LENGTH (value);
+ unique_nz_elts += TREE_STRING_LENGTH (value);
init_elts += mult * TREE_STRING_LENGTH (value);
break;
case COMPLEX_CST:
if (!initializer_zerop (TREE_REALPART (value)))
- nz_elts += mult;
+ {
+ nz_elts += mult;
+ unique_nz_elts++;
+ }
if (!initializer_zerop (TREE_IMAGPART (value)))
- nz_elts += mult;
- init_elts += mult;
+ {
+ nz_elts += mult;
+ unique_nz_elts++;
+ }
+ init_elts += 2 * mult;
break;
case VECTOR_CST:
{
tree v = VECTOR_CST_ELT (value, i);
if (!initializer_zerop (v))
- nz_elts += mult;
+ {
+ nz_elts += mult;
+ unique_nz_elts++;
+ }
init_elts += mult;
}
}
{
HOST_WIDE_INT tc = count_type_elements (elt_type, false);
nz_elts += mult * tc;
+ unique_nz_elts += tc;
init_elts += mult * tc;
if (const_from_elts_p && const_p)
*p_complete = false;
*p_nz_elts += nz_elts;
+ *p_unique_nz_elts += unique_nz_elts;
*p_init_elts += init_elts;
return const_p;
/* Examine CTOR to discover:
* how many scalar fields are set to nonzero values,
and place it in *P_NZ_ELTS;
+ * the same, but counting RANGE_EXPRs with a multiplier of 1 instead of
+ high - low + 1 (this can be useful for callers to determine which ctors
+ could be cheaply initialized with - perhaps nested - loops rather
+ than copied from huge read-only data),
+ and place it in *P_UNIQUE_NZ_ELTS;
* how many scalar fields in total are in CTOR,
and place it in *P_ELT_COUNT.
* whether the constructor is complete -- in the sense that every
bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
+ HOST_WIDE_INT *p_unique_nz_elts,
HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
*p_nz_elts = 0;
+ *p_unique_nz_elts = 0;
*p_init_elts = 0;
*p_complete = true;
- return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
+ return categorize_ctor_elements_1 (ctor, p_nz_elts, p_unique_nz_elts,
+ p_init_elts, p_complete);
}
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
return count_type_elements (type, true) == num_elts;
}
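
The unique count targets GNU C range initializers, where a single stored value covers many elements. Assuming the counting described above, a ctor like the following should yield nz_elts == 10000 but unique_nz_elts == 1:

/* GNU C range-designated initializer: 10000 nonzero elements but a single
   unique value.  Per the comment above, callers can use the unique count
   to prefer a cheap initialization loop over copying roughly 40 kB of
   read-only data.  */
static int big[10000] = { [0 ... 9999] = 42 };
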
-/* Return 1 if EXP contains mostly (3/4) zeros. */
+/* Return 1 if EXP contains mostly (3/4) zeros. */
static int
mostly_zeros_p (const_tree exp)
{
if (TREE_CODE (exp) == CONSTRUCTOR)
{
- HOST_WIDE_INT nz_elts, init_elts;
+ HOST_WIDE_INT nz_elts, unz_elts, init_elts;
bool complete_p;
- categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
+ categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
+ &complete_p);
return !complete_p || nz_elts < init_elts / 4;
}
{
if (TREE_CODE (exp) == CONSTRUCTOR)
{
- HOST_WIDE_INT nz_elts, init_elts;
+ HOST_WIDE_INT nz_elts, unz_elts, init_elts;
bool complete_p;
- categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
+ categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
+ &complete_p);
return nz_elts == 0;
}
}
/* Store the value in the bitfield. */
+ gcc_checking_assert (known_ge (bitpos, 0));
store_bit_field (target, bitsize, bitpos,
bitregion_start, bitregion_end,
mode, temp, reverse);
if ((TREE_STATIC (exp)
&& ((mode == BLKmode
&& ! (target != 0 && safe_from_p (target, exp, 1)))
- || TREE_ADDRESSABLE (exp)
- || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
- && (! can_move_by_pieces
- (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
- TYPE_ALIGN (type)))
- && ! mostly_zeros_p (exp))))
+ || TREE_ADDRESSABLE (exp)
+ || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
+ && (! can_move_by_pieces
+ (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
+ TYPE_ALIGN (type)))
+ && ! mostly_zeros_p (exp))))
|| ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
&& TREE_CONSTANT (exp)))
{
!= INTEGER_CST check. Handle it. */
if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
{
- op0 = convert_modes (innermode, mode, op0, true);
- op1 = convert_modes (innermode, mode, op1, false);
+ op0 = convert_modes (mode, innermode, op0, true);
+ op1 = convert_modes (mode, innermode, op1, false);
return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
target, unsignedp));
}
if (TREE_CODE (treeop0) != INTEGER_CST)
{
if (find_widening_optab_handler (this_optab, mode, innermode)
- != CODE_FOR_nothing)
+ != CODE_FOR_nothing)
{
expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
EXPAND_NORMAL);
if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
{
widen_mult_const:
- op0 = convert_modes (innermode, mode, op0, zextend_p);
+ op0 = convert_modes (mode, innermode, op0, zextend_p);
op1
- = convert_modes (innermode, mode, op1,
+ = convert_modes (mode, innermode, op1,
TYPE_UNSIGNED (TREE_TYPE (treeop1)));
return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
target,
return REDUCE_BIT_FIELD (temp);
}
if (find_widening_optab_handler (other_optab, mode, innermode)
- != CODE_FOR_nothing
+ != CODE_FOR_nothing
&& innermode == word_mode)
{
rtx htem, hipart;
op0 = expand_normal (treeop0);
- if (TREE_CODE (treeop1) == INTEGER_CST)
- op1 = convert_modes (word_mode, mode,
- expand_normal (treeop1),
- TYPE_UNSIGNED (TREE_TYPE (treeop1)));
- else
- op1 = expand_normal (treeop1);
- /* op0 and op1 might still be constant, despite the above
+ op1 = expand_normal (treeop1);
+ /* op0 and op1 might be constants, despite the above
!= INTEGER_CST check. Handle it. */
if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
goto widen_mult_const;
+ if (TREE_CODE (treeop1) == INTEGER_CST)
+ op1 = convert_modes (mode, word_mode, op1,
+ TYPE_UNSIGNED (TREE_TYPE (treeop1)));
temp = expand_binop (mode, other_optab, op0, op1, target,
unsignedp, OPTAB_LIB_WIDEN);
hipart = gen_highpart (word_mode, temp);
gcc_assert (target);
return target;
- case VEC_PACK_TRUNC_EXPR:
case VEC_PACK_SAT_EXPR:
case VEC_PACK_FIX_TRUNC_EXPR:
mode = TYPE_MODE (TREE_TYPE (treeop0));
goto binop;
+ case VEC_PACK_TRUNC_EXPR:
+ if (VECTOR_BOOLEAN_TYPE_P (type)
+ && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (treeop0))
+ && mode == TYPE_MODE (TREE_TYPE (treeop0))
+ && SCALAR_INT_MODE_P (mode))
+ {
+ struct expand_operand eops[4];
+ machine_mode imode = TYPE_MODE (TREE_TYPE (treeop0));
+ expand_operands (treeop0, treeop1,
+ subtarget, &op0, &op1, EXPAND_NORMAL);
+ this_optab = vec_pack_sbool_trunc_optab;
+ enum insn_code icode = optab_handler (this_optab, imode);
+ create_output_operand (&eops[0], target, mode);
+ create_convert_operand_from (&eops[1], op0, imode, false);
+ create_convert_operand_from (&eops[2], op1, imode, false);
+ temp = GEN_INT (TYPE_VECTOR_SUBPARTS (type).to_constant ());
+ create_input_operand (&eops[3], temp, imode);
+ expand_insn (icode, 4, eops);
+ return eops[0].value;
+ }
+ mode = TYPE_MODE (TREE_TYPE (treeop0));
+ goto binop;
+
case VEC_PACK_FLOAT_EXPR:
mode = TYPE_MODE (TREE_TYPE (treeop0));
expand_operands (treeop0, treeop1,
mode2
= CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
+ /* Make sure bitpos is not negative; a negative bitpos can wreak havoc later. */
+ if (maybe_lt (bitpos, 0))
+ {
+ gcc_checking_assert (offset == NULL_TREE);
+ offset = size_int (bits_to_bytes_round_down (bitpos));
+ bitpos = num_trailing_bits (bitpos);
+ }
+
/* If we have either an offset, a BLKmode result, or a reference
outside the underlying object, we must force it to memory.
Such a case can occur in Ada if we have unchecked conversion
&& GET_MODE_CLASS (ext_mode) == MODE_INT)
reversep = TYPE_REVERSE_STORAGE_ORDER (type);
+ gcc_checking_assert (known_ge (bitpos, 0));
op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
(modifier == EXPAND_STACK_PARM
? NULL_RTX : target),
DECL_ATTRIBUTES (fndecl))) != NULL)
{
const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
- warning_at (tree_nonartificial_location (exp), 0,
+ warning_at (tree_nonartificial_location (exp),
+ OPT_Wattribute_warning,
"%Kcall to %qs declared with attribute warning: %s",
exp, identifier_to_locale (ident),
TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
}
/* Check for a built-in function. */
- if (fndecl && DECL_BUILT_IN (fndecl))
+ if (fndecl && fndecl_built_in_p (fndecl))
{
gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
return expand_builtin (exp, target, subtarget, tmode, ignore);
{
rtx_code_label *label = gen_label_rtx ();
int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
- do_jump (TREE_OPERAND (rhs, 1),
- value ? label : 0,
- value ? 0 : label,
- profile_probability::uninitialized ());
+ profile_probability prob = profile_probability::uninitialized ();
+ if (value)
+ jumpifnot (TREE_OPERAND (rhs, 1), label, prob);
+ else
+ jumpif (TREE_OPERAND (rhs, 1), label, prob);
expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
false);
do_pending_stack_adjust ();
/* Return the tree node if an ARG corresponds to a string constant or zero
if it doesn't. If we return nonzero, set *PTR_OFFSET to the (possibly
non-constant) offset in bytes within the string that ARG is accessing.
- The type of the offset is sizetype. */
+ The type of the offset is sizetype. If MEM_SIZE is non-null, the storage
+ size of the memory is stored in *MEM_SIZE. If DECL is non-null, the
+ constant declaration is stored in *DECL if available. */
tree
-string_constant (tree arg, tree *ptr_offset)
+string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
{
tree array;
STRIP_NOPS (arg);
if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
return NULL_TREE;
+
+ if (!integer_zerop (array_ref_low_bound (arg)))
+ return NULL_TREE;
+
+ if (!integer_onep (array_ref_element_size (arg)))
+ return NULL_TREE;
}
}
array = get_addr_base_and_unit_offset (ref, &base_off);
tree arg0 = TREE_OPERAND (arg, 0);
tree arg1 = TREE_OPERAND (arg, 1);
- STRIP_NOPS (arg0);
- STRIP_NOPS (arg1);
-
- if (TREE_CODE (arg0) == ADDR_EXPR)
- ; /* Do nothing. */
- else if (TREE_CODE (arg1) == ADDR_EXPR)
- std::swap (arg0, arg1);
- else
- return NULL_TREE;
-
tree offset;
- if (tree str = string_constant (arg0, &offset))
+ tree str = string_constant (arg0, &offset, mem_size, decl);
+ if (!str)
+ {
+ str = string_constant (arg1, &offset, mem_size, decl);
+ arg1 = arg0;
+ }
+
+ if (str)
{
/* Avoid pointers to arrays (see bug 86622). */
if (POINTER_TYPE_P (TREE_TYPE (arg))
&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == ARRAY_TYPE
- && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF)
+ && !(decl && !*decl)
+ && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
+ && mem_size && tree_fits_uhwi_p (*mem_size)
+ && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
return NULL_TREE;
- tree type = TREE_TYPE (arg1);
+ tree type = TREE_TYPE (offset);
+ arg1 = fold_convert (type, arg1);
*ptr_offset = fold_build2 (PLUS_EXPR, type, offset, arg1);
return str;
}
return NULL_TREE;
}
+ else if (TREE_CODE (arg) == SSA_NAME)
+ {
+ gimple *stmt = SSA_NAME_DEF_STMT (arg);
+ if (!is_gimple_assign (stmt))
+ return NULL_TREE;
+
+ tree rhs1 = gimple_assign_rhs1 (stmt);
+ tree_code code = gimple_assign_rhs_code (stmt);
+ if (code == ADDR_EXPR)
+ return string_constant (rhs1, ptr_offset, mem_size, decl);
+ else if (code != POINTER_PLUS_EXPR)
+ return NULL_TREE;
+
+ tree offset;
+ if (tree str = string_constant (rhs1, &offset, mem_size, decl))
+ {
+ /* Avoid pointers to arrays (see bug 86622). */
+ if (POINTER_TYPE_P (TREE_TYPE (rhs1))
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs1))) == ARRAY_TYPE
+ && !(decl && !*decl)
+ && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
+ && mem_size && tree_fits_uhwi_p (*mem_size)
+ && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
+ return NULL_TREE;
+
+ tree rhs2 = gimple_assign_rhs2 (stmt);
+ tree type = TREE_TYPE (offset);
+ rhs2 = fold_convert (type, rhs2);
+ *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, rhs2);
+ return str;
+ }
+ return NULL_TREE;
+ }
else if (DECL_P (arg))
array = arg;
else
if (TREE_CODE (chartype) != INTEGER_TYPE)
return NULL;
- tree charsize = array_ref_element_size (arg);
- /* Set the non-constant offset to the non-constant index scaled
- by the size of the character type. */
- offset = fold_build2 (MULT_EXPR, TREE_TYPE (offset),
- fold_convert (sizetype, varidx), charsize);
+ offset = fold_convert (sizetype, varidx);
}
if (TREE_CODE (array) == STRING_CST)
{
*ptr_offset = fold_convert (sizetype, offset);
+ if (mem_size)
+ *mem_size = TYPE_SIZE_UNIT (TREE_TYPE (array));
+ if (decl)
+ *decl = NULL_TREE;
+ gcc_checking_assert (tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (array)))
+ >= TREE_STRING_LENGTH (array));
return array;
}
offset = off;
}
- if (!init || TREE_CODE (init) != STRING_CST)
+ if (!init)
return NULL_TREE;
- tree array_size = DECL_SIZE_UNIT (array);
- if (!array_size || TREE_CODE (array_size) != INTEGER_CST)
- return NULL_TREE;
+ *ptr_offset = offset;
+
+ tree eltype = TREE_TYPE (init);
+ tree initsize = TYPE_SIZE_UNIT (eltype);
+ if (mem_size)
+ *mem_size = initsize;
+
+ if (decl)
+ *decl = array;
+
+ if (TREE_CODE (init) == INTEGER_CST
+ && (TREE_CODE (TREE_TYPE (array)) == INTEGER_TYPE
+ || TYPE_MAIN_VARIANT (eltype) == char_type_node))
+ {
+ /* For a reference to (address of) a single constant character,
+ store the native representation of the character in CHARBUF.
+ If the reference is to an element of an array or a member
+ of a struct, only consider narrow characters until ctors
+ for wide character arrays are transformed to STRING_CSTs
+ like those for narrow arrays. */
+ unsigned char charbuf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
+ int len = native_encode_expr (init, charbuf, sizeof charbuf, 0);
+ if (len > 0)
+ {
+ /* Construct a string literal with elements of ELTYPE and
+ the representation above. Then strip
+ the ADDR_EXPR (ARRAY_REF (...)) around the STRING_CST. */
+ init = build_string_literal (len, (char *)charbuf, eltype);
+ init = TREE_OPERAND (TREE_OPERAND (init, 0), 0);
+ }
+ }
- /* Avoid returning a string that doesn't fit in the array
- it is stored in, like
- const char a[4] = "abcde";
- but do handle those that fit even if they have excess
- initializers, such as in
- const char a[4] = "abc\000\000";
- The excess elements contribute to TREE_STRING_LENGTH()
- but not to strlen(). */
- unsigned HOST_WIDE_INT charsize
- = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
- unsigned HOST_WIDE_INT length = TREE_STRING_LENGTH (init);
- length = string_length (TREE_STRING_POINTER (init), charsize,
- length / charsize);
- if (compare_tree_int (array_size, length + 1) < 0)
+ if (TREE_CODE (init) != STRING_CST)
return NULL_TREE;
- *ptr_offset = offset;
+ gcc_checking_assert (tree_to_shwi (initsize) >= TREE_STRING_LENGTH (init));
+
return init;
}
\f
+/* Compute the modular multiplicative inverse of A modulo B
+ using the extended Euclidean algorithm. Assumes A and B are coprime. */
+static wide_int
+mod_inv (const wide_int &a, const wide_int &b)
+{
+ /* Verify the assumption. */
+ gcc_checking_assert (wi::eq_p (wi::gcd (a, b), 1));
+
+ unsigned int p = a.get_precision () + 1;
+ gcc_checking_assert (b.get_precision () + 1 == p);
+ wide_int c = wide_int::from (a, p, UNSIGNED);
+ wide_int d = wide_int::from (b, p, UNSIGNED);
+ wide_int x0 = wide_int::from (0, p, UNSIGNED);
+ wide_int x1 = wide_int::from (1, p, UNSIGNED);
+
+ if (wi::eq_p (b, 1))
+ return wide_int::from (1, p, UNSIGNED);
+
+ while (wi::gt_p (c, 1, UNSIGNED))
+ {
+ wide_int t = d;
+ wide_int q = wi::divmod_trunc (c, d, UNSIGNED, &d);
+ c = t;
+ wide_int s = x0;
+ x0 = wi::sub (x1, wi::mul (q, x0));
+ x1 = s;
+ }
+ if (wi::lt_p (x1, 0, SIGNED))
+ x1 += d;
+ return x1;
+}
+
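
The only caller below passes B == 1 << prec, and for a power-of-two modulus the inverse of an odd A can also be computed with Newton's iteration, which makes the constants easy to check by hand. A standalone sketch (illustrative, not the wide_int routine above):

#include <assert.h>
#include <stdint.h>

/* Inverse of an odd A modulo 2^32 via Newton's iteration: starting from
   X == A (correct to 3 low bits, since A*A == 1 mod 8 for odd A), each
   step X *= 2 - A*X doubles the number of correct low bits.  */
static uint32_t
modinv_pow2 (uint32_t a)
{
  uint32_t x = a;
  for (int i = 0; i < 4; i++)   /* 3 -> 6 -> 12 -> 24 -> 48 bits */
    x *= 2 - a * x;
  return x;
}

int
main (void)
{
  /* The constant used in the x % 3 examples in the comments below.  */
  assert (modinv_pow2 (3) == 0xaaaaaaab);
  assert (3u * 0xaaaaaaabu == 1u);  /* wraps mod 2^32 */
  return 0;
}
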
+/* Optimize x % C1 == C2 for signed modulo if C1 is a power of two and C2
+ is non-zero. With C3 = (1<<(prec-1)) | (C1 - 1), transform:
+ for C2 > 0: x % C1 == C2 into x & C3 == C2
+ for C2 < 0: x % C1 == C2 into x & C3 == (C2 & C3). */
+enum tree_code
+maybe_optimize_pow2p_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
+{
+ gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
+ tree treeop0 = gimple_assign_rhs1 (stmt);
+ tree treeop1 = gimple_assign_rhs2 (stmt);
+ tree type = TREE_TYPE (*arg0);
+ scalar_int_mode mode;
+ if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
+ return code;
+ if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
+ || TYPE_PRECISION (type) <= 1
+ || TYPE_UNSIGNED (type)
+ /* Signed x % c == 0 should have been optimized into unsigned modulo
+ earlier. */
+ || integer_zerop (*arg1)
+ /* If c is known to be non-negative, modulo will be expanded as unsigned
+ modulo. */
+ || get_range_pos_neg (treeop0) == 1)
+ return code;
+
+ /* x % c == d where d < 0 && d <= -c is always false. */
+ if (tree_int_cst_sgn (*arg1) == -1
+ && -wi::to_widest (treeop1) >= wi::to_widest (*arg1))
+ return code;
+
+ int prec = TYPE_PRECISION (type);
+ wide_int w = wi::to_wide (treeop1) - 1;
+ w |= wi::shifted_mask (0, prec - 1, true, prec);
+ tree c3 = wide_int_to_tree (type, w);
+ tree c4 = *arg1;
+ if (tree_int_cst_sgn (*arg1) == -1)
+ c4 = wide_int_to_tree (type, w & wi::to_wide (*arg1));
+
+ rtx op0 = expand_normal (treeop0);
+ treeop0 = make_tree (TREE_TYPE (treeop0), op0);
+
+ bool speed_p = optimize_insn_for_speed_p ();
+
+ do_pending_stack_adjust ();
+
+ location_t loc = gimple_location (stmt);
+ struct separate_ops ops;
+ ops.code = TRUNC_MOD_EXPR;
+ ops.location = loc;
+ ops.type = TREE_TYPE (treeop0);
+ ops.op0 = treeop0;
+ ops.op1 = treeop1;
+ ops.op2 = NULL_TREE;
+ start_sequence ();
+ rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
+ EXPAND_NORMAL);
+ rtx_insn *moinsns = get_insns ();
+ end_sequence ();
+
+ unsigned mocost = seq_cost (moinsns, speed_p);
+ mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
+ mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);
+
+ ops.code = BIT_AND_EXPR;
+ ops.location = loc;
+ ops.type = TREE_TYPE (treeop0);
+ ops.op0 = treeop0;
+ ops.op1 = c3;
+ ops.op2 = NULL_TREE;
+ start_sequence ();
+ rtx mur = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
+ EXPAND_NORMAL);
+ rtx_insn *muinsns = get_insns ();
+ end_sequence ();
+
+ unsigned mucost = seq_cost (muinsns, speed_p);
+ mucost += rtx_cost (mur, mode, EQ, 0, speed_p);
+ mucost += rtx_cost (expand_normal (c4), mode, EQ, 1, speed_p);
+
+ if (mocost <= mucost)
+ {
+ emit_insn (moinsns);
+ *arg0 = make_tree (TREE_TYPE (*arg0), mor);
+ return code;
+ }
+
+ emit_insn (muinsns);
+ *arg0 = make_tree (TREE_TYPE (*arg0), mur);
+ *arg1 = c4;
+ return code;
+}
+
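
A standalone spot-check of this transformation for C1 == 8 at 32-bit precision, where C3 == (1 << 31) | 7 (test code, not compiler code):

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const uint32_t c3 = (1u << 31) | 7;
  for (int32_t x = -100; x <= 100; x++)
    {
      /* C2 == 1 > 0:  x % 8 == 1  <=>  (x & C3) == 1.  */
      assert ((x % 8 == 1) == (((uint32_t) x & c3) == 1));
      /* C2 == -1 < 0:  x % 8 == -1  <=>  (x & C3) == (C2 & C3).  */
      assert ((x % 8 == -1) == (((uint32_t) x & c3) == ((uint32_t) -1 & c3)));
    }
  return 0;
}
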
+/* Attempt to optimize unsigned (X % C1) == C2 (or (X % C1) != C2).
+ If C1 is odd, to:
+ (X - C2) * C3 <= C4 (or >), where
+ C3 is the modular multiplicative inverse of C1 modulo 1<<prec and
+ C4 is ((1<<prec) - 1) / C1 or ((1<<prec) - 1) / C1 - 1 (the latter
+ if C2 > ((1<<prec) - 1) % C1).
+ If C1 is even (S = ctz (C1)) and C2 is 0, to:
+ ((X * C3) r>> S) <= C4, where C3 is the modular multiplicative
+ inverse of C1>>S modulo 1<<prec and C4 is (((1<<prec) - 1) / (C1>>S)) >> S.
+
+ For signed (X % C1) == 0, if C1 is odd, to (all operations in it
+ unsigned):
+ (X * C3) + C4 <= 2 * C4, where
+ C3 is the modular multiplicative inverse of (unsigned) C1 modulo 1<<prec
+ and C4 is ((1<<(prec - 1) - 1) / C1).
+ If C1 is even (S = ctz (C1)), to:
+ ((X * C3) + C4) r>> S <= (C4 >> (S - 1))
+ where C3 is the modular multiplicative inverse of (unsigned)(C1>>S)
+ modulo 1<<prec and C4 is ((1<<(prec - 1) - 1) / (C1>>S)) & (-1<<S).
+
+ See the Hacker's Delight book, section 10-17. */
+enum tree_code
+maybe_optimize_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
+{
+ gcc_checking_assert (code == EQ_EXPR || code == NE_EXPR);
+ gcc_checking_assert (TREE_CODE (*arg1) == INTEGER_CST);
+
+ if (optimize < 2)
+ return code;
+
+ gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
+ if (stmt == NULL)
+ return code;
+
+ tree treeop0 = gimple_assign_rhs1 (stmt);
+ tree treeop1 = gimple_assign_rhs2 (stmt);
+ if (TREE_CODE (treeop0) != SSA_NAME
+ || TREE_CODE (treeop1) != INTEGER_CST
+ /* Don't optimize the undefined behavior case x % 0;
+ x % 1 should have been optimized into zero, punt if
+ it makes it here for whatever reason;
+ x % -c should have been optimized into x % c. */
+ || compare_tree_int (treeop1, 2) <= 0
+ /* Likewise x % c == d where d >= c should be always false. */
+ || tree_int_cst_le (treeop1, *arg1))
+ return code;
+
+ /* Unsigned x % pow2 is handled right already, for signed
+ modulo handle it in maybe_optimize_pow2p_mod_cmp. */
+ if (integer_pow2p (treeop1))
+ return maybe_optimize_pow2p_mod_cmp (code, arg0, arg1);
+
+ tree type = TREE_TYPE (*arg0);
+ scalar_int_mode mode;
+ if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
+ return code;
+ if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
+ || TYPE_PRECISION (type) <= 1)
+ return code;
+
+ signop sgn = UNSIGNED;
+ /* If both operands are known to have the sign bit clear, handle
+ even the signed modulo case as unsigned. treeop1 is always
+ positive >= 2, checked above. */
+ if (!TYPE_UNSIGNED (type) && get_range_pos_neg (treeop0) != 1)
+ sgn = SIGNED;
+
+ if (!TYPE_UNSIGNED (type))
+ {
+ if (tree_int_cst_sgn (*arg1) == -1)
+ return code;
+ type = unsigned_type_for (type);
+ if (!type || TYPE_MODE (type) != TYPE_MODE (TREE_TYPE (*arg0)))
+ return code;
+ }
+
+ int prec = TYPE_PRECISION (type);
+ wide_int w = wi::to_wide (treeop1);
+ int shift = wi::ctz (w);
+ /* Unsigned (X % C1) == C2 is equivalent to (X - C2) % C1 == 0 if
+ C2 <= -1U % C1, because for any Z >= 0U - C2 in that case (Z % C1) != 0.
+ If C1 is odd, we can handle all cases by subtracting
+ C4 below. We could handle even the even C1 and C2 > -1U % C1 cases
+ e.g. by testing for overflow on the subtraction, punt on that for now
+ though. */
+ if ((sgn == SIGNED || shift) && !integer_zerop (*arg1))
+ {
+ if (sgn == SIGNED)
+ return code;
+ wide_int x = wi::umod_trunc (wi::mask (prec, false, prec), w);
+ if (wi::gtu_p (wi::to_wide (*arg1), x))
+ return code;
+ }
+
+ imm_use_iterator imm_iter;
+ use_operand_p use_p;
+ FOR_EACH_IMM_USE_FAST (use_p, imm_iter, treeop0)
+ {
+ gimple *use_stmt = USE_STMT (use_p);
+ /* Punt if treeop0 is used in the same bb in a division
+ or another modulo with the same divisor; the division and
+ modulo are expected to be optimized together. */
+ if (use_stmt == stmt
+ || gimple_bb (use_stmt) != gimple_bb (stmt))
+ continue;
+ if (!is_gimple_assign (use_stmt)
+ || (gimple_assign_rhs_code (use_stmt) != TRUNC_DIV_EXPR
+ && gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR))
+ continue;
+ if (gimple_assign_rhs1 (use_stmt) != treeop0
+ || !operand_equal_p (gimple_assign_rhs2 (use_stmt), treeop1, 0))
+ continue;
+ return code;
+ }
+
+ w = wi::lrshift (w, shift);
+ wide_int a = wide_int::from (w, prec + 1, UNSIGNED);
+ wide_int b = wi::shifted_mask (prec, 1, false, prec + 1);
+ wide_int m = wide_int::from (mod_inv (a, b), prec, UNSIGNED);
+ tree c3 = wide_int_to_tree (type, m);
+ tree c5 = NULL_TREE;
+ wide_int d, e;
+ if (sgn == UNSIGNED)
+ {
+ d = wi::divmod_trunc (wi::mask (prec, false, prec), w, UNSIGNED, &e);
+ /* Use <= floor ((1<<prec) - 1) / C1 only if C2 <= ((1<<prec) - 1) % C1,
+ otherwise use < or subtract one from C4. E.g. for
+ x % 3U == 0 we transform this into x * 0xaaaaaaab <= 0x55555555, but
+ x % 3U == 1 already needs to be
+ (x - 1) * 0xaaaaaaabU <= 0x55555554. */
+ if (!shift && wi::gtu_p (wi::to_wide (*arg1), e))
+ d -= 1;
+ if (shift)
+ d = wi::lrshift (d, shift);
+ }
+ else
+ {
+ e = wi::udiv_trunc (wi::mask (prec - 1, false, prec), w);
+ if (!shift)
+ d = wi::lshift (e, 1);
+ else
+ {
+ e = wi::bit_and (e, wi::mask (shift, true, prec));
+ d = wi::lrshift (e, shift - 1);
+ }
+ c5 = wide_int_to_tree (type, e);
+ }
+ tree c4 = wide_int_to_tree (type, d);
+
+ rtx op0 = expand_normal (treeop0);
+ treeop0 = make_tree (TREE_TYPE (treeop0), op0);
+
+ bool speed_p = optimize_insn_for_speed_p ();
+
+ do_pending_stack_adjust ();
+
+ location_t loc = gimple_location (stmt);
+ struct separate_ops ops;
+ ops.code = TRUNC_MOD_EXPR;
+ ops.location = loc;
+ ops.type = TREE_TYPE (treeop0);
+ ops.op0 = treeop0;
+ ops.op1 = treeop1;
+ ops.op2 = NULL_TREE;
+ start_sequence ();
+ rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
+ EXPAND_NORMAL);
+ rtx_insn *moinsns = get_insns ();
+ end_sequence ();
+
+ unsigned mocost = seq_cost (moinsns, speed_p);
+ mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
+ mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);
+
+ tree t = fold_convert_loc (loc, type, treeop0);
+ if (!integer_zerop (*arg1))
+ t = fold_build2_loc (loc, MINUS_EXPR, type, t, fold_convert (type, *arg1));
+ t = fold_build2_loc (loc, MULT_EXPR, type, t, c3);
+ if (sgn == SIGNED)
+ t = fold_build2_loc (loc, PLUS_EXPR, type, t, c5);
+ if (shift)
+ {
+ tree s = build_int_cst (NULL_TREE, shift);
+ t = fold_build2_loc (loc, RROTATE_EXPR, type, t, s);
+ }
+
+ start_sequence ();
+ rtx mur = expand_normal (t);
+ rtx_insn *muinsns = get_insns ();
+ end_sequence ();
+
+ unsigned mucost = seq_cost (muinsns, speed_p);
+ mucost += rtx_cost (mur, mode, LE, 0, speed_p);
+ mucost += rtx_cost (expand_normal (c4), mode, LE, 1, speed_p);
+
+ if (mocost <= mucost)
+ {
+ emit_insn (moinsns);
+ *arg0 = make_tree (TREE_TYPE (*arg0), mor);
+ return code;
+ }
+
+ emit_insn (muinsns);
+ *arg0 = make_tree (type, mur);
+ *arg1 = c4;
+ return code == EQ_EXPR ? LE_EXPR : GT_EXPR;
+}
+\f
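
A standalone spot-check of the unsigned odd-divisor case quoted in the comment above, with C1 == 3, C3 == 0xaaaaaaab and C4 == 0x55555555 (0x55555554 for C2 == 1):

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t x = 0; x < 100000; x++)
    {
      /* x % 3U == 0  <=>  x * 0xaaaaaaabU <= 0x55555555.  */
      assert ((x % 3u == 0) == (x * 0xaaaaaaabu <= 0x55555555u));
      /* x % 3U == 1  <=>  (x - 1) * 0xaaaaaaabU <= 0x55555554.  */
      assert ((x % 3u == 1) == ((x - 1) * 0xaaaaaaabu <= 0x55555554u));
    }
  return 0;
}
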
/* Generate code to calculate OPS, an exploded expression,
using a store-flag instruction and return an rtx for the result.
OPS reflects a comparison.
/* We won't bother with store-flag operations involving function pointers
when function pointers must be canonicalized before comparisons. */
if (targetm.have_canonicalize_funcptr_for_compare ()
- && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
- && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
- == FUNCTION_TYPE))
- || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
- && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
- == FUNCTION_TYPE))))
+ && ((POINTER_TYPE_P (TREE_TYPE (arg0))
+ && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg0))))
+ || (POINTER_TYPE_P (TREE_TYPE (arg1))
+ && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg1))))))
return 0;
STRIP_NOPS (arg0);
STRIP_NOPS (arg1);
-
+
/* For vector typed comparisons emit code to generate the desired
all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
expander for this. */
}
}
+ /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
+ into (x - C2) * C3 < C4. */
+ if ((ops->code == EQ_EXPR || ops->code == NE_EXPR)
+ && TREE_CODE (arg0) == SSA_NAME
+ && TREE_CODE (arg1) == INTEGER_CST)
+ {
+ enum tree_code code = maybe_optimize_mod_cmp (ops->code, &arg0, &arg1);
+ if (code != ops->code)
+ {
+ struct separate_ops nops = *ops;
+ nops.code = ops->code = code;
+ nops.op0 = arg0;
+ nops.op1 = arg1;
+ nops.type = TREE_TYPE (arg0);
+ return do_store_flag (&nops, target, mode);
+ }
+ }
+
/* Get the rtx comparison code to use. We know that EXP is a comparison
operation of some type. Some comparisons against 1 and -1 can be
converted to comparisons with zero. Do so here so that the tests