&& (dtb == vect_external_def || dtb == vect_constant_def)));
}
+#define GATHER_SCATTER_OFFSET (-3)
+
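/* Operand maps: element 0 is the number of SLP operands N, elements
   1..N are gimple operand indices, and the special value
   GATHER_SCATTER_OFFSET stands for the implicit offset operand of a
   gather/scatter access.  (Descriptive note; the layout is visible
   from the uses below.)  */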
static const int no_arg_map[] = { 0 };
static const int arg0_map[] = { 1, 0 };
static const int arg1_map[] = { 1, 1 };
static const int arg1_arg3_arg4_map[] = { 3, 1, 3, 4 };
static const int arg3_arg2_map[] = { 2, 3, 2 };
static const int op1_op0_map[] = { 2, 1, 0 };
-static const int off_map[] = { 1, -3 };
-static const int off_op0_map[] = { 2, -3, 0 };
-static const int off_arg2_arg3_map[] = { 3, -3, 2, 3 };
-static const int off_arg3_arg2_map[] = { 3, -3, 3, 2 };
+static const int off_map[] = { 1, GATHER_SCATTER_OFFSET };
+static const int off_op0_map[] = { 2, GATHER_SCATTER_OFFSET, 0 };
+static const int off_arg2_arg3_map[] = { 3, GATHER_SCATTER_OFFSET, 2, 3 };
+static const int off_arg3_arg2_map[] = { 3, GATHER_SCATTER_OFFSET, 3, 2 };
static const int mask_call_maps[6][7] = {
{ 1, 1, },
{ 2, 1, 2, },
{
oprnd_info = (*oprnds_info)[i];
int opno = map ? map[i] : int (i);
- if (opno == -3)
+ if (opno == GATHER_SCATTER_OFFSET)
{
gcc_assert (STMT_VINFO_GATHER_SCATTER_P (stmt_info));
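/* The offset is not an ordinary gimple operand; it is recovered
   from the stmt's gather/scatter info.  */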
if (!is_a <loop_vec_info> (vinfo)
If that failed for some reason (e.g. because another pattern
took priority), just handle cases in which the offset already
has the right type, i.e. converting it to the element type of
the offset vector type would generate no code.  */
- else if (gs_info->ifn != IFN_LAST
+ else if (GATHER_SCATTER_IFN_P (*gs_info)
&& !is_gimple_call (stmt_info->stmt)
&& !tree_nop_conversion_p (TREE_TYPE (gs_info->offset),
TREE_TYPE (gs_info->offset_vectype)))
}
else if (memory_access_type != VMAT_LOAD_STORE_LANES
&& (memory_access_type != VMAT_GATHER_SCATTER
- || (gs_info.decl && !VECTOR_BOOLEAN_TYPE_P (mask_vectype))))
+ || (GATHER_SCATTER_LEGACY_P (gs_info)
+ && !VECTOR_BOOLEAN_TYPE_P (mask_vectype))))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
return false;
}
else if (memory_access_type == VMAT_GATHER_SCATTER
- && gs_info.ifn == IFN_LAST
- && !gs_info.decl)
+ && GATHER_SCATTER_EMULATED_P (gs_info))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
final_mask, vec_mask, gsi);
}
- if (gs_info.ifn != IFN_LAST)
+ if (GATHER_SCATTER_IFN_P (gs_info))
{
if (costing_p)
{
vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
new_stmt = call;
}
- else if (gs_info.decl)
+ else if (GATHER_SCATTER_LEGACY_P (gs_info))
{
/* The builtin decls path for scatter is legacy, x86 only. */
gcc_assert (nunits.is_constant ()
return false;
}
else if (memory_access_type == VMAT_GATHER_SCATTER
- && gs_info.ifn == IFN_LAST
- && !gs_info.decl)
+ && GATHER_SCATTER_EMULATED_P (gs_info))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
/* 2. Create the vector-load in the loop. */
unsigned HOST_WIDE_INT align;
- if (gs_info.ifn != IFN_LAST)
+ if (GATHER_SCATTER_IFN_P (gs_info))
{
if (costing_p)
{
new_stmt = call;
data_ref = NULL_TREE;
}
- else if (gs_info.decl)
+ else if (GATHER_SCATTER_LEGACY_P (gs_info))
{
/* The builtin decls path for gather is legacy, x86 only. */
gcc_assert (!final_len && nunits.is_constant ());
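/* The legacy builtins support neither length-controlled partial
   vectors (final_len) nor variable-length vectors, hence the
   assertion above.  */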
#define PURE_SLP_STMT(S) ((S)->slp_type == pure_slp)
#define STMT_SLP_TYPE(S) (S)->slp_type
+#define GATHER_SCATTER_LEGACY_P(info) ((info).decl != NULL_TREE \
+ && (info).ifn == IFN_LAST)
+#define GATHER_SCATTER_IFN_P(info) ((info).decl == NULL_TREE \
+ && (info).ifn != IFN_LAST)
+#define GATHER_SCATTER_EMULATED_P(info) ((info).decl == NULL_TREE \
+ && (info).ifn == IFN_LAST)
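/* Illustrative note (not part of the patch): each predicate tests
   both DECL and IFN, so at most one of them holds for any given
   gather_scatter_info:

     GATHER_SCATTER_LEGACY_P    target builtin decl (x86 only)
     GATHER_SCATTER_IFN_P       internal function, e.g. IFN_GATHER_LOAD
     GATHER_SCATTER_EMULATED_P  neither; the access is emulated

   This matches the three-way dispatch in vectorizable_load/store:

     if (GATHER_SCATTER_IFN_P (gs_info))
       ... emit an internal-function call ...
     else if (GATHER_SCATTER_LEGACY_P (gs_info))
       ... call the target builtin decl ...
     else
       gcc_assert (GATHER_SCATTER_EMULATED_P (gs_info));  */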
+
/* Contains the scalar or vector costs for a vec_info. */
class vector_costs
{