/* A subroutine of get_load_store_type, with a subset of the same
arguments. Handle the case where STMT_INFO is a load or store that
- accesses consecutive elements with a negative step. */
+ accesses consecutive elements with a negative step. Sets *POFFSET
+ to the offset to be applied to the DR for the first access. */
static vect_memory_access_type
get_negative_load_store_type (vec_info *vinfo,
stmt_vec_info stmt_info, tree vectype,
vec_load_store_type vls_type,
- unsigned int ncopies)
+ unsigned int ncopies, poly_int64 *poffset)
{
dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info);
dr_alignment_support alignment_support_scheme;
dump_printf_loc (MSG_NOTE, vect_location,
"negative step with invariant source;"
" no permute needed.\n");
+ *poffset = -TYPE_VECTOR_SUBPARTS (vectype) + 1;
return VMAT_CONTIGUOUS_DOWN;
}
return VMAT_ELEMENTWISE;
}
+ *poffset = -TYPE_VECTOR_SUBPARTS (vectype) + 1;
return VMAT_CONTIGUOUS_REVERSE;
}
tree vectype, slp_tree slp_node,
bool masked_p, vec_load_store_type vls_type,
vect_memory_access_type *memory_access_type,
+ poly_int64 *poffset,
dr_alignment_support *alignment_support_scheme,
int *misalignment,
gather_scatter_info *gs_info)
/* ??? The VMAT_CONTIGUOUS_REVERSE code generation is
only correct for single element "interleaving" SLP. */
*memory_access_type = get_negative_load_store_type
- (vinfo, stmt_info, vectype, vls_type, 1);
+ (vinfo, stmt_info, vectype, vls_type, 1, poffset);
else
{
/* Try to use consecutive accesses of DR_GROUP_SIZE elements,
bool masked_p, vec_load_store_type vls_type,
unsigned int ncopies,
vect_memory_access_type *memory_access_type,
+ poly_int64 *poffset,
dr_alignment_support *alignment_support_scheme,
int *misalignment,
gather_scatter_info *gs_info)
loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
*misalignment = DR_MISALIGNMENT_UNKNOWN;
+ *poffset = 0;
if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
{
*memory_access_type = VMAT_GATHER_SCATTER;
{
if (!get_group_load_store_type (vinfo, stmt_info, vectype, slp_node,
masked_p,
- vls_type, memory_access_type,
+ vls_type, memory_access_type, poffset,
alignment_support_scheme,
misalignment, gs_info))
return false;
{
if (cmp < 0)
*memory_access_type = get_negative_load_store_type
- (vinfo, stmt_info, vectype, vls_type, ncopies);
+ (vinfo, stmt_info, vectype, vls_type, ncopies, poffset);
else
*memory_access_type = VMAT_CONTIGUOUS;
*alignment_support_scheme
unsigned int group_size, i;
vec<tree> oprnds = vNULL;
vec<tree> result_chain = vNULL;
- tree offset = NULL_TREE;
vec<tree> vec_oprnds = vNULL;
bool slp = (slp_node != NULL);
unsigned int vec_num;
vect_memory_access_type memory_access_type;
enum dr_alignment_support alignment_support_scheme;
int misalignment;
+ poly_int64 poffset;
if (!get_load_store_type (vinfo, stmt_info, vectype, slp_node, mask, vls_type,
- ncopies, &memory_access_type,
+ ncopies, &memory_access_type, &poffset,
&alignment_support_scheme, &misalignment, &gs_info))
return false;
|| alignment_support_scheme == dr_aligned
|| alignment_support_scheme == dr_unaligned_supported);
- if (memory_access_type == VMAT_CONTIGUOUS_DOWN
- || memory_access_type == VMAT_CONTIGUOUS_REVERSE)
- offset = size_int (-TYPE_VECTOR_SUBPARTS (vectype) + 1);
+ tree offset = NULL_TREE;
+ if (!known_eq (poffset, 0))
+ offset = size_int (poffset);
tree bump;
tree vec_offset = NULL_TREE;
unsigned int group_size;
poly_uint64 group_gap_adj;
tree msq = NULL_TREE, lsq;
- tree offset = NULL_TREE;
tree byte_offset = NULL_TREE;
tree realignment_token = NULL_TREE;
gphi *phi = NULL;
vect_memory_access_type memory_access_type;
enum dr_alignment_support alignment_support_scheme;
int misalignment;
+ poly_int64 poffset;
if (!get_load_store_type (vinfo, stmt_info, vectype, slp_node, mask, VLS_LOAD,
- ncopies, &memory_access_type,
+ ncopies, &memory_access_type, &poffset,
&alignment_support_scheme, &misalignment, &gs_info))
return false;
else
at_loop = loop;
- if (memory_access_type == VMAT_CONTIGUOUS_REVERSE)
- offset = size_int (-TYPE_VECTOR_SUBPARTS (vectype) + 1);
+ tree offset = NULL_TREE;
+ if (!known_eq (poffset, 0))
+ offset = size_int (poffset);
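For context, here is a minimal sketch in plain C of why the first access of a negative-step contiguous load sits NUNITS - 1 elements below the data reference address, which is the value the patch now records in *poffset. The NUNITS macro and the reverse_copy function are illustrative stand-ins, not part of the patch or of GCC.

/* Illustration only: a loop the vectorizer classifies as a negative-step
   contiguous access (VMAT_CONTIGUOUS_REVERSE).  Each vector iteration
   covers the NUNITS elements ending at src[i], so the data reference
   pointer must be adjusted by -(NUNITS - 1) elements before the load,
   and the loaded lanes are then reversed.  */

#define NUNITS 4   /* stand-in for TYPE_VECTOR_SUBPARTS (vectype) */

void
reverse_copy (int *dst, const int *src, int n)
{
  int i = n - 1;
  /* Main loop: one "vector" step handles NUNITS scalar iterations,
     reading src[i - (NUNITS - 1)] .. src[i] contiguously.  */
  for (; i >= NUNITS - 1; i -= NUNITS)
    for (int k = 0; k < NUNITS; ++k)
      dst[(n - 1 - i) + k] = src[i - k];
  /* Scalar epilogue for the remaining n % NUNITS elements.  */
  for (; i >= 0; --i)
    dst[n - 1 - i] = src[i];
}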