and have vector types, V0 has the same element type as V1, and the
   number of elements of the result is that of MASK. */
tree
-c_build_shufflevector (location_t loc, tree v0, tree v1, vec<tree> mask,
- bool complain)
+c_build_shufflevector (location_t loc, tree v0, tree v1,
+ const vec<tree> &mask, bool complain)
{
tree ret;
bool wrap = true;
extern bool vector_types_convertible_p (const_tree t1, const_tree t2, bool emit_lax_note);
extern tree c_build_vec_perm_expr (location_t, tree, tree, tree, bool = true);
extern tree c_build_shufflevector (location_t, tree, tree,
- vec<tree>, bool = true);
+ const vec<tree> &, bool = true);
extern tree c_build_vec_convert (location_t, tree, location_t, tree, bool = true);
extern void init_c_lex (void);
extern tree c_build_va_arg (location_t, tree, location_t, tree);
extern tree c_finish_transaction (location_t, tree, int);
extern bool c_tree_equal (tree, tree);
-extern tree c_build_function_call_vec (location_t, vec<location_t>, tree,
- vec<tree, va_gc> *, vec<tree, va_gc> *);
+extern tree c_build_function_call_vec (location_t, const vec<location_t>&,
+ tree, vec<tree, va_gc> *,
+ vec<tree, va_gc> *);
extern tree c_omp_clause_copy_ctor (tree, tree, tree);
/* Set to 0 at beginning of a function definition, set to 1 if
/* Like build_function_call_vec, but also call resolve_overloaded_builtin. */
tree
-c_build_function_call_vec (location_t loc, vec<location_t> arg_loc,
+c_build_function_call_vec (location_t loc, const vec<location_t> &arg_loc,
tree function, vec<tree, va_gc> *params,
vec<tree, va_gc> *origtypes)
{
extern auto_vec<edge> get_loop_exit_edges (const class loop *, basic_block * = NULL);
extern edge single_exit (const class loop *);
-extern edge single_likely_exit (class loop *loop, vec<edge>);
+extern edge single_likely_exit (class loop *loop, const vec<edge> &);
extern unsigned num_loop_branches (const class loop *);
extern edge loop_preheader_edge (const class loop *);
to noreturn call. */
edge
-single_likely_exit (class loop *loop, vec<edge> exits)
+single_likely_exit (class loop *loop, const vec<edge> &exits)
{
edge found = single_exit (loop);
unsigned i;
/* Create callgraph node clone with new declaration. The actual body will be
copied later at compilation stage. The name of the new clone will be
constructed from the name of the original node, SUFFIX and NUM_SUFFIX. */
- cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
+ cgraph_node *create_virtual_clone (const vec<cgraph_edge *> &redirect_callers,
vec<ipa_replace_map *, va_gc> *tree_map,
ipa_param_adjustments *param_adjustments,
const char * suffix, unsigned num_suffix);
bitmap interface.
*/
cgraph_node *
-cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
+cgraph_node::create_virtual_clone (const vec<cgraph_edge *> &redirect_callers,
vec<ipa_replace_map *, va_gc> *tree_map,
ipa_param_adjustments *param_adjustments,
const char * suffix, unsigned num_suffix)
/* The function merges equivalent states of AUTOMATON. */
static void
-merge_states (automaton_t automaton, vec<state_t> equiv_classes)
+merge_states (automaton_t automaton, const vec<state_t> &equiv_classes)
{
state_t curr_state;
state_t new_state;
/* Another helper subroutine of walk_rtx: given a vec<char>, convert it
to a NUL-terminated string in malloc memory. */
static char *
-VEC_char_to_string (vec<char> v)
+VEC_char_to_string (const vec<char> &v)
{
size_t n = v.length ();
char *s = XNEWVEC (char, n + 1);
void gen_kids (FILE *, int, bool, int);
void gen_kids_1 (FILE *, int, bool, int,
- vec<dt_operand *>, vec<dt_operand *>, vec<dt_operand *>,
- vec<dt_operand *>, vec<dt_operand *>, vec<dt_node *>);
+ const vec<dt_operand *> &, const vec<dt_operand *> &,
+ const vec<dt_operand *> &, const vec<dt_operand *> &,
+ const vec<dt_operand *> &, const vec<dt_node *> &);
void analyze (sinfo_map_t &);
};
void
dt_node::gen_kids_1 (FILE *f, int indent, bool gimple, int depth,
- vec<dt_operand *> gimple_exprs,
- vec<dt_operand *> generic_exprs,
- vec<dt_operand *> fns,
- vec<dt_operand *> generic_fns,
- vec<dt_operand *> preds,
- vec<dt_node *> others)
+ const vec<dt_operand *> &gimple_exprs,
+ const vec<dt_operand *> &generic_exprs,
+ const vec<dt_operand *> &fns,
+ const vec<dt_operand *> &generic_fns,
+ const vec<dt_operand *> &preds,
+ const vec<dt_node *> &others)
{
char buf[128];
char *kid_opname = buf;
recursively. */
static void
-walk_captures (operand *op, vec<vec<capture *> > cpts)
+walk_captures (operand *op, vec<vec<capture *> > &cpts)
{
if (! op)
return;
go after the = _5 store and thus change behavior. */
static bool
-check_no_overlap (vec<store_immediate_info *> m_store_info, unsigned int i,
+check_no_overlap (const vec<store_immediate_info *> &m_store_info,
+ unsigned int i,
bool all_integer_cst_p, unsigned int first_order,
unsigned int last_order, unsigned HOST_WIDE_INT start,
unsigned HOST_WIDE_INT end, unsigned int first_earlier,
specified in vector ARGS. */
gcall *
-gimple_build_call_vec (tree fn, vec<tree> args)
+gimple_build_call_vec (tree fn, const vec<tree> &args)
{
unsigned i;
unsigned nargs = args.length ();
specified in vector ARGS. */
gcall *
-gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
+gimple_build_call_internal_vec (enum internal_fn fn, const vec<tree> &args)
{
unsigned i, nargs;
gcall *call;
ARGS is a vector of labels excluding the default. */
gswitch *
-gimple_build_switch (tree index, tree default_label, vec<tree> args)
+gimple_build_switch (tree index, tree default_label, const vec<tree> &args)
{
unsigned i, nlabels = args.length ();
/* Sort the case labels in LABEL_VEC in place in ascending order. */
void
-sort_case_labels (vec<tree> label_vec)
+sort_case_labels (vec<tree> &label_vec)
{
label_vec.qsort (compare_case_labels);
}
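
The pattern applied throughout the patch is visible in the gimple helpers above: callees that only read the vector now take const vec<tree> &, while callees that modify it in place, such as sort_case_labels, keep a non-const vec<tree> &. A minimal stand-alone sketch of the same convention, with std::vector standing in for GCC's vec<> and hypothetical function names:

#include <algorithm>
#include <vector>

/* Read-only access: a const reference avoids any copy and documents that
   the callee cannot modify the caller's vector.  */
static bool
has_labels (const std::vector<int> &labels)
{
  return !labels.empty ();
}

/* In-place mutation, as in sort_case_labels: a non-const reference keeps
   the modification visible in the signature.  */
static void
sort_labels (std::vector<int> &labels)
{
  std::sort (labels.begin (), labels.end ());
}
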
found or not. */
void
-preprocess_case_label_vec_for_gimple (vec<tree> labels,
+preprocess_case_label_vec_for_gimple (vec<tree> &labels,
tree index_type,
tree *default_casep)
{
gimple *gimple_alloc (enum gimple_code, unsigned CXX_MEM_STAT_INFO);
greturn *gimple_build_return (tree);
void gimple_call_reset_alias_info (gcall *);
-gcall *gimple_build_call_vec (tree, vec<tree> );
+gcall *gimple_build_call_vec (tree, const vec<tree> &);
gcall *gimple_build_call (tree, unsigned, ...);
gcall *gimple_build_call_valist (tree, unsigned, va_list);
gcall *gimple_build_call_internal (enum internal_fn, unsigned, ...);
-gcall *gimple_build_call_internal_vec (enum internal_fn, vec<tree> );
+gcall *gimple_build_call_internal_vec (enum internal_fn, const vec<tree> &);
gcall *gimple_build_call_from_tree (tree, tree);
gassign *gimple_build_assign (tree, tree CXX_MEM_STAT_INFO);
gassign *gimple_build_assign (tree, enum tree_code,
gimple *gimple_build_wce (gimple_seq);
gresx *gimple_build_resx (int);
gswitch *gimple_build_switch_nlabels (unsigned, tree, tree);
-gswitch *gimple_build_switch (tree, tree, vec<tree> );
+gswitch *gimple_build_switch (tree, tree, const vec<tree> &);
geh_dispatch *gimple_build_eh_dispatch (int);
gdebug *gimple_build_debug_bind (tree, tree, gimple * CXX_MEM_STAT_INFO);
gdebug *gimple_build_debug_source_bind (tree, tree, gimple * CXX_MEM_STAT_INFO);
extern bool infer_nonnull_range (gimple *, tree);
extern bool infer_nonnull_range_by_dereference (gimple *, tree);
extern bool infer_nonnull_range_by_attribute (gimple *, tree);
-extern void sort_case_labels (vec<tree>);
-extern void preprocess_case_label_vec_for_gimple (vec<tree>, tree, tree *);
+extern void sort_case_labels (vec<tree> &);
+extern void preprocess_case_label_vec_for_gimple (vec<tree> &, tree, tree *);
extern void gimple_seq_set_location (gimple_seq, location_t);
extern void gimple_seq_discard (gimple_seq);
extern void maybe_remove_unused_call_args (struct function *, gimple *);
static void move_succs (vec<edge, va_gc> **, basic_block);
static void sched_remove_insn (rtx_insn *);
static void clear_priorities (rtx_insn *, rtx_vec_t *);
-static void calc_priorities (rtx_vec_t);
+static void calc_priorities (const rtx_vec_t &);
static void add_jump_dependencies (rtx_insn *, rtx_insn *);
#endif /* INSN_SCHEDULING */
changed. ROOTS is a vector of instructions whose priority computation will
trigger initialization of all cleared priorities. */
static void
-calc_priorities (rtx_vec_t roots)
+calc_priorities (const rtx_vec_t &roots)
{
int i;
rtx_insn *insn;
The hook common_sched_info->luid_for_non_insn () is used to determine
if notes, labels, etc. need luids. */
void
-sched_init_luids (bb_vec_t bbs)
+sched_init_luids (const bb_vec_t &bbs)
{
int i;
basic_block bb;
/* Initialize haifa_insn_data for BBS. */
void
-haifa_init_h_i_d (bb_vec_t bbs)
+haifa_init_h_i_d (const bb_vec_t &bbs)
{
int i;
basic_block bb;
static tree
ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
- vec<tree> known_csts,
- vec<ipa_polymorphic_call_context> known_contexts,
- vec<ipa_agg_value_set> known_aggs,
+ const vec<tree> &known_csts,
+ const vec<ipa_polymorphic_call_context> &known_contexts,
+ const vec<ipa_agg_value_set> &known_aggs,
struct ipa_agg_replacement_value *agg_reps,
bool *speculative)
{
}
if (!t)
{
- struct ipa_agg_value_set *agg;
+ const ipa_agg_value_set *agg;
if (known_aggs.length () > (unsigned int) param_index)
agg = &known_aggs[param_index];
else
if (!t && known_aggs.length () > (unsigned int) param_index
&& !ie->indirect_info->by_ref)
{
- struct ipa_agg_value_set *agg = &known_aggs[param_index];
+ const ipa_agg_value_set *agg = &known_aggs[param_index];
t = ipa_find_agg_cst_for_param (agg,
(unsigned) param_index
< known_csts.length ()
this kind of adjustment is possible. */
static bool
-adjust_callers_for_value_intersection (vec<cgraph_edge *> callers,
+adjust_callers_for_value_intersection (vec<cgraph_edge *> &callers,
cgraph_node *node)
{
for (unsigned i = 0; i < callers.length (); i++)
static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
- vec<tree> known_csts,
- vec<cgraph_edge *> callers)
+ vec<tree> &known_csts,
+ const vec<cgraph_edge *> &callers)
{
ipa_node_params *info = ipa_node_params_sum->get (node);
int i, count = ipa_get_param_count (info);
find_more_contexts_for_caller_subset (cgraph_node *node,
vec<ipa_polymorphic_call_context>
*known_contexts,
- vec<cgraph_edge *> callers)
+ const vec<cgraph_edge *> &callers)
{
ipa_node_params *info = ipa_node_params_sum->get (node);
int i, count = ipa_get_param_count (info);
static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
- vec<cgraph_edge *> callers)
+ const vec<cgraph_edge *> &callers)
{
ipa_node_params *dest_info = ipa_node_params_sum->get (node);
struct ipa_agg_replacement_value *res;
/* Return a copy of KNOWN_CONTEXTS if it is not empty, otherwise return vNULL. */
static vec<ipa_polymorphic_call_context>
-copy_useful_known_contexts (vec<ipa_polymorphic_call_context> known_contexts)
+copy_useful_known_contexts (const vec<ipa_polymorphic_call_context> &known_contexts)
{
if (known_contexts_useful_p (known_contexts))
return known_contexts.copy ();
class ipa_fn_summary *info,
class ipa_node_params *params_summary,
class ipa_fn_summary *callee_info,
- vec<int> operand_map,
- vec<HOST_WIDE_INT> offset_map,
+ const vec<int> &operand_map,
+ const vec<HOST_WIDE_INT> &offset_map,
clause_t possible_truths,
predicate *toplev_predicate)
{
class ipa_node_params *params_summary,
class ipa_fn_summary *callee_info,
vec<ipa_freqcounting_predicate, va_gc> *v,
- vec<int> operand_map,
- vec<HOST_WIDE_INT> offset_map,
+ const vec<int> &operand_map,
+ const vec<HOST_WIDE_INT> &offset_map,
clause_t possible_truths,
predicate *toplev_predicate)
/* Compute badness of all edges in NEW_EDGES and add them to the HEAP. */
static void
-add_new_edges_to_heap (edge_heap_t *heap, vec<cgraph_edge *> new_edges)
+add_new_edges_to_heap (edge_heap_t *heap, vec<cgraph_edge *> &new_edges)
{
while (new_edges.length () > 0)
{
predicate::remap_after_inlining (class ipa_fn_summary *info,
class ipa_node_params *params_summary,
class ipa_fn_summary *callee_info,
- vec<int> operand_map,
- vec<HOST_WIDE_INT> offset_map,
+ const vec<int> &operand_map,
+ const vec<HOST_WIDE_INT> &offset_map,
clause_t possible_truths,
const predicate &toplev_predicate)
{
predicate remap_after_inlining (class ipa_fn_summary *,
class ipa_node_params *params_summary,
class ipa_fn_summary *,
- vec<int>, vec<HOST_WIDE_INT>,
+ const vec<int> &, const vec<HOST_WIDE_INT> &,
clause_t, const predicate &);
void stream_in (class lto_input_block *);
initializer of a constant. */
tree
-ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
+ipa_find_agg_cst_for_param (const ipa_agg_value_set *agg, tree scalar,
HOST_WIDE_INT offset, bool by_ref,
bool *from_global_constant)
{
void ipa_analyze_node (struct cgraph_node *);
/* Aggregate jump function related functions. */
-tree ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
+tree ipa_find_agg_cst_for_param (const ipa_agg_value_set *agg, tree scalar,
HOST_WIDE_INT offset, bool by_ref,
bool *from_global_constant = NULL);
bool ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
static vec<ira_loop_tree_node_t>
ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSED,
- vec<ira_loop_tree_node_t> loop_preorder)
+ const vec<ira_loop_tree_node_t> &loop_preorder)
{
vec<ira_loop_tree_node_t> topsort_nodes = vNULL;
unsigned int n_loop_preorder;
gives the iterator associated with argument I of ONAME. */
static void
-add_overload_instance (overloaded_name *oname, vec<mapping *> iterators, rtx x)
+add_overload_instance (overloaded_name *oname, const vec<mapping *> &iterators, rtx x)
{
/* Create the instance. */
overloaded_instance *instance = new overloaded_instance;
struct full_rtx_costs *);
extern bool native_encode_rtx (machine_mode, rtx, vec<target_unit> &,
unsigned int, unsigned int);
-extern rtx native_decode_rtx (machine_mode, vec<target_unit>,
+extern rtx native_decode_rtx (machine_mode, const vec<target_unit> &,
unsigned int);
-extern rtx native_decode_vector_rtx (machine_mode, vec<target_unit>,
+extern rtx native_decode_vector_rtx (machine_mode, const vec<target_unit> &,
unsigned int, unsigned int, unsigned int);
extern poly_uint64 subreg_lsb (const_rtx);
extern poly_uint64 subreg_size_lsb (poly_uint64, poly_uint64, poly_uint64);
extern void sched_extend_luids (void);
extern void sched_init_insn_luid (rtx_insn *);
-extern void sched_init_luids (bb_vec_t);
+extern void sched_init_luids (const bb_vec_t &);
extern void sched_finish_luids (void);
extern void sched_extend_target (void);
-extern void haifa_init_h_i_d (bb_vec_t);
+extern void haifa_init_h_i_d (const bb_vec_t &);
extern void haifa_finish_h_i_d (void);
/* Hooks that are common to all the schedulers. */
Return the vector on success, otherwise return NULL_RTX. */
rtx
-native_decode_vector_rtx (machine_mode mode, vec<target_unit> bytes,
+native_decode_vector_rtx (machine_mode mode, const vec<target_unit> &bytes,
unsigned int first_byte, unsigned int npatterns,
unsigned int nelts_per_pattern)
{
Return the rtx on success, otherwise return NULL_RTX. */
rtx
-native_decode_rtx (machine_mode mode, vec<target_unit> bytes,
+native_decode_rtx (machine_mode mode, const vec<target_unit> &bytes,
unsigned int first_byte)
{
if (VECTOR_MODE_P (mode))
condition are separated by NULL tree in the vector. */
static void
-gen_shrink_wrap_conditions (gcall *bi_call, vec<gimple *> conds,
+gen_shrink_wrap_conditions (gcall *bi_call, const vec<gimple *> &conds,
unsigned int *nconds)
{
gcall *call;
when it is non-null, it is called while all of the CONDS are true. */
static void
-shrink_wrap_one_built_in_call_with_conds (gcall *bi_call, vec <gimple *> conds,
+shrink_wrap_one_built_in_call_with_conds (gcall *bi_call,
+ const vec <gimple *> &conds,
unsigned int nconds,
gcall *bi_newcall = NULL)
{
wrapping transformation. */
static void
-shrink_wrap_conditional_dead_built_in_calls (vec<gcall *> calls)
+shrink_wrap_conditional_dead_built_in_calls (const vec<gcall *> &calls)
{
unsigned i = 0;
void
create_runtime_alias_checks (class loop *loop,
- vec<dr_with_seg_len_pair_t> *alias_pairs,
+ const vec<dr_with_seg_len_pair_t> *alias_pairs,
tree * cond_expr)
{
tree part_cond_expr;
is small enough to be handled. */
bool
-compute_all_dependences (vec<data_reference_p> datarefs,
+compute_all_dependences (const vec<data_reference_p> &datarefs,
vec<ddr_p> *dependence_relations,
- vec<loop_p> loop_nest,
+ const vec<loop_p> &loop_nest,
bool compute_self_and_rr)
{
struct data_dependence_relation *ddr;
extern void compute_affine_dependence (struct data_dependence_relation *,
loop_p);
extern void compute_self_dependence (struct data_dependence_relation *);
-extern bool compute_all_dependences (vec<data_reference_p> ,
+extern bool compute_all_dependences (const vec<data_reference_p> &,
vec<ddr_p> *,
- vec<loop_p>, bool);
+ const vec<loop_p> &, bool);
extern tree find_data_references_in_bb (class loop *, basic_block,
vec<data_reference_p> *);
extern unsigned int dr_alignment (innermost_loop_behavior *);
extern void prune_runtime_alias_test_list (vec<dr_with_seg_len_pair_t> *,
poly_uint64);
extern void create_runtime_alias_checks (class loop *,
- vec<dr_with_seg_len_pair_t> *, tree*);
+ const vec<dr_with_seg_len_pair_t> *,
+ tree*);
extern tree dr_direction_indicator (struct data_reference *);
extern tree dr_zero_step_indicator (struct data_reference *);
extern bool dr_known_forward_stride_p (struct data_reference *);
/* Return the index of the variable VAR in the LOOP_NEST array. */
static inline int
-index_in_loop_nest (int var, vec<loop_p> loop_nest)
+index_in_loop_nest (int var, const vec<loop_p> &loop_nest)
{
class loop *loopi;
int var_index;
mask if it was created for given SIZE and -1 otherwise. */
static int
-mask_exists (int size, vec<int> vec)
+mask_exists (int size, const vec<int> &vec)
{
unsigned int ix;
int v;
/* Build the vertices of the reduced dependence graph RDG. Return false
if that failed. */
- bool create_rdg_vertices (struct graph *rdg, vec<gimple *> stmts, loop_p loop);
+ bool create_rdg_vertices (struct graph *rdg, const vec<gimple *> &stmts,
+ loop_p loop);
/* Initialize STMTS with all the statements of LOOP. We use topological
order to discover all statements. The order is important because
statements from STMTS into separate loops. Returns the number of
distributed loops. Set NB_CALLS to number of generated builtin calls.
Set *DESTROY_P to whether LOOP needs to be destroyed. */
- int distribute_loop (class loop *loop, vec<gimple *> stmts,
+ int distribute_loop (class loop *loop, const vec<gimple *> &stmts,
control_dependences *cd, int *nb_calls, bool *destroy_p,
bool only_patterns_p);
}
bool
-loop_distribution::create_rdg_vertices (struct graph *rdg, vec<gimple *> stmts,
+loop_distribution::create_rdg_vertices (struct graph *rdg,
+ const vec<gimple *> &stmts,
loop_p loop)
{
int i;
/* Dump to FILE the PARTITIONS. */
static void
-dump_rdg_partitions (FILE *file, vec<partition *> partitions)
+dump_rdg_partitions (FILE *file, const vec<partition *> &partitions)
{
int i;
partition *partition;
}
/* Debug PARTITIONS. */
-extern void debug_rdg_partitions (vec<partition *> );
+extern void debug_rdg_partitions (const vec<partition *> &);
DEBUG_FUNCTION void
-debug_rdg_partitions (vec<partition *> partitions)
+debug_rdg_partitions (const vec<partition *> &partitions)
{
dump_rdg_partitions (stderr, partitions);
}
static bool
partition_contains_all_rw (struct graph *rdg,
- vec<partition *> partitions)
+ const vec<partition *> &partitions)
{
int i;
partition *partition;
Set *DESTROY_P to whether LOOP needs to be destroyed. */
int
-loop_distribution::distribute_loop (class loop *loop, vec<gimple *> stmts,
+loop_distribution::distribute_loop (class loop *loop,
+ const vec<gimple *> &stmts,
control_dependences *cd, int *nb_calls, bool *destroy_p,
bool only_patterns_p)
{
reduction results in REDUCTION_STORES. */
static bool
-oacc_entry_exit_ok_1 (bitmap in_loop_bbs, vec<basic_block> region_bbs,
+oacc_entry_exit_ok_1 (bitmap in_loop_bbs, const vec<basic_block> ®ion_bbs,
reduction_info_table_type *reduction_list,
bitmap reduction_stores)
{
if any changes were made. */
static bool
-oacc_entry_exit_single_gang (bitmap in_loop_bbs, vec<basic_block> region_bbs,
+oacc_entry_exit_single_gang (bitmap in_loop_bbs,
+ const vec<basic_block> ®ion_bbs,
bitmap reduction_stores)
{
tree gang_pos = NULL_TREE;
static void
hoist_memory_references (class loop *loop, bitmap mem_refs,
- vec<edge> exits)
+ const vec<edge> &exits)
{
im_mem_ref *ref;
unsigned i;
static bool
loop_suitable_for_sm (class loop *loop ATTRIBUTE_UNUSED,
- vec<edge> exits)
+ const vec<edge> &exits)
{
unsigned i;
edge ex;
Lookup by binary search. */
static int
-bound_index (vec<widest_int> bounds, const widest_int &bound)
+bound_index (const vec<widest_int> &bounds, const widest_int &bound)
{
unsigned int end = bounds.length ();
unsigned int begin = 0;
stmts. */
static tree
-update_ops (tree var, enum tree_code code, vec<operand_entry *> ops,
+update_ops (tree var, enum tree_code code, const vec<operand_entry *> &ops,
unsigned int *pidx, class loop *loop)
{
gimple *stmt = SSA_NAME_DEF_STMT (var);
cases, but it is unlikely to be worth it. */
static void
-swap_ops_for_binary_stmt (vec<operand_entry *> ops,
+swap_ops_for_binary_stmt (const vec<operand_entry *> &ops,
unsigned int opindex, gimple *stmt)
{
operand_entry *oe1, *oe2, *oe3;
static tree
rewrite_expr_tree (gimple *stmt, enum tree_code rhs_code, unsigned int opindex,
- vec<operand_entry *> ops, bool changed, bool next_changed)
+ const vec<operand_entry *> &ops, bool changed,
+ bool next_changed)
{
tree rhs1 = gimple_assign_rhs1 (stmt);
tree rhs2 = gimple_assign_rhs2 (stmt);
static void
rewrite_expr_tree_parallel (gassign *stmt, int width,
- vec<operand_entry *> ops)
+ const vec<operand_entry *> &ops)
{
enum tree_code opcode = gimple_assign_rhs_code (stmt);
int op_num = ops.length ();
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
alias_set_type set, alias_set_type base_set,
- tree type, vec<vn_reference_op_s> ops)
+ tree type, const vec<vn_reference_op_s> &ops)
{
- vn_reference_op_t op;
unsigned i;
tree base = NULL_TREE;
tree *op0_p = &base;
size = wi::to_poly_offset (size_tree);
/* Lower the final access size from the outermost expression. */
- op = &ops[0];
+ const_vn_reference_op_t cst_op = &ops[0];
+ /* Cast away constness for the sake of the const-unsafe
+ FOR_EACH_VEC_ELT(). */
+ vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
size_tree = NULL_TREE;
if (op->opcode == COMPONENT_REF)
size_tree = DECL_SIZE (op->op0);
&& op->op0
&& DECL_P (TREE_OPERAND (op->op0, 0)))
{
- vn_reference_op_t pop = &ops[i-1];
+ const_vn_reference_op_t pop = &ops[i-1];
base = TREE_OPERAND (op->op0, 0);
if (known_eq (pop->off, -1))
{
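
The ao_ref_init_from_vn_reference hunk above needs more than a signature change: the parameter is now const, but FOR_EACH_VEC_ELT wants a non-const element pointer, so constness is cast away while the body keeps treating the elements as read-only. A simplified, self-contained analogue of that workaround, with std::vector standing in for vec<> and a hypothetical macro:

#include <vector>

struct op_s { int opcode; };

/* A const-unsafe iteration macro: it hands back a pointer to a mutable
   element, much like FOR_EACH_VEC_ELT.  */
#define FOR_EACH_ELT(V, I, P) \
  for ((I) = 0; (I) < (V).size () && ((P) = &(V)[(I)], true); ++(I))

static int
count_nonzero_opcodes (const std::vector<op_s> &ops)
{
  /* Cast away constness so the macro's non-const pointer can bind; the
     loop body still only reads the elements.  */
  std::vector<op_s> &mutable_ops = const_cast<std::vector<op_s> &> (ops);
  unsigned i;
  op_s *op;
  int n = 0;
  FOR_EACH_ELT (mutable_ops, i, op)
    if (op->opcode != 0)
      n++;
  return n;
}
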
vn_nary_op_t vn_nary_op_insert_pieces (unsigned int, enum tree_code,
tree, tree *, tree, unsigned int);
bool ao_ref_init_from_vn_reference (ao_ref *, alias_set_type, alias_set_type,
- tree, vec<vn_reference_op_s> );
+ tree, const vec<vn_reference_op_s> &);
vec<vn_reference_op_s> vn_reference_operands_for_lookup (tree);
tree vn_reference_lookup_pieces (tree, alias_set_type, alias_set_type, tree,
vec<vn_reference_op_s> ,
entries in *LHSC. */
static void
-process_all_all_constraints (vec<ce_s> lhsc,
- vec<ce_s> rhsc)
+process_all_all_constraints (const vec<ce_s> &lhsc,
+ const vec<ce_s> &rhsc)
{
struct constraint_expr *lhsp, *rhsp;
unsigned i, j;
/* Create constraints ID = { rhsc }. */
static void
-make_constraints_to (unsigned id, vec<ce_s> rhsc)
+make_constraints_to (unsigned id, const vec<ce_s> &rhsc)
{
struct constraint_expr *c;
struct constraint_expr includes;
the LHS point to global and escaped variables. */
static void
-handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
+handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
tree fndecl)
{
auto_vec<ce_s> lhsc;
case BUILT_IN_REALLOC:
if (gimple_call_lhs (t))
{
+ auto_vec<ce_s> rhsc;
handle_lhs_call (t, gimple_call_lhs (t),
gimple_call_return_flags (t) | ERF_NOALIAS,
- vNULL, fndecl);
+ rhsc, fndecl);
get_constraint_for_ptr_offset (gimple_call_lhs (t),
NULL_TREE, &lhsc);
get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
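
The BUILT_IN_REALLOC hunk above is a place where the conversion is not purely mechanical: once handle_lhs_call takes vec<ce_s> &, the old vNULL argument no longer binds, since the placeholder converts to a temporary vector value and a temporary cannot bind to a non-const lvalue reference; hence the named auto_vec<ce_s> rhsc local. A minimal sketch of the underlying C++ rule, with std::vector standing in for vec<> and hypothetical names:

#include <vector>

static void
handle_lhs (std::vector<int> &rhs)   /* non-const reference */
{
  rhs.push_back (0);
}

static void
caller ()
{
  /* handle_lhs (std::vector<int> ());  -- error: a temporary cannot bind
     to a non-const lvalue reference.  */
  std::vector<int> rhs;   /* named local, as the patch does with auto_vec */
  handle_lhs (rhs);
}
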
/* Sort a fieldstack according to the field offset and sizes. */
static void
-sort_fieldstack (vec<fieldoff_s> fieldstack)
+sort_fieldstack (vec<fieldoff_s> &fieldstack)
{
fieldstack.qsort (fieldoff_compare);
}
FIELDSTACK is assumed to be sorted by offset. */
static bool
-check_for_overlaps (vec<fieldoff_s> fieldstack)
+check_for_overlaps (const vec<fieldoff_s> &fieldstack)
{
fieldoff_s *fo = NULL;
unsigned int i;
tree *cond_expr,
gimple_seq *cond_expr_stmt_list)
{
- vec<stmt_vec_info> may_misalign_stmts
+ const vec<stmt_vec_info> &may_misalign_stmts
= LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo);
stmt_vec_info stmt_info;
int mask = LOOP_VINFO_PTR_MASK (loop_vinfo);
static void
vect_create_cond_for_unequal_addrs (loop_vec_info loop_vinfo, tree *cond_expr)
{
- vec<vec_object_pair> pairs = LOOP_VINFO_CHECK_UNEQUAL_ADDRS (loop_vinfo);
+ const vec<vec_object_pair> &pairs
+ = LOOP_VINFO_CHECK_UNEQUAL_ADDRS (loop_vinfo);
unsigned int i;
vec_object_pair *pair;
FOR_EACH_VEC_ELT (pairs, i, pair)
static void
vect_create_cond_for_lower_bounds (loop_vec_info loop_vinfo, tree *cond_expr)
{
- vec<vec_lower_bound> lower_bounds = LOOP_VINFO_LOWER_BOUNDS (loop_vinfo);
+ const vec<vec_lower_bound> &lower_bounds
+ = LOOP_VINFO_LOWER_BOUNDS (loop_vinfo);
for (unsigned int i = 0; i < lower_bounds.length (); ++i)
{
tree expr = lower_bounds[i].expr;
void
vect_create_cond_for_alias_checks (loop_vec_info loop_vinfo, tree * cond_expr)
{
- vec<dr_with_seg_len_pair_t> comp_alias_ddrs =
+ const vec<dr_with_seg_len_pair_t> &comp_alias_ddrs =
LOOP_VINFO_COMP_ALIAS_DDRS (loop_vinfo);
if (comp_alias_ddrs.is_empty ())
static inline bool
vect_validate_multiplication (slp_tree_to_load_perm_map_t *perm_cache,
- vec<slp_tree> left_op, vec<slp_tree> right_op,
+ const vec<slp_tree> &left_op,
+ const vec<slp_tree> &right_op,
bool neg_first, bool *conj_first_operand,
bool fms)
{
static inline bool
vect_validate_multiplication (slp_tree_to_load_perm_map_t *perm_cache,
- vec<slp_tree> op, complex_perm_kinds_t permKind)
+ const vec<slp_tree> &op,
+ complex_perm_kinds_t permKind)
{
/* The left node is the more common case, test it first. */
if (!is_eq_or_top (linear_loads_p (perm_cache, op[0]), permKind))
else if (kind == slp_inst_kind_reduc_group)
{
/* Collect reduction statements. */
- vec<stmt_vec_info> reductions = as_a <loop_vec_info> (vinfo)->reductions;
+ const vec<stmt_vec_info> &reductions
+ = as_a <loop_vec_info> (vinfo)->reductions;
scalar_stmts.create (reductions.length ());
for (i = 0; reductions.iterate (i, &next_info); i++)
if (STMT_VINFO_RELEVANT_P (next_info)
{
unsigned int i;
poly_uint64 unrolling_factor = 1;
- vec<slp_instance> slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
+ const vec<slp_instance> &slp_instances
+ = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
slp_instance instance;
int decided_to_slp = 0;
true if anything in the basic-block was vectorized. */
static bool
-vect_slp_bbs (vec<basic_block> bbs)
+vect_slp_bbs (const vec<basic_block> &bbs)
{
vec<data_reference_p> datarefs = vNULL;
auto_vec<int> dataref_groups;
void
duplicate_and_interleave (vec_info *vinfo, gimple_seq *seq, tree vector_type,
- vec<tree> elts, unsigned int nresults,
+ const vec<tree> &elts, unsigned int nresults,
vec<tree> &results)
{
unsigned int nelts = elts.length ();
bool
vect_transform_slp_perm_load (vec_info *vinfo,
- slp_tree node, vec<tree> dr_chain,
+ slp_tree node, const vec<tree> &dr_chain,
gimple_stmt_iterator *gsi, poly_uint64 vf,
bool analyze_only, unsigned *n_perms,
unsigned int *n_loads, bool dce_chain)
/* Generate vector code for SLP_INSTANCES in the loop/basic block. */
void
-vect_schedule_slp (vec_info *vinfo, vec<slp_instance> slp_instances)
+vect_schedule_slp (vec_info *vinfo, const vec<slp_instance> &slp_instances)
{
slp_instance instance;
unsigned int i;
extern void vect_slp_init (void);
extern void vect_slp_fini (void);
extern void vect_free_slp_instance (slp_instance);
-extern bool vect_transform_slp_perm_load (vec_info *, slp_tree, vec<tree>,
+extern bool vect_transform_slp_perm_load (vec_info *, slp_tree, const vec<tree> &,
gimple_stmt_iterator *, poly_uint64,
bool, unsigned *,
unsigned * = nullptr, bool = false);
extern bool vect_slp_analyze_operations (vec_info *);
-extern void vect_schedule_slp (vec_info *, vec<slp_instance>);
+extern void vect_schedule_slp (vec_info *, const vec<slp_instance> &);
extern opt_result vect_analyze_slp (vec_info *, unsigned);
extern bool vect_make_slp_decision (loop_vec_info);
extern void vect_detect_hybrid_slp (loop_vec_info);
unsigned int * = NULL,
tree * = NULL, tree * = NULL);
extern void duplicate_and_interleave (vec_info *, gimple_seq *, tree,
- vec<tree>, unsigned int, vec<tree> &);
+ const vec<tree> &, unsigned int, vec<tree> &);
extern int vect_get_place_in_interleaving_chain (stmt_vec_info, stmt_vec_info);
extern bool vect_update_shared_vectype (stmt_vec_info, tree);
extern slp_tree vect_create_new_slp_node (unsigned, tree_code);
are extracted from V, a vector of CONSTRUCTOR_ELT. */
tree
-build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
+build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
{
if (vec_safe_length (v) == 0)
return build_zero_cst (type);
are given by VALS. */
static tree
-build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
+build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
{
gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
tree_vector_builder builder (type, vals.length (), 1);
/* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
static void
-check_vector_cst (vec<tree> expected, tree actual)
+check_vector_cst (const vec<tree> &expected, tree actual)
{
ASSERT_KNOWN_EQ (expected.length (),
TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
and that its elements match EXPECTED. */
static void
-check_vector_cst_duplicate (vec<tree> expected, tree actual,
+check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
unsigned int npatterns)
{
ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
EXPECTED. */
static void
-check_vector_cst_fill (vec<tree> expected, tree actual,
+check_vector_cst_fill (const vec<tree> &expected, tree actual,
unsigned int npatterns)
{
ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
and that its elements match EXPECTED. */
static void
-check_vector_cst_stepped (vec<tree> expected, tree actual,
+check_vector_cst_stepped (const vec<tree> &expected, tree actual,
unsigned int npatterns)
{
ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
extern tree build_int_cstu (tree type, poly_uint64);
extern tree build_int_cst_type (tree, poly_int64);
extern tree make_vector (unsigned, unsigned CXX_MEM_STAT_INFO);
-extern tree build_vector_from_ctor (tree, vec<constructor_elt, va_gc> *);
+extern tree build_vector_from_ctor (tree, const vec<constructor_elt, va_gc> *);
extern tree build_vector_from_val (tree, tree);
extern tree build_uniform_cst (tree, tree);
extern tree build_vec_series (tree, tree, tree);