[thirdparty/gcc.git] / gcc / ipa-param-manipulation.c
1 /* Manipulation of formal and actual parameters of functions and function
2 calls.
3 Copyright (C) 2017-2020 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "ssa.h"
28 #include "cgraph.h"
29 #include "fold-const.h"
30 #include "tree-eh.h"
31 #include "stor-layout.h"
32 #include "gimplify.h"
33 #include "gimple-iterator.h"
34 #include "gimplify-me.h"
35 #include "tree-cfg.h"
36 #include "tree-dfa.h"
37 #include "ipa-param-manipulation.h"
38 #include "print-tree.h"
39 #include "gimple-pretty-print.h"
40 #include "builtins.h"
41 #include "tree-ssa.h"
42 #include "tree-inline.h"
43
44
45 /* Actual prefixes of different newly synthesized parameters. Keep in sync
46 with IPA_PARAM_PREFIX_* defines. */
47
48 static const char *ipa_param_prefixes[IPA_PARAM_PREFIX_COUNT]
49 = {"SYNTH",
50 "ISRA",
51 "simd",
52 "mask"};
53
54 /* Names of parameter adjustment operations for dumping. Keep in sync with enum ipa_parm_op. */
55
56 static const char *ipa_param_op_names[IPA_PARAM_PREFIX_COUNT]
57 = {"IPA_PARAM_OP_UNDEFINED",
58 "IPA_PARAM_OP_COPY",
59 "IPA_PARAM_OP_NEW",
60 "IPA_PARAM_OP_SPLIT"};
61
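/* Illustrative example (not taken from an actual GCC dump): if IPA-SRA
   decides that in void foo (struct S *p, int i) only the integer field at
   byte offset 4 of *p is used, the adjustment vector describing the clone
   could contain two entries along the lines of

     { op = IPA_PARAM_OP_SPLIT, base_index = 0, unit_offset = 4,
       type = int, param_prefix_index = 1 (the "ISRA" prefix) }
     { op = IPA_PARAM_OP_COPY, base_index = 1 }

   so that the materialized clone could look like foo.isra (int ISRA.0, int i),
   taking the loaded field and the copied I directly.  */
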
62 /* Fill an empty vector ARGS with PARM_DECLs representing formal parameters of
63 FNDECL. The function should not be called during LTO WPA phase except for
64 thunks (or functions with bodies streamed in). */
65
66 void
67 push_function_arg_decls (vec<tree> *args, tree fndecl)
68 {
69 int count;
70 tree parm;
71
72 /* Safety check that we do not attempt to use the function in WPA, except
73 when the function is a thunk and then we have DECL_ARGUMENTS or when we
74 have already explicitly loaded its body. */
75 gcc_assert (!flag_wpa
76 || DECL_ARGUMENTS (fndecl)
77 || gimple_has_body_p (fndecl));
78 count = 0;
79 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
80 count++;
81
82 args->reserve_exact (count);
83 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
84 args->quick_push (parm);
85 }
86
87 /* Fill an empty vector TYPES with trees representing formal parameters of
88 function type FNTYPE. */
89
90 void
91 push_function_arg_types (vec<tree> *types, tree fntype)
92 {
93 int count = 0;
94 tree t;
95
96 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
97 count++;
98
99 types->reserve_exact (count);
100 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
101 types->quick_push (TREE_VALUE (t));
102 }
103
104 /* Dump the adjustments in the vector ADJ_PARAMS to file F in a human
105 friendly way. */
106
107 void
108 ipa_dump_adjusted_parameters (FILE *f,
109 vec<ipa_adjusted_param, va_gc> *adj_params)
110 {
111 unsigned i, len = vec_safe_length (adj_params);
112 bool first = true;
113
114 if (!len)
115 return;
116
117 fprintf (f, " IPA adjusted parameters: ");
118 for (i = 0; i < len; i++)
119 {
120 struct ipa_adjusted_param *apm;
121 apm = &(*adj_params)[i];
122
123 if (!first)
124 fprintf (f, " ");
125 else
126 first = false;
127
128 fprintf (f, "%i. %s %s", i, ipa_param_op_names[apm->op],
129 apm->prev_clone_adjustment ? "prev_clone_adjustment " : "");
130 switch (apm->op)
131 {
132 case IPA_PARAM_OP_UNDEFINED:
133 break;
134
135 case IPA_PARAM_OP_COPY:
136 fprintf (f, ", base_index: %u", apm->base_index);
137 fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index);
138 break;
139
140 case IPA_PARAM_OP_SPLIT:
141 fprintf (f, ", offset: %u", apm->unit_offset);
142 /* fall-through */
143 case IPA_PARAM_OP_NEW:
144 fprintf (f, ", base_index: %u", apm->base_index);
145 fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index);
146 print_node_brief (f, ", type: ", apm->type, 0);
147 print_node_brief (f, ", alias type: ", apm->alias_ptr_type, 0);
148 fprintf (f, " prefix: %s",
149 ipa_param_prefixes[apm->param_prefix_index]);
150 if (apm->reverse)
151 fprintf (f, ", reverse-sso");
152 break;
153 }
154 fprintf (f, "\n");
155 }
156 }
157
158 /* Fill NEW_TYPES with types of a function after its current OTYPES have been
159 modified as described in ADJ_PARAMS. When USE_PREV_INDICES is true, use
160 prev_clone_index from ADJ_PARAMS instead of base_index when looking up
161 types in OTYPES. */
162
163 static void
164 fill_vector_of_new_param_types (vec<tree> *new_types, vec<tree> *otypes,
165 vec<ipa_adjusted_param, va_gc> *adj_params,
166 bool use_prev_indices)
167 {
168 unsigned adj_len = vec_safe_length (adj_params);
169 new_types->reserve_exact (adj_len);
170 for (unsigned i = 0; i < adj_len ; i++)
171 {
172 ipa_adjusted_param *apm = &(*adj_params)[i];
173 if (apm->op == IPA_PARAM_OP_COPY)
174 {
175 unsigned index
176 = use_prev_indices ? apm->prev_clone_index : apm->base_index;
177 /* The following needs to be handled gracefully because of type
178 mismatches. This happens with LTO but apparently also in Fortran
179 with -fcoarray=lib -O2 -lcaf_single -latomic. */
180 if (index >= otypes->length ())
181 continue;
182 new_types->quick_push ((*otypes)[index]);
183 }
184 else if (apm->op == IPA_PARAM_OP_NEW
185 || apm->op == IPA_PARAM_OP_SPLIT)
186 {
187 tree ntype = apm->type;
188 if (is_gimple_reg_type (ntype)
189 && TYPE_MODE (ntype) != BLKmode)
190 {
191 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ntype));
192 if (TYPE_ALIGN (ntype) != malign)
193 ntype = build_aligned_type (ntype, malign);
194 }
195 new_types->quick_push (ntype);
196 }
197 else
198 gcc_unreachable ();
199 }
200 }
201
202 /* Build and return a function type just like ORIG_TYPE but with parameter
203 types given in NEW_PARAM_TYPES - which can be NULL if, but only if,
204 ORIG_TYPE itself has NULL TYPE_ARG_TYPES. If METHOD2FUNC is true, also make
205 it a FUNCTION_TYPE instead of a METHOD_TYPE; if SKIP_RETURN is true, make the return type void. */
206
207 static tree
208 build_adjusted_function_type (tree orig_type, vec<tree> *new_param_types,
209 bool method2func, bool skip_return)
210 {
211 tree new_arg_types = NULL;
212 if (TYPE_ARG_TYPES (orig_type))
213 {
214 gcc_checking_assert (new_param_types);
215 bool last_parm_void = (TREE_VALUE (tree_last (TYPE_ARG_TYPES (orig_type)))
216 == void_type_node);
217 unsigned len = new_param_types->length ();
218 for (unsigned i = 0; i < len; i++)
219 new_arg_types = tree_cons (NULL_TREE, (*new_param_types)[i],
220 new_arg_types);
221
222 tree new_reversed = nreverse (new_arg_types);
223 if (last_parm_void)
224 {
225 if (new_reversed)
226 TREE_CHAIN (new_arg_types) = void_list_node;
227 else
228 new_reversed = void_list_node;
229 }
230 new_arg_types = new_reversed;
231 }
232
233 /* Use build_distinct_type_copy to preserve as much as possible from original
234 type (debug info, attribute lists etc.). The one exception is
235 METHOD_TYPEs which must have THIS argument and when we are asked to remove
236 it, we need to build new FUNCTION_TYPE instead. */
237 tree new_type = NULL;
238 if (method2func)
239 {
240 tree ret_type;
241 if (skip_return)
242 ret_type = void_type_node;
243 else
244 ret_type = TREE_TYPE (orig_type);
245
246 new_type
247 = build_distinct_type_copy (build_function_type (ret_type,
248 new_arg_types));
249 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
250 }
251 else
252 {
253 new_type = build_distinct_type_copy (orig_type);
254 TYPE_ARG_TYPES (new_type) = new_arg_types;
255 if (skip_return)
256 TREE_TYPE (new_type) = void_type_node;
257 }
258
259 return new_type;
260 }
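
/* As an illustration of the METHOD2FUNC case (hypothetical types, not taken
   from any testcase): a METHOD_TYPE such as int S::get (void), once the
   unused this parameter is removed, is rebuilt as the FUNCTION_TYPE
   int (void); with SKIP_RETURN also set it would become void (void).  */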
261
262 /* Return the maximum index in any IPA_PARAM_OP_COPY adjustment or -1 if there
263 is none. */
264
265 int
266 ipa_param_adjustments::get_max_base_index ()
267 {
268 unsigned adj_len = vec_safe_length (m_adj_params);
269 int max_index = -1;
270 for (unsigned i = 0; i < adj_len ; i++)
271 {
272 ipa_adjusted_param *apm = &(*m_adj_params)[i];
273 if (apm->op == IPA_PARAM_OP_COPY
274 && max_index < apm->base_index)
275 max_index = apm->base_index;
276 }
277 return max_index;
278 }
279
280
281 /* Fill SURVIVING_PARAMS with an array of bools where each one says whether a
282 parameter that originally was at that position still survives in the given
283 clone or is removed/replaced. If the final array is smaller than an index
284 of an original parameter, that parameter also did not survive. That a
285 parameter survives does not mean it has the same index as before. */
286
287 void
288 ipa_param_adjustments::get_surviving_params (vec<bool> *surviving_params)
289 {
290 unsigned adj_len = vec_safe_length (m_adj_params);
291 int max_index = get_max_base_index ();
292
293 if (max_index < 0)
294 return;
295 surviving_params->reserve_exact (max_index + 1);
296 surviving_params->quick_grow_cleared (max_index + 1);
297 for (unsigned i = 0; i < adj_len ; i++)
298 {
299 ipa_adjusted_param *apm = &(*m_adj_params)[i];
300 if (apm->op == IPA_PARAM_OP_COPY)
301 (*surviving_params)[apm->base_index] = true;
302 }
303 }
304
305 /* Fill NEW_INDICES with new indices of each surviving parameter or -1 for
306 those which do not survive. Any parameter beyond the length of the vector
307 does not survive. There is currently no support for a parameter to be
308 copied to two distinct new parameters. */
309
310 void
311 ipa_param_adjustments::get_updated_indices (vec<int> *new_indices)
312 {
313 unsigned adj_len = vec_safe_length (m_adj_params);
314 int max_index = get_max_base_index ();
315
316 if (max_index < 0)
317 return;
318 unsigned res_len = max_index + 1;
319 new_indices->reserve_exact (res_len);
320 for (unsigned i = 0; i < res_len ; i++)
321 new_indices->quick_push (-1);
322 for (unsigned i = 0; i < adj_len ; i++)
323 {
324 ipa_adjusted_param *apm = &(*m_adj_params)[i];
325 if (apm->op == IPA_PARAM_OP_COPY)
326 (*new_indices)[apm->base_index] = i;
327 }
328 }
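
/* A small worked example (purely illustrative): for an original function
   with four parameters of which only the first and the last are copied into
   the clone, i.e. the adjustments are
     { op = IPA_PARAM_OP_COPY, base_index = 0 } and
     { op = IPA_PARAM_OP_COPY, base_index = 3 },
   get_surviving_params fills { true, false, false, true } and
   get_updated_indices fills { 0, -1, -1, 1 }.  */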
329
330 /* Return the original index for the given new parameter index. Return a
331 negative number if not available. */
332
333 int
334 ipa_param_adjustments::get_original_index (int newidx)
335 {
336 const ipa_adjusted_param *adj = &(*m_adj_params)[newidx];
337 if (adj->op != IPA_PARAM_OP_COPY)
338 return -1;
339 return adj->base_index;
340 }
341
342 /* Return true if the first parameter (assuming there was one) survives the
343 transformation intact and remains the first one. */
344
345 bool
346 ipa_param_adjustments::first_param_intact_p ()
347 {
348 return (!vec_safe_is_empty (m_adj_params)
349 && (*m_adj_params)[0].op == IPA_PARAM_OP_COPY
350 && (*m_adj_params)[0].base_index == 0);
351 }
352
353 /* Return true if we have to change what has formerly been a method into a
354 function. */
355
356 bool
357 ipa_param_adjustments::method2func_p (tree orig_type)
358 {
359 return ((TREE_CODE (orig_type) == METHOD_TYPE) && !first_param_intact_p ());
360 }
361
362 /* Given function type OLD_TYPE, return a new type derived from it after
363 performing all stored modifications. TYPE_ORIGINAL_P should be true when
364 OLD_TYPE refers to the type before any IPA transformations, as opposed to a
365 type that can be an intermediate one in between various IPA
366 transformations. */
367
368 tree
369 ipa_param_adjustments::build_new_function_type (tree old_type,
370 bool type_original_p)
371 {
372 auto_vec<tree,16> new_param_types, *new_param_types_p;
373 if (prototype_p (old_type))
374 {
375 auto_vec<tree, 16> otypes;
376 push_function_arg_types (&otypes, old_type);
377 fill_vector_of_new_param_types (&new_param_types, &otypes, m_adj_params,
378 !type_original_p);
379 new_param_types_p = &new_param_types;
380 }
381 else
382 new_param_types_p = NULL;
383
384 return build_adjusted_function_type (old_type, new_param_types_p,
385 method2func_p (old_type), m_skip_return);
386 }
387
388 /* Build variant of function decl ORIG_DECL which has no return value if
389 M_SKIP_RETURN is true and, if ORIG_DECL's type or parameters are known, has
390 this type adjusted as indicated in M_ADJ_PARAMS. Arguments from
391 DECL_ARGUMENTS list are not processed now, since they are linked by
392 TREE_CHAIN directly and not accessible in LTO during WPA. The caller is
393 responsible for eliminating them when clones are properly materialized. */
394
395 tree
396 ipa_param_adjustments::adjust_decl (tree orig_decl)
397 {
398 tree new_decl = copy_node (orig_decl);
399 tree orig_type = TREE_TYPE (orig_decl);
400 if (prototype_p (orig_type)
401 || (m_skip_return && !VOID_TYPE_P (TREE_TYPE (orig_type))))
402 {
403 tree new_type = build_new_function_type (orig_type, false);
404 TREE_TYPE (new_decl) = new_type;
405 }
406 if (method2func_p (orig_type))
407 DECL_VINDEX (new_decl) = NULL_TREE;
408
409 /* When signature changes, we need to clear builtin info. */
410 if (fndecl_built_in_p (new_decl))
411 set_decl_built_in_function (new_decl, NOT_BUILT_IN, 0);
412
413 DECL_VIRTUAL_P (new_decl) = 0;
414 DECL_LANG_SPECIFIC (new_decl) = NULL;
415
416 /* Drop MALLOC attribute for a void function. */
417 if (m_skip_return)
418 DECL_IS_MALLOC (new_decl) = 0;
419
420 return new_decl;
421 }
422
423 /* Wrapper around get_ref_base_and_extent_hwi for cases interesting for IPA-SRA
424 transformations. Return true if EXPR has an interesting form and fill in
425 *BASE_P and *UNIT_OFFSET_P with the appropriate info. */
426
427 static bool
428 isra_get_ref_base_and_offset (tree expr, tree *base_p, unsigned *unit_offset_p)
429 {
430 HOST_WIDE_INT offset, size;
431 bool reverse;
432 tree base
433 = get_ref_base_and_extent_hwi (expr, &offset, &size, &reverse);
434 if (!base || size < 0)
435 return false;
436
437 if ((offset % BITS_PER_UNIT) != 0)
438 return false;
439
440 if (TREE_CODE (base) == MEM_REF)
441 {
442 poly_int64 plmoff = mem_ref_offset (base).force_shwi ();
443 HOST_WIDE_INT moff;
444 bool is_cst = plmoff.is_constant (&moff);
445 if (!is_cst)
446 return false;
447 offset += moff * BITS_PER_UNIT;
448 base = TREE_OPERAND (base, 0);
449 }
450
451 if (offset < 0 || (offset / BITS_PER_UNIT) > UINT_MAX)
452 return false;
453
454 *base_p = base;
455 *unit_offset_p = offset / BITS_PER_UNIT;
456 return true;
457 }
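
/* For example (illustration only): for a load from parm_1(D)->f, where field
   f sits at byte offset 8, the function sets *BASE_P to the SSA name
   parm_1(D) and *UNIT_OFFSET_P to 8; for MEM <int> [(int *)&s + 16] it sets
   *BASE_P to the declaration s and *UNIT_OFFSET_P to 16.  */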
458
459 /* Return true if EXPR describes a transitive split (i.e. one that happened for
460 both the caller and the callee) as recorded in PERFORMED_SPLITS. In that
461 case, store index of the respective record in PERFORMED_SPLITS into
462 *SM_IDX_P and the unit offset from all handled components in EXPR into
463 *UNIT_OFFSET_P. */
464
465 static bool
466 transitive_split_p (vec<ipa_param_performed_split, va_gc> *performed_splits,
467 tree expr, unsigned *sm_idx_p, unsigned *unit_offset_p)
468 {
469 tree base;
470 if (!isra_get_ref_base_and_offset (expr, &base, unit_offset_p))
471 return false;
472
473 if (TREE_CODE (base) == SSA_NAME)
474 {
475 base = SSA_NAME_VAR (base);
476 if (!base)
477 return false;
478 }
479
480 unsigned len = vec_safe_length (performed_splits);
481 for (unsigned i = 0 ; i < len; i++)
482 {
483 ipa_param_performed_split *sm = &(*performed_splits)[i];
484 if (sm->dummy_decl == base)
485 {
486 *sm_idx_p = i;
487 return true;
488 }
489 }
490 return false;
491 }
492
493 /* Structure to hold declarations representing transitive IPA-SRA splits. In
494 essence, if we need to pass UNIT_OFFSET of a parameter which originally has
495 number BASE_INDEX, we should pass down REPL. */
496
497 struct transitive_split_map
498 {
499 tree repl;
500 unsigned base_index;
501 unsigned unit_offset;
502 };
503
504 /* If call STMT contains any parameters representing transitive splits as
505 described by PERFORMED_SPLITS, return the number of extra parameters that
506 were added during clone materialization and fill in INDEX_MAP with adjusted
507 indices of corresponding original parameters and TRANS_MAP with descriptions
508 of all transitive replacements. Otherwise return zero. */
509
510 static unsigned
511 init_transitive_splits (vec<ipa_param_performed_split, va_gc> *performed_splits,
512 gcall *stmt, vec <unsigned> *index_map,
513 auto_vec <transitive_split_map> *trans_map)
514 {
515 unsigned phony_arguments = 0;
516 unsigned stmt_idx = 0, base_index = 0;
517 unsigned nargs = gimple_call_num_args (stmt);
518 while (stmt_idx < nargs)
519 {
520 unsigned unit_offset_delta;
521 tree base_arg = gimple_call_arg (stmt, stmt_idx);
522
523 if (phony_arguments > 0)
524 index_map->safe_push (stmt_idx);
525
526 unsigned sm_idx;
527 stmt_idx++;
528 if (transitive_split_p (performed_splits, base_arg, &sm_idx,
529 &unit_offset_delta))
530 {
531 if (phony_arguments == 0)
532 /* We have optimistically avoided constructing index_map so far but
533 now it is clear it will be necessary, so let's create the easy
534 bit we skipped until now. */
535 for (unsigned k = 0; k < stmt_idx; k++)
536 index_map->safe_push (k);
537
538 tree dummy = (*performed_splits)[sm_idx].dummy_decl;
539 for (unsigned j = sm_idx; j < performed_splits->length (); j++)
540 {
541 ipa_param_performed_split *caller_split
542 = &(*performed_splits)[j];
543 if (caller_split->dummy_decl != dummy)
544 break;
545
546 tree arg = gimple_call_arg (stmt, stmt_idx);
547 struct transitive_split_map tsm;
548 tsm.repl = arg;
549 tsm.base_index = base_index;
550 if (caller_split->unit_offset >= unit_offset_delta)
551 {
552 tsm.unit_offset
553 = (caller_split->unit_offset - unit_offset_delta);
554 trans_map->safe_push (tsm);
555 }
556
557 phony_arguments++;
558 stmt_idx++;
559 }
560 }
561 base_index++;
562 }
563 return phony_arguments;
564 }
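
/* A hypothetical illustration of the above: if the caller has itself been
   rewritten by IPA-SRA so that what used to be one aggregate argument is now
   passed as a dummy decl followed by two scalar replacements, and the callee
   performs the same split, PERFORMED_SPLITS holds two records sharing that
   dummy_decl with the unit offsets of the two scalars.  INDEX_MAP then lets
   surviving arguments be looked up by their original positions and TRANS_MAP
   records, for each (base_index, unit_offset) pair, which already available
   scalar argument should simply be passed on.  */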
565
566 /* Modify actual arguments of a function call in statement STMT, assuming it
567 calls CALLEE_DECL. PERFORMED_SPLITS, if non-NULL, describes parameter splits
568 already performed by the caller; UPDATE_REFERENCES says whether the caller's statement references should be updated. Return the new
569 statement that replaced the old one. When invoked, cfun and
570 current_function_decl have to be set to the caller. */
571
572 gcall *
573 ipa_param_adjustments::modify_call (gcall *stmt,
574 vec<ipa_param_performed_split,
575 va_gc> *performed_splits,
576 tree callee_decl, bool update_references)
577 {
578 unsigned len = vec_safe_length (m_adj_params);
579 auto_vec<tree, 16> vargs (len);
580 tree old_decl = gimple_call_fndecl (stmt);
581 unsigned old_nargs = gimple_call_num_args (stmt);
582 auto_vec<bool, 16> kept (old_nargs);
583 kept.quick_grow_cleared (old_nargs);
584
585 auto_vec <unsigned, 16> index_map;
586 auto_vec <transitive_split_map> trans_map;
587 bool transitive_remapping = false;
588
589 if (performed_splits)
590 {
591 unsigned removed = init_transitive_splits (performed_splits,
592 stmt, &index_map, &trans_map);
593 if (removed > 0)
594 {
595 transitive_remapping = true;
596 old_nargs -= removed;
597 }
598 }
599
600 cgraph_node *current_node = cgraph_node::get (current_function_decl);
601 if (update_references)
602 current_node->remove_stmt_references (stmt);
603
604 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
605 gimple_stmt_iterator prev_gsi = gsi;
606 gsi_prev (&prev_gsi);
607 for (unsigned i = 0; i < len; i++)
608 {
609 ipa_adjusted_param *apm = &(*m_adj_params)[i];
610 if (apm->op == IPA_PARAM_OP_COPY)
611 {
612 unsigned index = apm->base_index;
613 if (index >= old_nargs)
614 /* Can happen if the original call has argument mismatch,
615 ignore. */
616 continue;
617 if (transitive_remapping)
618 index = index_map[apm->base_index];
619
620 tree arg = gimple_call_arg (stmt, index);
621
622 vargs.quick_push (arg);
623 kept[index] = true;
624 continue;
625 }
626
627 /* At the moment the only user of IPA_PARAM_OP_NEW modifies calls itself.
628 If we ever want to support it during WPA IPA stage, we'll need a
629 mechanism to call into the IPA passes that introduced them. Currently
630 we simply mandate that IPA infrastructure understands all argument
631 modifications. Remember, edge redirection/modification is done only
632 once, not in steps for each pass modifying the callee like clone
633 materialization. */
634 gcc_assert (apm->op == IPA_PARAM_OP_SPLIT);
635
636 /* We have to handle transitive changes differently using the maps we
637 have created before. So look into them first. */
638 tree repl = NULL_TREE;
639 for (unsigned j = 0; j < trans_map.length (); j++)
640 if (trans_map[j].base_index == apm->base_index
641 && trans_map[j].unit_offset == apm->unit_offset)
642 {
643 repl = trans_map[j].repl;
644 break;
645 }
646 if (repl)
647 {
648 vargs.quick_push (repl);
649 continue;
650 }
651
652 unsigned index = apm->base_index;
653 if (index >= old_nargs)
654 /* Can happen if the original call has argument mismatch, ignore. */
655 continue;
656 if (transitive_remapping)
657 index = index_map[apm->base_index];
658 tree base = gimple_call_arg (stmt, index);
659
660 /* As we create a new parameter out of the value of the old one, we can
661 do the following kinds of transformations:
662
663 - A scalar passed by reference, potentially as a part of a larger
664 aggregate, is converted to a scalar passed by value.
665
666 - A part of an aggregate is passed instead of the whole aggregate. */
667
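/* E.g. (illustration only) a call foo (&s), where the clone of foo only
   wants the scalar at byte offset 4 of the aggregate, ends up passing
   MEM <int> [(int *)&s + 4], possibly loaded into a temporary first as
   done below.  */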
668 location_t loc = gimple_location (stmt);
669 tree off;
670 bool deref_base = false;
671 unsigned int deref_align = 0;
672 if (TREE_CODE (base) != ADDR_EXPR
673 && is_gimple_reg_type (TREE_TYPE (base)))
674 {
675 /* Detect type mismatches in calls in invalid programs and make a
676 poor attempt to gracefully convert them so that we don't ICE. */
677 if (!POINTER_TYPE_P (TREE_TYPE (base)))
678 base = force_value_to_type (ptr_type_node, base);
679
680 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
681 }
682 else
683 {
684 bool addrof;
685 if (TREE_CODE (base) == ADDR_EXPR)
686 {
687 base = TREE_OPERAND (base, 0);
688 addrof = true;
689 }
690 else
691 addrof = false;
692
693 tree prev_base = base;
694 poly_int64 base_offset;
695 base = get_addr_base_and_unit_offset (base, &base_offset);
696
697 /* Aggregate arguments can have non-invariant addresses. */
698 if (!base)
699 {
700 base = build_fold_addr_expr (prev_base);
701 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
702 }
703 else if (TREE_CODE (base) == MEM_REF)
704 {
705 if (!addrof)
706 {
707 deref_base = true;
708 deref_align = TYPE_ALIGN (TREE_TYPE (base));
709 }
710 off = build_int_cst (apm->alias_ptr_type,
711 base_offset + apm->unit_offset);
712 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
713 off);
714 base = TREE_OPERAND (base, 0);
715 }
716 else
717 {
718 off = build_int_cst (apm->alias_ptr_type,
719 base_offset + apm->unit_offset);
720 base = build_fold_addr_expr (base);
721 }
722 }
723
724 tree type = apm->type;
725 unsigned int align;
726 unsigned HOST_WIDE_INT misalign;
727
728 if (deref_base)
729 {
730 align = deref_align;
731 misalign = 0;
732 }
733 else
734 {
735 get_pointer_alignment_1 (base, &align, &misalign);
736 /* All users must make sure that we can be optimistic when it
737 comes to alignment in this case (by inspecting the final users
738 of these new parameters). */
739 if (TYPE_ALIGN (type) > align)
740 align = TYPE_ALIGN (type);
741 }
742 misalign
743 += (offset_int::from (wi::to_wide (off), SIGNED).to_short_addr ()
744 * BITS_PER_UNIT);
745 misalign = misalign & (align - 1);
746 if (misalign != 0)
747 align = least_bit_hwi (misalign);
748 if (align < TYPE_ALIGN (type))
749 type = build_aligned_type (type, align);
750 base = force_gimple_operand_gsi (&gsi, base,
751 true, NULL, true, GSI_SAME_STMT);
752 tree expr = fold_build2_loc (loc, MEM_REF, type, base, off);
753 REF_REVERSE_STORAGE_ORDER (expr) = apm->reverse;
754 /* If expr is not a valid gimple call argument emit
755 a load into a temporary. */
756 if (is_gimple_reg_type (TREE_TYPE (expr)))
757 {
758 gimple *tem = gimple_build_assign (NULL_TREE, expr);
759 if (gimple_in_ssa_p (cfun))
760 {
761 gimple_set_vuse (tem, gimple_vuse (stmt));
762 expr = make_ssa_name (TREE_TYPE (expr), tem);
763 }
764 else
765 expr = create_tmp_reg (TREE_TYPE (expr));
766 gimple_assign_set_lhs (tem, expr);
767 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
768 }
769 vargs.quick_push (expr);
770 }
771
772 if (m_always_copy_start >= 0)
773 for (unsigned i = m_always_copy_start; i < old_nargs; i++)
774 vargs.safe_push (gimple_call_arg (stmt, i));
775
776 /* For optimized away parameters, add on the caller side
777 before the call
778 DEBUG D#X => parm_Y(D)
779 stmts and associate D#X with parm in decl_debug_args_lookup
780 vector to say for debug info that if parameter parm had been passed,
781 it would have value parm_Y(D). */
782 if (MAY_HAVE_DEBUG_BIND_STMTS && old_decl && callee_decl)
783 {
784 vec<tree, va_gc> **debug_args = NULL;
785 unsigned i = 0;
786 for (tree old_parm = DECL_ARGUMENTS (old_decl);
787 old_parm && i < old_nargs && ((int) i) < m_always_copy_start;
788 old_parm = DECL_CHAIN (old_parm), i++)
789 {
790 if (!is_gimple_reg (old_parm) || kept[i])
791 continue;
792 tree origin = DECL_ORIGIN (old_parm);
793 tree arg = gimple_call_arg (stmt, i);
794
795 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
796 {
797 if (!fold_convertible_p (TREE_TYPE (origin), arg))
798 continue;
799 tree rhs1;
800 if (TREE_CODE (arg) == SSA_NAME
801 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
802 && (rhs1
803 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
804 && useless_type_conversion_p (TREE_TYPE (origin),
805 TREE_TYPE (rhs1)))
806 arg = rhs1;
807 else
808 arg = fold_convert_loc (gimple_location (stmt),
809 TREE_TYPE (origin), arg);
810 }
811 if (debug_args == NULL)
812 debug_args = decl_debug_args_insert (callee_decl);
813 unsigned int ix;
814 tree ddecl = NULL_TREE;
815 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
816 if (ddecl == origin)
817 {
818 ddecl = (**debug_args)[ix + 1];
819 break;
820 }
821 if (ddecl == NULL)
822 {
823 ddecl = make_node (DEBUG_EXPR_DECL);
824 DECL_ARTIFICIAL (ddecl) = 1;
825 TREE_TYPE (ddecl) = TREE_TYPE (origin);
826 SET_DECL_MODE (ddecl, DECL_MODE (origin));
827
828 vec_safe_push (*debug_args, origin);
829 vec_safe_push (*debug_args, ddecl);
830 }
831 gimple *def_temp = gimple_build_debug_bind (ddecl,
832 unshare_expr (arg), stmt);
833 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
834 }
835 }
836
837 if (dump_file && (dump_flags & TDF_DETAILS))
838 {
839 fprintf (dump_file, "replacing stmt:");
840 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
841 }
842
843 gcall *new_stmt = gimple_build_call_vec (callee_decl, vargs);
844
845 if (tree lhs = gimple_call_lhs (stmt))
846 {
847 if (!m_skip_return)
848 gimple_call_set_lhs (new_stmt, lhs);
849 else if (TREE_CODE (lhs) == SSA_NAME)
850 {
851 /* LHS should now be a default-def SSA_NAME. Unfortunately default-def
852 SSA_NAMEs need a backing variable (or at least some code examining
853 SSAs assumes it is non-NULL). So we either have to re-use the
854 decl we have at hand or introduce a new one. */
855 tree repl = create_tmp_var (TREE_TYPE (lhs), "removed_return");
856 repl = get_or_create_ssa_default_def (cfun, repl);
857 SSA_NAME_IS_DEFAULT_DEF (repl) = true;
858 imm_use_iterator ui;
859 use_operand_p use_p;
860 gimple *using_stmt;
861 FOR_EACH_IMM_USE_STMT (using_stmt, ui, lhs)
862 {
863 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
864 {
865 SET_USE (use_p, repl);
866 }
867 update_stmt (using_stmt);
868 }
869 }
870 }
871
872 gimple_set_block (new_stmt, gimple_block (stmt));
873 if (gimple_has_location (stmt))
874 gimple_set_location (new_stmt, gimple_location (stmt));
875 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
876 gimple_call_copy_flags (new_stmt, stmt);
877 if (gimple_in_ssa_p (cfun))
878 gimple_move_vops (new_stmt, stmt);
879
880 if (dump_file && (dump_flags & TDF_DETAILS))
881 {
882 fprintf (dump_file, "with stmt:");
883 print_gimple_stmt (dump_file, new_stmt, 0);
884 fprintf (dump_file, "\n");
885 }
886 gsi_replace (&gsi, new_stmt, true);
887 if (update_references)
888 do
889 {
890 current_node->record_stmt_references (gsi_stmt (gsi));
891 gsi_prev (&gsi);
892 }
893 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
894 return new_stmt;
895 }
896
897 /* Dump information contained in the object in textual form to F. */
898
899 void
900 ipa_param_adjustments::dump (FILE *f)
901 {
902 fprintf (f, " m_always_copy_start: %i\n", m_always_copy_start);
903 ipa_dump_adjusted_parameters (f, m_adj_params);
904 if (m_skip_return)
905 fprintf (f, " Will SKIP return.\n");
906 }
907
908 /* Dump information contained in the object in textual form to stderr. */
909
910 void
911 ipa_param_adjustments::debug ()
912 {
913 dump (stderr);
914 }
915
916 /* Register that REPLACEMENT should replace the parameter described in APM;
917 DUMMY, if non-NULL, is stored along with it to mark transitive splits across calls. */
918
919 void
920 ipa_param_body_adjustments::register_replacement (ipa_adjusted_param *apm,
921 tree replacement,
922 tree dummy)
923 {
924 gcc_checking_assert (apm->op == IPA_PARAM_OP_SPLIT
925 || apm->op == IPA_PARAM_OP_NEW);
926 gcc_checking_assert (!apm->prev_clone_adjustment);
927 ipa_param_body_replacement psr;
928 psr.base = m_oparms[apm->prev_clone_index];
929 psr.repl = replacement;
930 psr.dummy = dummy;
931 psr.unit_offset = apm->unit_offset;
932 m_replacements.safe_push (psr);
933 }
934
935 /* Copy or not, as appropriate given m_id and decl context, a pre-existing
936 PARM_DECL T so that it can be included in the parameters of the modified
937 function. */
938
939 tree
940 ipa_param_body_adjustments::carry_over_param (tree t)
941 {
942 tree new_parm;
943 if (m_id)
944 {
945 new_parm = remap_decl (t, m_id);
946 if (TREE_CODE (new_parm) != PARM_DECL)
947 new_parm = m_id->copy_decl (t, m_id);
948 }
949 else if (DECL_CONTEXT (t) != m_fndecl)
950 {
951 new_parm = copy_node (t);
952 DECL_CONTEXT (new_parm) = m_fndecl;
953 }
954 else
955 new_parm = t;
956 return new_parm;
957 }
958
959 /* Common initialization performed by all ipa_param_body_adjustments
960 constructors. OLD_FNDECL is the declaration we take original arguments
961 from (it may be the same as M_FNDECL). VARS, if non-NULL, is a pointer to
962 a chained list of new local variables. TREE_MAP is the IPA-CP produced
963 mapping of trees to constants.
964
965 The function is rather long but it really only initializes all data members
966 of the class. It creates new param DECLs, finds their new types and records replacement information for parameters that are being split. */
967
968 void
969 ipa_param_body_adjustments::common_initialization (tree old_fndecl,
970 tree *vars,
971 vec<ipa_replace_map *,
972 va_gc> *tree_map)
973 {
974 push_function_arg_decls (&m_oparms, old_fndecl);
975 auto_vec<tree,16> otypes;
976 if (TYPE_ARG_TYPES (TREE_TYPE (old_fndecl)) != NULL_TREE)
977 push_function_arg_types (&otypes, TREE_TYPE (old_fndecl));
978 else
979 {
980 auto_vec<tree,16> oparms;
981 push_function_arg_decls (&oparms, old_fndecl);
982 unsigned ocount = oparms.length ();
983 otypes.reserve_exact (ocount);
984 for (unsigned i = 0; i < ocount; i++)
985 otypes.quick_push (TREE_TYPE (oparms[i]));
986 }
987 fill_vector_of_new_param_types (&m_new_types, &otypes, m_adj_params, true);
988
989 auto_vec<bool, 16> kept;
990 kept.reserve_exact (m_oparms.length ());
991 kept.quick_grow_cleared (m_oparms.length ());
992 auto_vec<tree, 16> isra_dummy_decls;
993 isra_dummy_decls.reserve_exact (m_oparms.length ());
994 isra_dummy_decls.quick_grow_cleared (m_oparms.length ());
995
996 unsigned adj_len = vec_safe_length (m_adj_params);
997 m_method2func = ((TREE_CODE (TREE_TYPE (m_fndecl)) == METHOD_TYPE)
998 && (adj_len == 0
999 || (*m_adj_params)[0].op != IPA_PARAM_OP_COPY
1000 || (*m_adj_params)[0].base_index != 0));
1001
1002 /* The main job of this function is to go over the vector of adjusted
1003 parameters and create declarations or find corresponding old ones and push
1004 them to m_new_decls. For IPA-SRA replacements it also creates
1005 corresponding m_id->dst_node->clone.performed_splits entries. */
1006
1007 m_new_decls.reserve_exact (adj_len);
1008 for (unsigned i = 0; i < adj_len ; i++)
1009 {
1010 ipa_adjusted_param *apm = &(*m_adj_params)[i];
1011 unsigned prev_index = apm->prev_clone_index;
1012 tree new_parm;
1013 if (apm->op == IPA_PARAM_OP_COPY
1014 || apm->prev_clone_adjustment)
1015 {
1016 kept[prev_index] = true;
1017 new_parm = carry_over_param (m_oparms[prev_index]);
1018 m_new_decls.quick_push (new_parm);
1019 }
1020 else if (apm->op == IPA_PARAM_OP_NEW
1021 || apm->op == IPA_PARAM_OP_SPLIT)
1022 {
1023 tree new_type = m_new_types[i];
1024 gcc_checking_assert (new_type);
1025 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
1026 new_type);
1027 const char *prefix = ipa_param_prefixes[apm->param_prefix_index];
1028 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
1029 DECL_ARTIFICIAL (new_parm) = 1;
1030 DECL_ARG_TYPE (new_parm) = new_type;
1031 DECL_CONTEXT (new_parm) = m_fndecl;
1032 TREE_USED (new_parm) = 1;
1033 DECL_IGNORED_P (new_parm) = 1;
1034 layout_decl (new_parm, 0);
1035 m_new_decls.quick_push (new_parm);
1036
1037 if (apm->op == IPA_PARAM_OP_SPLIT)
1038 {
1039 m_split_modifications_p = true;
1040
1041 if (m_id)
1042 {
1043 tree dummy_decl;
1044 if (!isra_dummy_decls[prev_index])
1045 {
1046 dummy_decl = copy_decl_to_var (m_oparms[prev_index],
1047 m_id);
1048 /* Any attempt to remap this dummy in this particular
1049 instance of clone materialization should yield
1050 itself. */
1051 insert_decl_map (m_id, dummy_decl, dummy_decl);
1052
1053 DECL_CHAIN (dummy_decl) = *vars;
1054 *vars = dummy_decl;
1055 isra_dummy_decls[prev_index] = dummy_decl;
1056 }
1057 else
1058 dummy_decl = isra_dummy_decls[prev_index];
1059
1060 register_replacement (apm, new_parm, dummy_decl);
1061 ipa_param_performed_split ps;
1062 ps.dummy_decl = dummy_decl;
1063 ps.unit_offset = apm->unit_offset;
1064 vec_safe_push (m_id->dst_node->clone.performed_splits, ps);
1065 }
1066 else
1067 register_replacement (apm, new_parm);
1068 }
1069 }
1070 else
1071 gcc_unreachable ();
1072 }
1073
1074
1075 /* As part of body modifications, we will also have to replace remaining
1076 uses of removed PARM_DECLs (which do not however use the initial value)
1077 with their VAR_DECL copies.
1078
1079 We do this differently with and without m_id. With m_id, we rely on its
1080 mapping and create a replacement straight away. Without it, we have our
1081 own mechanism for which we have to populate m_removed_decls vector. Just
1082 don't mix them, that is why you should not call
1083 replace_removed_params_ssa_names or perform_cfun_body_modifications when
1084 you construct with ID not equal to NULL. */
1085
1086 unsigned op_len = m_oparms.length ();
1087 for (unsigned i = 0; i < op_len; i++)
1088 if (!kept[i])
1089 {
1090 if (m_id)
1091 {
1092 if (!m_id->decl_map->get (m_oparms[i]))
1093 {
1094 /* TODO: Perhaps at least aggregate-type params could re-use
1095 their isra_dummy_decl here? */
1096 tree var = copy_decl_to_var (m_oparms[i], m_id);
1097 insert_decl_map (m_id, m_oparms[i], var);
1098 /* Declare this new variable. */
1099 DECL_CHAIN (var) = *vars;
1100 *vars = var;
1101 }
1102 }
1103 else
1104 {
1105 m_removed_decls.safe_push (m_oparms[i]);
1106 m_removed_map.put (m_oparms[i], m_removed_decls.length () - 1);
1107 }
1108 }
1109
1110 if (!MAY_HAVE_DEBUG_STMTS)
1111 return;
1112
1113 /* Finally, when generating debug info, we fill vector m_reset_debug_decls
1114 with removed parameter declarations. We do this in order to re-map their
1115 debug bind statements and create debug decls for them. */
1116
1117 if (tree_map)
1118 {
1119 /* Do not output debuginfo for parameter declarations as if they vanished
1120 when they were in fact replaced by a constant. */
1121 auto_vec <int, 16> index_mapping;
1122 bool need_remap = false;
1123
1124 if (m_id && m_id->src_node->clone.param_adjustments)
1125 {
1126 ipa_param_adjustments *prev_adjustments
1127 = m_id->src_node->clone.param_adjustments;
1128 prev_adjustments->get_updated_indices (&index_mapping);
1129 need_remap = true;
1130 }
1131
1132 for (unsigned i = 0; i < tree_map->length (); i++)
1133 {
1134 int parm_num = (*tree_map)[i]->parm_num;
1135 gcc_assert (parm_num >= 0);
1136 if (need_remap)
1137 parm_num = index_mapping[parm_num];
1138 kept[parm_num] = true;
1139 }
1140 }
1141
1142 for (unsigned i = 0; i < op_len; i++)
1143 if (!kept[i] && is_gimple_reg (m_oparms[i]))
1144 m_reset_debug_decls.safe_push (m_oparms[i]);
1145 }
1146
1147 /* Constructor of ipa_param_body_adjustments from a simple list of
1148 modifications to parameters listed in ADJ_PARAMS which will prepare the
1149 ground for modification of parameters of FNDECL. The return value of the
1150 function will not be removed and the object will assume it does not run as
1151 a part of tree_function_versioning. */
1152
1153 ipa_param_body_adjustments
1154 ::ipa_param_body_adjustments (vec<ipa_adjusted_param, va_gc> *adj_params,
1155 tree fndecl)
1156 : m_adj_params (adj_params), m_adjustments (NULL), m_reset_debug_decls (),
1157 m_split_modifications_p (false), m_fndecl (fndecl), m_id (NULL),
1158 m_oparms (), m_new_decls (), m_new_types (), m_replacements (),
1159 m_removed_decls (), m_removed_map (), m_method2func (false)
1160 {
1161 common_initialization (fndecl, NULL, NULL);
1162 }
1163
1164 /* Constructor of ipa_param_body_adjustments from ipa_param_adjustments in
1165 ADJUSTMENTS which will prepare the ground for modification of parameters of
1166 FNDECL. The object will assume it does not run as a part of
1167 tree_function_versioning. */
1168
1169 ipa_param_body_adjustments
1170 ::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
1171 tree fndecl)
1172 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
1173 m_reset_debug_decls (), m_split_modifications_p (false), m_fndecl (fndecl),
1174 m_id (NULL), m_oparms (), m_new_decls (), m_new_types (),
1175 m_replacements (), m_removed_decls (), m_removed_map (),
1176 m_method2func (false)
1177 {
1178 common_initialization (fndecl, NULL, NULL);
1179 }
1180
1181 /* Constructor of ipa_param_body_adjustments which sets it up as a part of
1182 running tree_function_versioning. Planned modifications to the function are
1183 in ADJUSTMENTS. FNDECL designates the new function clone which is being
1184 modified. OLD_FNDECL is the function of which FNDECL is a clone (and which
1185 at the time of invocation still share DECL_ARGUMENTS). ID is the
1186 copy_body_data structure driving the whole body copying process. VARS is a
1187 pointer to the head of the list of new local variables, TREE_MAP is the map
1188 that drives tree substitution in the cloning process. */
1189
1190 ipa_param_body_adjustments
1191 ::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
1192 tree fndecl, tree old_fndecl,
1193 copy_body_data *id, tree *vars,
1194 vec<ipa_replace_map *, va_gc> *tree_map)
1195 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
1196 m_reset_debug_decls (), m_split_modifications_p (false), m_fndecl (fndecl),
1197 m_id (id), m_oparms (), m_new_decls (), m_new_types (), m_replacements (),
1198 m_removed_decls (), m_removed_map (), m_method2func (false)
1199 {
1200 common_initialization (old_fndecl, vars, tree_map);
1201 }
1202
1203 /* Chain new param decls up and return them. */
1204
1205 tree
1206 ipa_param_body_adjustments::get_new_param_chain ()
1207 {
1208 tree result;
1209 tree *link = &result;
1210
1211 unsigned len = vec_safe_length (m_adj_params);
1212 for (unsigned i = 0; i < len; i++)
1213 {
1214 tree new_decl = m_new_decls[i];
1215 *link = new_decl;
1216 link = &DECL_CHAIN (new_decl);
1217 }
1218 *link = NULL_TREE;
1219 return result;
1220 }
1221
1222 /* Modify the formal parameters of FNDECL and its type according to the plan in
1223 ADJUSTMENTS. This function needs to be called when the decl has not already
1224 been processed with ipa_param_adjustments::adjust_decl, otherwise just
1225 setting DECL_ARGUMENTS to whatever get_new_param_chain returns is enough. */
1226
1227 void
1228 ipa_param_body_adjustments::modify_formal_parameters ()
1229 {
1230 tree orig_type = TREE_TYPE (m_fndecl);
1231 DECL_ARGUMENTS (m_fndecl) = get_new_param_chain ();
1232
1233 /* When signature changes, we need to clear builtin info. */
1234 if (fndecl_built_in_p (m_fndecl))
1235 set_decl_built_in_function (m_fndecl, NOT_BUILT_IN, 0);
1236
1237 /* At this point, removing return value is only implemented when going
1238 through tree_function_versioning, not when modifying function body
1239 directly. */
1240 gcc_assert (!m_adjustments || !m_adjustments->m_skip_return);
1241 tree new_type = build_adjusted_function_type (orig_type, &m_new_types,
1242 m_method2func, false);
1243
1244 TREE_TYPE (m_fndecl) = new_type;
1245 DECL_VIRTUAL_P (m_fndecl) = 0;
1246 DECL_LANG_SPECIFIC (m_fndecl) = NULL;
1247 if (m_method2func)
1248 DECL_VINDEX (m_fndecl) = NULL_TREE;
1249 }
1250
1251 /* Given BASE and UNIT_OFFSET, find the corresponding record among replacement
1252 structures. */
1253
1254 ipa_param_body_replacement *
1255 ipa_param_body_adjustments::lookup_replacement_1 (tree base,
1256 unsigned unit_offset)
1257 {
1258 unsigned int len = m_replacements.length ();
1259 for (unsigned i = 0; i < len; i++)
1260 {
1261 ipa_param_body_replacement *pbr = &m_replacements[i];
1262
1263 if (pbr->base == base
1264 && (pbr->unit_offset == unit_offset))
1265 return pbr;
1266 }
1267 return NULL;
1268 }
1269
1270 /* Given BASE and UNIT_OFFSET, find the corresponding replacement expression
1271 and return it, assuming it is known that it does not hold the value by
1272 reference or in reverse storage order. */
1273
1274 tree
1275 ipa_param_body_adjustments::lookup_replacement (tree base, unsigned unit_offset)
1276 {
1277 ipa_param_body_replacement *pbr = lookup_replacement_1 (base, unit_offset);
1278 if (!pbr)
1279 return NULL;
1280 return pbr->repl;
1281 }
1282
1283 /* If T is an SSA_NAME, return NULL if it is not a default def or
1284 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
1285 the base variable is always returned, regardless if it is a default
1286 def. Return T if it is not an SSA_NAME. */
1287
1288 static tree
1289 get_ssa_base_param (tree t, bool ignore_default_def)
1290 {
1291 if (TREE_CODE (t) == SSA_NAME)
1292 {
1293 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
1294 return SSA_NAME_VAR (t);
1295 else
1296 return NULL_TREE;
1297 }
1298 return t;
1299 }
1300
1301 /* Given an expression, return the structure describing how it should be
1302 replaced if it accesses a part of a split parameter or NULL otherwise.
1303
1304 Do not free the result, it will be deallocated when the object is destroyed.
1305
1306 If IGNORE_DEFAULT_DEF is cleared, consider only SSA_NAMEs of PARM_DECLs
1307 which are default definitions; if set, consider all SSA_NAMEs of
1308 PARM_DECLs. */
1309
1310 ipa_param_body_replacement *
1311 ipa_param_body_adjustments::get_expr_replacement (tree expr,
1312 bool ignore_default_def)
1313 {
1314 tree base;
1315 unsigned unit_offset;
1316
1317 if (!isra_get_ref_base_and_offset (expr, &base, &unit_offset))
1318 return NULL;
1319
1320 base = get_ssa_base_param (base, ignore_default_def);
1321 if (!base || TREE_CODE (base) != PARM_DECL)
1322 return NULL;
1323 return lookup_replacement_1 (base, unit_offset);
1324 }
1325
1326 /* Given OLD_DECL, which is a PARM_DECL of a parameter that is being removed
1327 (which includes it being split or replaced), return a new variable that
1328 should be used for any SSA names that will remain in the function that
1329 previously belonged to OLD_DECL. */
1330
1331 tree
1332 ipa_param_body_adjustments::get_replacement_ssa_base (tree old_decl)
1333 {
1334 unsigned *idx = m_removed_map.get (old_decl);
1335 if (!idx)
1336 return NULL;
1337
1338 tree repl;
1339 if (TREE_CODE (m_removed_decls[*idx]) == PARM_DECL)
1340 {
1341 gcc_assert (m_removed_decls[*idx] == old_decl);
1342 repl = copy_var_decl (old_decl, DECL_NAME (old_decl),
1343 TREE_TYPE (old_decl));
1344 m_removed_decls[*idx] = repl;
1345 }
1346 else
1347 repl = m_removed_decls[*idx];
1348 return repl;
1349 }
1350
1351 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
1352 parameter which is to be removed because its value is not used, create a new
1353 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
1354 original with it and return it. If there is no need to re-map, return
1355 NULL. */
1356
1357 tree
1358 ipa_param_body_adjustments::replace_removed_params_ssa_names (tree old_name,
1359 gimple *stmt)
1360 {
1361 gcc_assert (!m_id);
1362 if (TREE_CODE (old_name) != SSA_NAME)
1363 return NULL;
1364
1365 tree decl = SSA_NAME_VAR (old_name);
1366 if (decl == NULL_TREE
1367 || TREE_CODE (decl) != PARM_DECL)
1368 return NULL;
1369
1370 tree repl = get_replacement_ssa_base (decl);
1371 if (!repl)
1372 return NULL;
1373
1374 tree new_name = make_ssa_name (repl, stmt);
1375 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
1376 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
1377
1378 if (dump_file && (dump_flags & TDF_DETAILS))
1379 {
1380 fprintf (dump_file, "replacing an SSA name of a removed param ");
1381 print_generic_expr (dump_file, old_name);
1382 fprintf (dump_file, " with ");
1383 print_generic_expr (dump_file, new_name);
1384 fprintf (dump_file, "\n");
1385 }
1386
1387 replace_uses_by (old_name, new_name);
1388 return new_name;
1389 }
1390
1391 /* If the expression *EXPR_P should be replaced, do so. CONVERT specifies
1392 whether the function should care about type incompatibility of the current
1393 and new expressions. If it is false, the function will leave
1394 incompatibility issues to the caller - note that when the function
1395 encounters a BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR, it will modify
1396 their bases instead of the expressions themselves and then also perform any
1397 necessary conversions. */
1398
1399 bool
1400 ipa_param_body_adjustments::modify_expression (tree *expr_p, bool convert)
1401 {
1402 tree expr = *expr_p;
1403
1404 if (TREE_CODE (expr) == BIT_FIELD_REF
1405 || TREE_CODE (expr) == IMAGPART_EXPR
1406 || TREE_CODE (expr) == REALPART_EXPR)
1407 {
1408 expr_p = &TREE_OPERAND (expr, 0);
1409 expr = *expr_p;
1410 convert = true;
1411 }
1412
1413 ipa_param_body_replacement *pbr = get_expr_replacement (expr, false);
1414 if (!pbr)
1415 return false;
1416
1417 tree repl = pbr->repl;
1418 if (dump_file && (dump_flags & TDF_DETAILS))
1419 {
1420 fprintf (dump_file, "About to replace expr ");
1421 print_generic_expr (dump_file, expr);
1422 fprintf (dump_file, " with ");
1423 print_generic_expr (dump_file, repl);
1424 fprintf (dump_file, "\n");
1425 }
1426
1427 if (convert && !useless_type_conversion_p (TREE_TYPE (expr),
1428 TREE_TYPE (repl)))
1429 {
1430 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expr), repl);
1431 *expr_p = vce;
1432 }
1433 else
1434 *expr_p = repl;
1435 return true;
1436 }
1437
1438 /* If the assignment statement STMT contains any expressions that need to
1439 be replaced with a different one as noted by ADJUSTMENTS, do so. Handle any
1440 potential type incompatibilities. If any conversion statements have to be
1441 pre-pended to STMT, they will be added to EXTRA_STMTS. Return true iff the
1442 statement was modified. */
1443
1444 bool
1445 ipa_param_body_adjustments::modify_assignment (gimple *stmt,
1446 gimple_seq *extra_stmts)
1447 {
1448 tree *lhs_p, *rhs_p;
1449 bool any;
1450
1451 if (!gimple_assign_single_p (stmt))
1452 return false;
1453
1454 rhs_p = gimple_assign_rhs1_ptr (stmt);
1455 lhs_p = gimple_assign_lhs_ptr (stmt);
1456
1457 any = modify_expression (lhs_p, false);
1458 any |= modify_expression (rhs_p, false);
1459 if (any
1460 && !useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
1461 {
1462 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
1463 {
1464 /* V_C_Es of constructors can cause trouble (PR 42714). */
1465 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
1466 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
1467 else
1468 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
1469 NULL);
1470 }
1471 else
1472 {
1473 tree new_rhs = fold_build1_loc (gimple_location (stmt),
1474 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
1475 *rhs_p);
1476 tree tmp = force_gimple_operand (new_rhs, extra_stmts, true,
1477 NULL_TREE);
1478 gimple_assign_set_rhs1 (stmt, tmp);
1479 }
1480 return true;
1481 }
1482
1483 return any;
1484 }
1485
1486 /* Data passed to remap_split_decl_to_dummy through walk_tree. */
1487
1488 struct simple_tree_swap_info
1489 {
1490 /* Change FROM to TO. */
1491 tree from, to;
1492 /* And set DONE to true when doing so. */
1493 bool done;
1494 };
1495
1496 /* Simple remapper to remap a split parameter to the same expression based on a
1497 special dummy decl so that edge redirections can detect transitive splitting
1498 and finish them. */
1499
1500 static tree
1501 remap_split_decl_to_dummy (tree *tp, int *walk_subtrees, void *data)
1502 {
1503 tree t = *tp;
1504
1505 if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
1506 {
1507 struct simple_tree_swap_info *swapinfo
1508 = (struct simple_tree_swap_info *) data;
1509 if (t == swapinfo->from
1510 || (TREE_CODE (t) == SSA_NAME
1511 && SSA_NAME_VAR (t) == swapinfo->from))
1512 {
1513 *tp = swapinfo->to;
1514 swapinfo->done = true;
1515 }
1516 *walk_subtrees = 0;
1517 }
1518 else if (TYPE_P (t))
1519 *walk_subtrees = 0;
1520 else
1521 *walk_subtrees = 1;
1522 return NULL_TREE;
1523 }
1524
1525
1526 /* If the call statement pointed at by STMT_P contains any expressions that
1527 need to be replaced with a different one as noted by ADJUSTMENTS, do so. If the
1528 statement needs to be rebuilt, do so. Return true if any modifications have
1529 been performed.
1530
1531 If the method is invoked as a part of IPA clone materialization and if any
1532 parameter split is transitive, i.e. it applies to the function that is being
1533 modified and also to the callee of the statement, replace the parameter
1534 passed to old callee with an equivalent expression based on a dummy decl
1535 followed by PARM_DECLs representing the actual replacements. The actual
1536 replacements will be then converted into SSA_NAMEs and then
1537 ipa_param_adjustments::modify_call will find the appropriate ones and leave
1538 only those in the call. */
1539
1540 bool
1541 ipa_param_body_adjustments::modify_call_stmt (gcall **stmt_p)
1542 {
1543 gcall *stmt = *stmt_p;
1544 auto_vec <unsigned, 4> pass_through_args;
1545 auto_vec <unsigned, 4> pass_through_pbr_indices;
1546
1547 if (m_split_modifications_p && m_id)
1548 {
1549 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1550 {
1551 tree t = gimple_call_arg (stmt, i);
1552 gcc_assert (TREE_CODE (t) != BIT_FIELD_REF
1553 && TREE_CODE (t) != IMAGPART_EXPR
1554 && TREE_CODE (t) != REALPART_EXPR);
1555
1556 tree base;
1557 unsigned unit_offset;
1558 if (!isra_get_ref_base_and_offset (t, &base, &unit_offset))
1559 continue;
1560
1561 bool by_ref = false;
1562 if (TREE_CODE (base) == SSA_NAME)
1563 {
1564 if (!SSA_NAME_IS_DEFAULT_DEF (base))
1565 continue;
1566 base = SSA_NAME_VAR (base);
1567 gcc_checking_assert (base);
1568 by_ref = true;
1569 }
1570 if (TREE_CODE (base) != PARM_DECL)
1571 continue;
1572
1573 bool base_among_replacements = false;
1574 unsigned j, repl_list_len = m_replacements.length ();
1575 for (j = 0; j < repl_list_len; j++)
1576 {
1577 ipa_param_body_replacement *pbr = &m_replacements[j];
1578 if (pbr->base == base)
1579 {
1580 base_among_replacements = true;
1581 break;
1582 }
1583 }
1584 if (!base_among_replacements)
1585 continue;
1586
1587 /* We still have to distinguish between an end-use that we have to
1588 transform now and a pass-through, which happens in the following
1589 two cases. */
1590
1591 /* TODO: After we adjust ptr_parm_has_nonarg_uses to also consider
1592 &MEM_REF[ssa_name + offset], we will also have to detect that case
1593 here. */
1594
1595 if (TREE_CODE (t) == SSA_NAME
1596 && SSA_NAME_IS_DEFAULT_DEF (t)
1597 && SSA_NAME_VAR (t)
1598 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL)
1599 {
1600 /* This must be a by_reference pass-through. */
1601 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
1602 pass_through_args.safe_push (i);
1603 pass_through_pbr_indices.safe_push (j);
1604 }
1605 else if (!by_ref && AGGREGATE_TYPE_P (TREE_TYPE (t)))
1606 {
1607 /* Currently IPA-SRA guarantees the aggregate access type
1608 exactly matches in this case. So if it does not match, it is
1609 a pass-through argument that will be sorted out at edge
1610 redirection time. */
1611 ipa_param_body_replacement *pbr
1612 = lookup_replacement_1 (base, unit_offset);
1613
1614 if (!pbr
1615 || (TYPE_MAIN_VARIANT (TREE_TYPE (t))
1616 != TYPE_MAIN_VARIANT (TREE_TYPE (pbr->repl))))
1617 {
1618 pass_through_args.safe_push (i);
1619 pass_through_pbr_indices.safe_push (j);
1620 }
1621 }
1622 }
1623 }
1624
1625 unsigned nargs = gimple_call_num_args (stmt);
1626 if (!pass_through_args.is_empty ())
1627 {
1628 auto_vec<tree, 16> vargs;
1629 unsigned pt_idx = 0;
1630 for (unsigned i = 0; i < nargs; i++)
1631 {
1632 if (pt_idx < pass_through_args.length ()
1633 && i == pass_through_args[pt_idx])
1634 {
1635 unsigned j = pass_through_pbr_indices[pt_idx];
1636 pt_idx++;
1637 tree base = m_replacements[j].base;
1638
1639 /* The base will get mapped to the special transitive-isra marker
1640 dummy decl. */
1641 struct simple_tree_swap_info swapinfo;
1642 swapinfo.from = base;
1643 swapinfo.to = m_replacements[j].dummy;
1644 swapinfo.done = false;
1645 tree arg = gimple_call_arg (stmt, i);
1646 walk_tree (&arg, remap_split_decl_to_dummy, &swapinfo, NULL);
1647 gcc_assert (swapinfo.done);
1648 vargs.safe_push (arg);
1649 /* Now let's push all replacements pertaining to this parameter
1650 so that all gimple register ones get correct SSA_NAMES. Edge
1651 redirection will weed out the dummy argument as well as all
1652 unused replacements later. */
1653 unsigned int repl_list_len = m_replacements.length ();
1654 for (; j < repl_list_len; j++)
1655 {
1656 if (m_replacements[j].base != base)
1657 break;
1658 vargs.safe_push (m_replacements[j].repl);
1659 }
1660 }
1661 else
1662 {
1663 tree t = gimple_call_arg (stmt, i);
1664 modify_expression (&t, true);
1665 vargs.safe_push (t);
1666 }
1667 }
1668 gcall *new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
1669 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
1670 gimple_call_copy_flags (new_stmt, stmt);
1671 if (tree lhs = gimple_call_lhs (stmt))
1672 {
1673 modify_expression (&lhs, false);
1674 gimple_call_set_lhs (new_stmt, lhs);
1675 }
1676 *stmt_p = new_stmt;
1677 return true;
1678 }
1679
1680 /* Otherwise, no need to rebuild the statement, let's just modify arguments
1681 and the LHS if/as appropriate. */
1682 bool modified = false;
1683 for (unsigned i = 0; i < nargs; i++)
1684 {
1685 tree *t = gimple_call_arg_ptr (stmt, i);
1686 modified |= modify_expression (t, true);
1687 }
1688
1689 if (gimple_call_lhs (stmt))
1690 {
1691 tree *t = gimple_call_lhs_ptr (stmt);
1692 modified |= modify_expression (t, false);
1693 }
1694
1695 return modified;
1696 }
1697
1698 /* If the statement STMT contains any expressions that need to be replaced with a
1699 different one as noted by ADJUSTMENTS, do so. Handle any potential type
1700 incompatibilities. If any conversion statements have to be pre-pended to
1701 STMT, they will be added to EXTRA_STMTS. Return true iff the statement was
1702 modified. */
1703
1704 bool
1705 ipa_param_body_adjustments::modify_gimple_stmt (gimple **stmt,
1706 gimple_seq *extra_stmts)
1707 {
1708 bool modified = false;
1709 tree *t;
1710
1711 switch (gimple_code (*stmt))
1712 {
1713 case GIMPLE_RETURN:
1714 t = gimple_return_retval_ptr (as_a <greturn *> (*stmt));
1715 if (m_adjustments && m_adjustments->m_skip_return)
1716 *t = NULL_TREE;
1717 else if (*t != NULL_TREE)
1718 modified |= modify_expression (t, true);
1719 break;
1720
1721 case GIMPLE_ASSIGN:
1722 modified |= modify_assignment (*stmt, extra_stmts);
1723 break;
1724
1725 case GIMPLE_CALL:
1726 modified |= modify_call_stmt ((gcall **) stmt);
1727 break;
1728
1729 case GIMPLE_ASM:
1730 {
1731 gasm *asm_stmt = as_a <gasm *> (*stmt);
1732 for (unsigned i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1733 {
1734 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1735 modified |= modify_expression (t, true);
1736 }
1737 for (unsigned i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1738 {
1739 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1740 modified |= modify_expression (t, false);
1741 }
1742 }
1743 break;
1744
1745 default:
1746 break;
1747 }
1748 return modified;
1749 }
1750
1751
1752 /* Traverse body of the current function and perform the requested adjustments
1753 on its statements. Return true iff the CFG has been changed. */
1754
1755 bool
1756 ipa_param_body_adjustments::modify_cfun_body ()
1757 {
1758 bool cfg_changed = false;
1759 basic_block bb;
1760
1761 FOR_EACH_BB_FN (bb, cfun)
1762 {
1763 gimple_stmt_iterator gsi;
1764
1765 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1766 {
1767 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
1768 tree new_lhs, old_lhs = gimple_phi_result (phi);
1769 new_lhs = replace_removed_params_ssa_names (old_lhs, phi);
1770 if (new_lhs)
1771 {
1772 gimple_phi_set_result (phi, new_lhs);
1773 release_ssa_name (old_lhs);
1774 }
1775 }
1776
1777 gsi = gsi_start_bb (bb);
1778 while (!gsi_end_p (gsi))
1779 {
1780 gimple *stmt = gsi_stmt (gsi);
1781 gimple *stmt_copy = stmt;
1782 gimple_seq extra_stmts = NULL;
1783 bool modified = modify_gimple_stmt (&stmt, &extra_stmts);
1784 if (stmt != stmt_copy)
1785 {
1786 gcc_checking_assert (modified);
1787 gsi_replace (&gsi, stmt, false);
1788 }
1789 if (!gimple_seq_empty_p (extra_stmts))
1790 gsi_insert_seq_before (&gsi, extra_stmts, GSI_SAME_STMT);
1791
1792 def_operand_p defp;
1793 ssa_op_iter iter;
1794 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
1795 {
1796 tree old_def = DEF_FROM_PTR (defp);
1797 if (tree new_def = replace_removed_params_ssa_names (old_def,
1798 stmt))
1799 {
1800 SET_DEF (defp, new_def);
1801 release_ssa_name (old_def);
1802 modified = true;
1803 }
1804 }
1805
1806 if (modified)
1807 {
1808 update_stmt (stmt);
1809 if (maybe_clean_eh_stmt (stmt)
1810 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
1811 cfg_changed = true;
1812 }
1813 gsi_next (&gsi);
1814 }
1815 }
1816
1817 return cfg_changed;
1818 }
1819
1820 /* Call gimple_debug_bind_reset_value on all debug statements describing
1821 gimple register parameters that are being removed or replaced. */
1822
1823 void
1824 ipa_param_body_adjustments::reset_debug_stmts ()
1825 {
1826 int i, len;
1827 gimple_stmt_iterator *gsip = NULL, gsi;
1828
1829 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
1830 {
1831 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1832 gsip = &gsi;
1833 }
1834 len = m_reset_debug_decls.length ();
1835 for (i = 0; i < len; i++)
1836 {
1837 imm_use_iterator ui;
1838 gimple *stmt;
1839 gdebug *def_temp;
1840 tree name, vexpr, copy = NULL_TREE;
1841 use_operand_p use_p;
1842 tree decl = m_reset_debug_decls[i];
1843
1844 gcc_checking_assert (is_gimple_reg (decl));
1845 name = ssa_default_def (cfun, decl);
1846 vexpr = NULL;
1847 if (name)
1848 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
1849 {
1850 if (gimple_clobber_p (stmt))
1851 {
1852 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
1853 unlink_stmt_vdef (stmt);
1854 gsi_remove (&cgsi, true);
1855 release_defs (stmt);
1856 continue;
1857 }
1858 /* All other users must have been removed by function body
1859 modification. */
1860 gcc_assert (is_gimple_debug (stmt));
1861 if (vexpr == NULL && gsip != NULL)
1862 {
1863 vexpr = make_node (DEBUG_EXPR_DECL);
1864 def_temp = gimple_build_debug_source_bind (vexpr, decl, NULL);
1865 DECL_ARTIFICIAL (vexpr) = 1;
1866 TREE_TYPE (vexpr) = TREE_TYPE (name);
1867 SET_DECL_MODE (vexpr, DECL_MODE (decl));
1868 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
1869 }
1870 if (vexpr)
1871 {
1872 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
1873 SET_USE (use_p, vexpr);
1874 }
1875 else
1876 gimple_debug_bind_reset_value (stmt);
1877 update_stmt (stmt);
1878 }
1879 /* Create a VAR_DECL for debug info purposes. */
1880 if (!DECL_IGNORED_P (decl))
1881 {
1882 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1883 VAR_DECL, DECL_NAME (decl),
1884 TREE_TYPE (decl));
1885 if (DECL_PT_UID_SET_P (decl))
1886 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
1887 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
1888 TREE_READONLY (copy) = TREE_READONLY (decl);
1889 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
1890 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
1891 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
1892 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
1893 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
1894 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
1895 SET_DECL_RTL (copy, 0);
1896 TREE_USED (copy) = 1;
1897 DECL_CONTEXT (copy) = current_function_decl;
1898 add_local_decl (cfun, copy);
1899 DECL_CHAIN (copy)
1900 = BLOCK_VARS (DECL_INITIAL (current_function_decl));
1901 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
1902 }
1903 if (gsip != NULL && copy && target_for_debug_bind (decl))
1904 {
1905 gcc_assert (TREE_CODE (decl) == PARM_DECL);
1906 if (vexpr)
1907 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
1908 else
1909 def_temp = gimple_build_debug_source_bind (copy, decl,
1910 NULL);
1911 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
1912 }
1913 }
1914 }
1915
1916 /* Perform all necessary body changes to change signature, body and debug info
1917 of fun according to adjustments passed at construction. Return true if CFG
1918 was changed in any way. The main entry point for modification of standalone
1919 functions that is not part of IPA clone materialization. */
1920
1921 bool
1922 ipa_param_body_adjustments::perform_cfun_body_modifications ()
1923 {
1924 bool cfg_changed;
1925 modify_formal_parameters ();
1926 cfg_changed = modify_cfun_body ();
1927 reset_debug_stmts ();
1928
1929 return cfg_changed;
1930 }
1931
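
/* Usage sketch (not part of this file, only an illustration of the interfaces
   above): a pass working on a single function that has prepared a
   vec<ipa_adjusted_param, va_gc> *adj_params describing the desired changes
   could apply them along these lines:

     ipa_param_body_adjustments adjustments (adj_params, fndecl);
     bool cfg_changed = adjustments.perform_cfun_body_modifications ();

   which rewrites DECL_ARGUMENTS and TREE_TYPE of fndecl, modifies statements
   that refer to removed or split parameters and resets the related debug
   statements; cfun and current_function_decl are expected to be set to the
   function being modified.  */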