1 /* Manipulation of formal and actual parameters of functions and function
2 calls.
3 Copyright (C) 2017-2020 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "ssa.h"
28 #include "cgraph.h"
29 #include "fold-const.h"
30 #include "tree-eh.h"
31 #include "stor-layout.h"
32 #include "gimplify.h"
33 #include "gimple-iterator.h"
34 #include "gimplify-me.h"
35 #include "tree-cfg.h"
36 #include "tree-dfa.h"
37 #include "ipa-param-manipulation.h"
38 #include "print-tree.h"
39 #include "gimple-pretty-print.h"
40 #include "builtins.h"
41 #include "tree-ssa.h"
42 #include "tree-inline.h"
43
44
45 /* Actual prefixes of different newly synthesized parameters.  Keep in sync
46 with IPA_PARAM_PREFIX_* defines. */
47
48 static const char *ipa_param_prefixes[IPA_PARAM_PREFIX_COUNT]
49 = {"SYNTH",
50 "ISRA",
51 "simd",
52 "mask"};
53
54 /* Names of parameters for dumping. Keep in sync with enum ipa_parm_op. */
55
56 static const char *ipa_param_op_names[IPA_PARAM_PREFIX_COUNT]
57 = {"IPA_PARAM_OP_UNDEFINED",
58 "IPA_PARAM_OP_COPY",
59 "IPA_PARAM_OP_NEW",
60 "IPA_PARAM_OP_SPLIT"};
61
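/* An illustrative sketch (assumed example, not taken from these sources): for

     int foo (struct S *p, int unused);

   an IPA-SRA clone that drops the unused parameter and passes only the first
   int field of *P by value would be described by a single IPA_PARAM_OP_SPLIT
   entry (base_index 0, unit_offset 0) using the "ISRA" prefix above, and the
   clone would end up looking roughly like

     int foo.isra (int ISRA.0);

   IPA_PARAM_OP_COPY entries keep an existing parameter as it is, while
   IPA_PARAM_OP_NEW entries introduce entirely new ones.  */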
62 /* Fill an empty vector ARGS with PARM_DECLs representing formal parameters of
63 FNDECL. The function should not be called during LTO WPA phase except for
64 thunks (or functions with bodies streamed in). */
65
66 void
67 push_function_arg_decls (vec<tree> *args, tree fndecl)
68 {
69 int count;
70 tree parm;
71
72 /* Safety check that we do not attempt to use the function in WPA, except
73 when the function is a thunk and then we have DECL_ARGUMENTS or when we
74 have already explicitly loaded its body.  */
75 gcc_assert (!flag_wpa
76 || DECL_ARGUMENTS (fndecl)
77 || gimple_has_body_p (fndecl));
78 count = 0;
79 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
80 count++;
81
82 args->reserve_exact (count);
83 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
84 args->quick_push (parm);
85 }
86
87 /* Fill an empty vector TYPES with trees representing formal parameters of
88 function type FNTYPE. */
89
90 void
91 push_function_arg_types (vec<tree> *types, tree fntype)
92 {
93 int count = 0;
94 tree t;
95
96 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
97 count++;
98
99 types->reserve_exact (count);
100 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
101 types->quick_push (TREE_VALUE (t));
102 }
103
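/* As a hedged illustration of the underlying representation: for a prototyped
   declaration such as

     int f (int, char);

   TYPE_ARG_TYPES is a TREE_LIST whose TREE_VALUEs are int, char and finally
   void (the terminator of non-variadic prototypes), and the loop above pushes
   those values in that order.  */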
104 /* Dump the adjustments in the vector ADJ_PARAMS to file F in a human
105 friendly way.  */
106
107 void
108 ipa_dump_adjusted_parameters (FILE *f,
109 vec<ipa_adjusted_param, va_gc> *adj_params)
110 {
111 unsigned i, len = vec_safe_length (adj_params);
112 bool first = true;
113
114 fprintf (f, " IPA adjusted parameters: ");
115 for (i = 0; i < len; i++)
116 {
117 struct ipa_adjusted_param *apm;
118 apm = &(*adj_params)[i];
119
120 if (!first)
121 fprintf (f, " ");
122 else
123 first = false;
124
125 fprintf (f, "%i. %s %s", i, ipa_param_op_names[apm->op],
126 apm->prev_clone_adjustment ? "prev_clone_adjustment " : "");
127 switch (apm->op)
128 {
129 case IPA_PARAM_OP_UNDEFINED:
130 break;
131
132 case IPA_PARAM_OP_COPY:
133 fprintf (f, ", base_index: %u", apm->base_index);
134 fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index);
135 break;
136
137 case IPA_PARAM_OP_SPLIT:
138 fprintf (f, ", offset: %u", apm->unit_offset);
139 /* fall-through */
140 case IPA_PARAM_OP_NEW:
141 fprintf (f, ", base_index: %u", apm->base_index);
142 fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index);
143 print_node_brief (f, ", type: ", apm->type, 0);
144 print_node_brief (f, ", alias type: ", apm->alias_ptr_type, 0);
145 fprintf (f, " prefix: %s",
146 ipa_param_prefixes[apm->param_prefix_index]);
147 if (apm->reverse)
148 fprintf (f, ", reverse-sso");
149 break;
150 }
151 fprintf (f, "\n");
152 }
153 }
154
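/* Purely for illustration (exact whitespace and node details vary), a dump
   produced by the function above could look roughly like

     IPA adjusted parameters: 0. IPA_PARAM_OP_COPY, base_index: 0, prev_clone_index: 0
                              1. IPA_PARAM_OP_SPLIT, offset: 8, base_index: 1, prev_clone_index: 1, type: <integer_type int>, alias type: <pointer_type> prefix: ISRA  */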
155 /* Fill NEW_TYPES with types of a function after its current OTYPES have been
156 modified as described in ADJ_PARAMS. When USE_PREV_INDICES is true, use
157 prev_clone_index from ADJ_PARAMS rather than base_index when looking up copied
158 types in OTYPES.  */
159
160 static void
161 fill_vector_of_new_param_types (vec<tree> *new_types, vec<tree> *otypes,
162 vec<ipa_adjusted_param, va_gc> *adj_params,
163 bool use_prev_indices)
164 {
165 unsigned adj_len = vec_safe_length (adj_params);
166 new_types->reserve_exact (adj_len);
167 for (unsigned i = 0; i < adj_len ; i++)
168 {
169 ipa_adjusted_param *apm = &(*adj_params)[i];
170 if (apm->op == IPA_PARAM_OP_COPY)
171 {
172 unsigned index
173 = use_prev_indices ? apm->prev_clone_index : apm->base_index;
174 /* The following needs to be handled gracefully because of type
175 mismatches. This happens with LTO but apparently also in Fortran
176 with -fcoarray=lib -O2 -lcaf_single -latomic. */
177 if (index >= otypes->length ())
178 continue;
179 new_types->quick_push ((*otypes)[index]);
180 }
181 else if (apm->op == IPA_PARAM_OP_NEW
182 || apm->op == IPA_PARAM_OP_SPLIT)
183 {
184 tree ntype = apm->type;
185 if (is_gimple_reg_type (ntype)
186 && TYPE_MODE (ntype) != BLKmode)
187 {
188 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ntype));
189 if (TYPE_ALIGN (ntype) != malign)
190 ntype = build_aligned_type (ntype, malign);
191 }
192 new_types->quick_push (ntype);
193 }
194 else
195 gcc_unreachable ();
196 }
197 }
198
199 /* Build and return a function type just like ORIG_TYPE but with parameter
200 types given in NEW_PARAM_TYPES - which can be NULL if, but only if,
201 ORIG_TYPE itself has NULL TYPE_ARG_TYPES.  If METHOD2FUNC is true, also make
202 it a FUNCTION_TYPE instead of a METHOD_TYPE.  */
203
204 static tree
205 build_adjusted_function_type (tree orig_type, vec<tree> *new_param_types,
206 bool method2func, bool skip_return)
207 {
208 tree new_arg_types = NULL;
209 if (TYPE_ARG_TYPES (orig_type))
210 {
211 gcc_checking_assert (new_param_types);
212 bool last_parm_void = (TREE_VALUE (tree_last (TYPE_ARG_TYPES (orig_type)))
213 == void_type_node);
214 unsigned len = new_param_types->length ();
215 for (unsigned i = 0; i < len; i++)
216 new_arg_types = tree_cons (NULL_TREE, (*new_param_types)[i],
217 new_arg_types);
218
219 tree new_reversed = nreverse (new_arg_types);
220 if (last_parm_void)
221 {
222 if (new_reversed)
223 TREE_CHAIN (new_arg_types) = void_list_node;
224 else
225 new_reversed = void_list_node;
226 }
227 new_arg_types = new_reversed;
228 }
229
230 /* Use build_distinct_type_copy to preserve as much as possible from the
231 original type (debug info, attribute lists etc.).  The one exception is
232 METHOD_TYPEs, which must have a THIS argument; when we are asked to remove
233 it, we need to build a new FUNCTION_TYPE instead.  */
234 tree new_type = NULL;
235 if (method2func)
236 {
237 tree ret_type;
238 if (skip_return)
239 ret_type = void_type_node;
240 else
241 ret_type = TREE_TYPE (orig_type);
242
243 new_type
244 = build_distinct_type_copy (build_function_type (ret_type,
245 new_arg_types));
246 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
247 }
248 else
249 {
250 new_type = build_distinct_type_copy (orig_type);
251 TYPE_ARG_TYPES (new_type) = new_arg_types;
252 if (skip_return)
253 TREE_TYPE (new_type) = void_type_node;
254 }
255
256 return new_type;
257 }
258
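/* An illustrative sketch (assumed example): if ORIG_TYPE is the METHOD_TYPE of

     int S::get ();

   and the implicit this parameter does not survive the adjustments, passing
   METHOD2FUNC builds a FUNCTION_TYPE equivalent to that of

     int get ();

   and passing SKIP_RETURN as well turns the return type into void.  */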
259 /* Return the maximum index in any IPA_PARAM_OP_COPY adjustment or -1 if there
260 is none. */
261
262 int
263 ipa_param_adjustments::get_max_base_index ()
264 {
265 unsigned adj_len = vec_safe_length (m_adj_params);
266 int max_index = -1;
267 for (unsigned i = 0; i < adj_len ; i++)
268 {
269 ipa_adjusted_param *apm = &(*m_adj_params)[i];
270 if (apm->op == IPA_PARAM_OP_COPY
271 && max_index < apm->base_index)
272 max_index = apm->base_index;
273 }
274 return max_index;
275 }
276
277
278 /* Fill SURVIVING_PARAMS with an array of bools where each one says whether a
279 parameter that originally was at that position still survives in the given
280 clone or is removed/replaced. If the final array is smaller than an index
281 of an original parameter, that parameter also did not survive. That a
282 parameter survives does not mean it has the same index as before. */
283
284 void
285 ipa_param_adjustments::get_surviving_params (vec<bool> *surviving_params)
286 {
287 unsigned adj_len = vec_safe_length (m_adj_params);
288 int max_index = get_max_base_index ();
289
290 if (max_index < 0)
291 return;
292 surviving_params->reserve_exact (max_index + 1);
293 surviving_params->quick_grow_cleared (max_index + 1);
294 for (unsigned i = 0; i < adj_len ; i++)
295 {
296 ipa_adjusted_param *apm = &(*m_adj_params)[i];
297 if (apm->op == IPA_PARAM_OP_COPY)
298 (*surviving_params)[apm->base_index] = true;
299 }
300 }
301
302 /* Fill NEW_INDICES with new indices of each surviving parameter or -1 for
303 those which do not survive.  Any parameter beyond the length of the vector
304 does not survive. There is currently no support for a parameter to be
305 copied to two distinct new parameters. */
306
307 void
308 ipa_param_adjustments::get_updated_indices (vec<int> *new_indices)
309 {
310 unsigned adj_len = vec_safe_length (m_adj_params);
311 int max_index = get_max_base_index ();
312
313 if (max_index < 0)
314 return;
315 unsigned res_len = max_index + 1;
316 new_indices->reserve_exact (res_len);
317 for (unsigned i = 0; i < res_len ; i++)
318 new_indices->quick_push (-1);
319 for (unsigned i = 0; i < adj_len ; i++)
320 {
321 ipa_adjusted_param *apm = &(*m_adj_params)[i];
322 if (apm->op == IPA_PARAM_OP_COPY)
323 (*new_indices)[apm->base_index] = i;
324 }
325 }
326
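/* A small worked example (illustrative only): if a function originally had
   parameters (a, b, c) and the adjustments contain IPA_PARAM_OP_COPY entries
   only for a and c, then get_surviving_params fills {true, false, true} and
   get_updated_indices fills {0, -1, 1} -- c survives but now sits at index 1.  */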
327 /* Return the original index for the given new parameter index. Return a
328 negative number if not available. */
329
330 int
331 ipa_param_adjustments::get_original_index (int newidx)
332 {
333 const ipa_adjusted_param *adj = &(*m_adj_params)[newidx];
334 if (adj->op != IPA_PARAM_OP_COPY)
335 return -1;
336 return adj->base_index;
337 }
338
339 /* Return true if the first parameter (assuming there was one) survives the
340 transformation intact and remains the first one. */
341
342 bool
343 ipa_param_adjustments::first_param_intact_p ()
344 {
345 return (!vec_safe_is_empty (m_adj_params)
346 && (*m_adj_params)[0].op == IPA_PARAM_OP_COPY
347 && (*m_adj_params)[0].base_index == 0);
348 }
349
350 /* Return true if we have to change what has formerly been a method into a
351 function. */
352
353 bool
354 ipa_param_adjustments::method2func_p (tree orig_type)
355 {
356 return ((TREE_CODE (orig_type) == METHOD_TYPE) && !first_param_intact_p ());
357 }
358
359 /* Given function type OLD_TYPE, return a new type derived from it after
360 performing all stored modifications.  TYPE_ORIGINAL_P should be true when
361 OLD_TYPE refers to the type before any IPA transformations, as opposed to a
362 type that can be an intermediate one in between various IPA
363 transformations. */
364
365 tree
366 ipa_param_adjustments::build_new_function_type (tree old_type,
367 bool type_original_p)
368 {
369 auto_vec<tree,16> new_param_types, *new_param_types_p;
370 if (prototype_p (old_type))
371 {
372 auto_vec<tree, 16> otypes;
373 push_function_arg_types (&otypes, old_type);
374 fill_vector_of_new_param_types (&new_param_types, &otypes, m_adj_params,
375 !type_original_p);
376 new_param_types_p = &new_param_types;
377 }
378 else
379 new_param_types_p = NULL;
380
381 return build_adjusted_function_type (old_type, new_param_types_p,
382 method2func_p (old_type), m_skip_return);
383 }
384
385 /* Build variant of function decl ORIG_DECL which has no return value if
386 M_SKIP_RETURN is true and, if the types of ORIG_DECL's parameters are known,
387 has its type adjusted as indicated in M_ADJ_PARAMS.  Arguments from
388 DECL_ARGUMENTS list are not processed now, since they are linked by
389 TREE_CHAIN directly and not accessible in LTO during WPA. The caller is
390 responsible for eliminating them when clones are properly materialized. */
391
392 tree
393 ipa_param_adjustments::adjust_decl (tree orig_decl)
394 {
395 tree new_decl = copy_node (orig_decl);
396 tree orig_type = TREE_TYPE (orig_decl);
397 if (prototype_p (orig_type)
398 || (m_skip_return && !VOID_TYPE_P (TREE_TYPE (orig_type))))
399 {
400 tree new_type = build_new_function_type (orig_type, false);
401 TREE_TYPE (new_decl) = new_type;
402 }
403 if (method2func_p (orig_type))
404 DECL_VINDEX (new_decl) = NULL_TREE;
405
406 /* When signature changes, we need to clear builtin info. */
407 if (fndecl_built_in_p (new_decl))
408 set_decl_built_in_function (new_decl, NOT_BUILT_IN, 0);
409
410 DECL_VIRTUAL_P (new_decl) = 0;
411 DECL_LANG_SPECIFIC (new_decl) = NULL;
412
413 /* Drop MALLOC attribute for a void function. */
414 if (m_skip_return)
415 DECL_IS_MALLOC (new_decl) = 0;
416
417 return new_decl;
418 }
419
420 /* Wrapper around get_ref_base_and_extent_hwi for cases interesting for IPA-SRA
421 transformations. Return true if EXPR has an interesting form and fill in
422 *BASE_P and *UNIT_OFFSET_P with the appropriate info. */
423
424 static bool
425 isra_get_ref_base_and_offset (tree expr, tree *base_p, unsigned *unit_offset_p)
426 {
427 HOST_WIDE_INT offset, size;
428 bool reverse;
429 tree base
430 = get_ref_base_and_extent_hwi (expr, &offset, &size, &reverse);
431 if (!base || size < 0)
432 return false;
433
434 if ((offset % BITS_PER_UNIT) != 0)
435 return false;
436
437 if (TREE_CODE (base) == MEM_REF)
438 {
439 poly_int64 plmoff = mem_ref_offset (base).force_shwi ();
440 HOST_WIDE_INT moff;
441 bool is_cst = plmoff.is_constant (&moff);
442 if (!is_cst)
443 return false;
444 offset += moff * BITS_PER_UNIT;
445 base = TREE_OPERAND (base, 0);
446 }
447
448 if (offset < 0 || (offset / BITS_PER_UNIT) > UINT_MAX)
449 return false;
450
451 *base_p = base;
452 *unit_offset_p = offset / BITS_PER_UNIT;
453 return true;
454 }
455
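/* For illustration (a hedged example): for an expression such as s.inner.f,
   where field f ends up at byte offset 12 from the start of s, the function
   above sets *BASE_P to s and *UNIT_OFFSET_P to 12.  For MEM_REF based
   accesses like MEM[(struct S *)p_1 + 8].f it folds the MEM_REF offset into
   the unit offset and returns the pointer p_1 as the base.  */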
456 /* Return true if EXPR describes a transitive split (i.e. one that happened for
457 both the caller and the callee) as recorded in PERFORMED_SPLITS. In that
458 case, store the index of the respective record in PERFORMED_SPLITS into
459 *SM_IDX_P and the unit offset from all handled components in EXPR into
460 *UNIT_OFFSET_P. */
461
462 static bool
463 transitive_split_p (vec<ipa_param_performed_split, va_gc> *performed_splits,
464 tree expr, unsigned *sm_idx_p, unsigned *unit_offset_p)
465 {
466 tree base;
467 if (!isra_get_ref_base_and_offset (expr, &base, unit_offset_p))
468 return false;
469
470 if (TREE_CODE (base) == SSA_NAME)
471 {
472 base = SSA_NAME_VAR (base);
473 if (!base)
474 return false;
475 }
476
477 unsigned len = vec_safe_length (performed_splits);
478 for (unsigned i = 0 ; i < len; i++)
479 {
480 ipa_param_performed_split *sm = &(*performed_splits)[i];
481 if (sm->dummy_decl == base)
482 {
483 *sm_idx_p = i;
484 return true;
485 }
486 }
487 return false;
488 }
489
490 /* Structure to hold declarations representing transitive IPA-SRA splits. In
491 essence, if we need to pass UNIT_OFFSET of a parameter which originally has
492 number BASE_INDEX, we should pass down REPL. */
493
494 struct transitive_split_map
495 {
496 tree repl;
497 unsigned base_index;
498 unsigned unit_offset;
499 };
500
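/* A hedged illustration of a transitive split: suppose the current function is
   itself a clone in which parameter P was already split and is therefore
   represented by a dummy decl D plus replacement pieces, and it passes (a part
   of) the original *P on to a callee whose clone also wants only that piece.
   The argument then appears in the call as an expression based on D,
   transitive_split_p recognizes it through the dummy_decl recorded in
   PERFORMED_SPLITS, and the map above remembers which replacement REPL has to
   be passed for a given BASE_INDEX and UNIT_OFFSET.  D is a hypothetical name
   used only for this example.  */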
501 /* If call STMT contains any parameters representing transitive splits as
502 described by PERFORMED_SPLITS, return the number of extra parameters that
503 were added during clone materialization, and fill in INDEX_MAP with adjusted
504 indices of the corresponding original parameters and TRANS_MAP with
505 descriptions of all transitive replacements.  Otherwise return zero.  */
506
507 static unsigned
508 init_transitive_splits (vec<ipa_param_performed_split, va_gc> *performed_splits,
509 gcall *stmt, vec <unsigned> *index_map,
510 auto_vec <transitive_split_map> *trans_map)
511 {
512 unsigned phony_arguments = 0;
513 unsigned stmt_idx = 0, base_index = 0;
514 unsigned nargs = gimple_call_num_args (stmt);
515 while (stmt_idx < nargs)
516 {
517 unsigned unit_offset_delta;
518 tree base_arg = gimple_call_arg (stmt, stmt_idx);
519
520 if (phony_arguments > 0)
521 index_map->safe_push (stmt_idx);
522
523 unsigned sm_idx;
524 stmt_idx++;
525 if (transitive_split_p (performed_splits, base_arg, &sm_idx,
526 &unit_offset_delta))
527 {
528 if (phony_arguments == 0)
529 /* We have optimistically avoided constructing index_map so far but
530 now it is clear it will be necessary, so let's create the easy
531 bit we skipped until now. */
532 for (unsigned k = 0; k < stmt_idx; k++)
533 index_map->safe_push (k);
534
535 tree dummy = (*performed_splits)[sm_idx].dummy_decl;
536 for (unsigned j = sm_idx; j < performed_splits->length (); j++)
537 {
538 ipa_param_performed_split *caller_split
539 = &(*performed_splits)[j];
540 if (caller_split->dummy_decl != dummy)
541 break;
542
543 tree arg = gimple_call_arg (stmt, stmt_idx);
544 struct transitive_split_map tsm;
545 tsm.repl = arg;
546 tsm.base_index = base_index;
547 if (caller_split->unit_offset >= unit_offset_delta)
548 {
549 tsm.unit_offset
550 = (caller_split->unit_offset - unit_offset_delta);
551 trans_map->safe_push (tsm);
552 }
553
554 phony_arguments++;
555 stmt_idx++;
556 }
557 }
558 base_index++;
559 }
560 return phony_arguments;
561 }
562
563 /* Modify actual arguments of a function call in statement STMT, assuming it
564 calls CALLEE_DECL.  PERFORMED_SPLITS must describe the parameter splits
565 already carried out in the caller, or be NULL if there are none.  Return the new
566 statement that replaced the old one. When invoked, cfun and
567 current_function_decl have to be set to the caller. */
568
569 gcall *
570 ipa_param_adjustments::modify_call (gcall *stmt,
571 vec<ipa_param_performed_split,
572 va_gc> *performed_splits,
573 tree callee_decl, bool update_references)
574 {
575 unsigned len = vec_safe_length (m_adj_params);
576 auto_vec<tree, 16> vargs (len);
577 tree old_decl = gimple_call_fndecl (stmt);
578 unsigned old_nargs = gimple_call_num_args (stmt);
579 auto_vec<bool, 16> kept (old_nargs);
580 kept.quick_grow_cleared (old_nargs);
581
582 auto_vec <unsigned, 16> index_map;
583 auto_vec <transitive_split_map> trans_map;
584 bool transitive_remapping = false;
585
586 if (performed_splits)
587 {
588 unsigned removed = init_transitive_splits (performed_splits,
589 stmt, &index_map, &trans_map);
590 if (removed > 0)
591 {
592 transitive_remapping = true;
593 old_nargs -= removed;
594 }
595 }
596
597 cgraph_node *current_node = cgraph_node::get (current_function_decl);
598 if (update_references)
599 current_node->remove_stmt_references (stmt);
600
601 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
602 gimple_stmt_iterator prev_gsi = gsi;
603 gsi_prev (&prev_gsi);
604 for (unsigned i = 0; i < len; i++)
605 {
606 ipa_adjusted_param *apm = &(*m_adj_params)[i];
607 if (apm->op == IPA_PARAM_OP_COPY)
608 {
609 unsigned index = apm->base_index;
610 if (index >= old_nargs)
611 /* Can happen if the original call has argument mismatch,
612 ignore. */
613 continue;
614 if (transitive_remapping)
615 index = index_map[apm->base_index];
616
617 tree arg = gimple_call_arg (stmt, index);
618
619 vargs.quick_push (arg);
620 kept[index] = true;
621 continue;
622 }
623
624 /* At the moment the only user of IPA_PARAM_OP_NEW modifies calls itself.
625 If we ever want to support it during WPA IPA stage, we'll need a
626 mechanism to call into the IPA passes that introduced them. Currently
627 we simply mandate that IPA infrastructure understands all argument
628 modifications. Remember, edge redirection/modification is done only
629 once, not in steps for each pass modifying the callee like clone
630 materialization. */
631 gcc_assert (apm->op == IPA_PARAM_OP_SPLIT);
632
633 /* We have to handle transitive changes differently using the maps we
634 have created before. So look into them first. */
635 tree repl = NULL_TREE;
636 for (unsigned j = 0; j < trans_map.length (); j++)
637 if (trans_map[j].base_index == apm->base_index
638 && trans_map[j].unit_offset == apm->unit_offset)
639 {
640 repl = trans_map[j].repl;
641 break;
642 }
643 if (repl)
644 {
645 vargs.quick_push (repl);
646 continue;
647 }
648
649 unsigned index = apm->base_index;
650 if (index >= old_nargs)
651 /* Can happen if the original call has argument mismatch, ignore. */
652 continue;
653 if (transitive_remapping)
654 index = index_map[apm->base_index];
655 tree base = gimple_call_arg (stmt, index);
656
657 /* Since we create a new parameter out of the value of the old one, we can
658 do the following kinds of transformations:
659
660 - A scalar passed by reference, potentially as a part of a larger
661 aggregate, is converted to a scalar passed by value.
662
663 - A part of an aggregate is passed instead of the whole aggregate. */
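      /* A concrete illustration (hedged, not from the original sources): for a
	 call foo (&s) where the callee's clone only wants the int at byte
	 offset 4 of s, the code below rewrites the argument into (roughly) a
	 load from MEM[(int *)&s + 4] and passes that value instead of the
	 address of the whole object.  */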
664
665 location_t loc = gimple_location (stmt);
666 tree off;
667 bool deref_base = false;
668 unsigned int deref_align = 0;
669 if (TREE_CODE (base) != ADDR_EXPR
670 && is_gimple_reg_type (TREE_TYPE (base)))
671 {
672 /* Detect type mismatches in calls in invalid programs and make a
673 poor attempt to gracefully convert them so that we don't ICE. */
674 if (!POINTER_TYPE_P (TREE_TYPE (base)))
675 base = force_value_to_type (ptr_type_node, base);
676
677 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
678 }
679 else
680 {
681 bool addrof;
682 if (TREE_CODE (base) == ADDR_EXPR)
683 {
684 base = TREE_OPERAND (base, 0);
685 addrof = true;
686 }
687 else
688 addrof = false;
689
690 tree prev_base = base;
691 poly_int64 base_offset;
692 base = get_addr_base_and_unit_offset (base, &base_offset);
693
694 /* Aggregate arguments can have non-invariant addresses. */
695 if (!base)
696 {
697 base = build_fold_addr_expr (prev_base);
698 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
699 }
700 else if (TREE_CODE (base) == MEM_REF)
701 {
702 if (!addrof)
703 {
704 deref_base = true;
705 deref_align = TYPE_ALIGN (TREE_TYPE (base));
706 }
707 off = build_int_cst (apm->alias_ptr_type,
708 base_offset + apm->unit_offset);
709 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
710 off);
711 base = TREE_OPERAND (base, 0);
712 }
713 else
714 {
715 off = build_int_cst (apm->alias_ptr_type,
716 base_offset + apm->unit_offset);
717 base = build_fold_addr_expr (base);
718 }
719 }
720
721 tree type = apm->type;
722 unsigned int align;
723 unsigned HOST_WIDE_INT misalign;
724
725 if (deref_base)
726 {
727 align = deref_align;
728 misalign = 0;
729 }
730 else
731 {
732 get_pointer_alignment_1 (base, &align, &misalign);
733 /* All users must make sure that we can be optimistic when it
734 comes to alignment in this case (by inspecting the final users
735 of these new parameters). */
736 if (TYPE_ALIGN (type) > align)
737 align = TYPE_ALIGN (type);
738 }
739 misalign
740 += (offset_int::from (wi::to_wide (off), SIGNED).to_short_addr ()
741 * BITS_PER_UNIT);
742 misalign = misalign & (align - 1);
743 if (misalign != 0)
744 align = least_bit_hwi (misalign);
745 if (align < TYPE_ALIGN (type))
746 type = build_aligned_type (type, align);
747 base = force_gimple_operand_gsi (&gsi, base,
748 true, NULL, true, GSI_SAME_STMT);
749 tree expr = fold_build2_loc (loc, MEM_REF, type, base, off);
750 REF_REVERSE_STORAGE_ORDER (expr) = apm->reverse;
751 /* If expr is not a valid gimple call argument emit
752 a load into a temporary. */
753 if (is_gimple_reg_type (TREE_TYPE (expr)))
754 {
755 gimple *tem = gimple_build_assign (NULL_TREE, expr);
756 if (gimple_in_ssa_p (cfun))
757 {
758 gimple_set_vuse (tem, gimple_vuse (stmt));
759 expr = make_ssa_name (TREE_TYPE (expr), tem);
760 }
761 else
762 expr = create_tmp_reg (TREE_TYPE (expr));
763 gimple_assign_set_lhs (tem, expr);
764 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
765 }
766 vargs.quick_push (expr);
767 }
768
769 if (m_always_copy_start >= 0)
770 for (unsigned i = m_always_copy_start; i < old_nargs; i++)
771 vargs.safe_push (gimple_call_arg (stmt, i));
772
773 /* For optimized away parameters, add on the caller side
774 before the call
775 DEBUG D#X => parm_Y(D)
776 stmts and associate D#X with parm in decl_debug_args_lookup
777 vector to say for debug info that if parameter parm had been passed,
778 it would have value parm_Y(D). */
779 if (MAY_HAVE_DEBUG_BIND_STMTS && old_decl && callee_decl)
780 {
781 vec<tree, va_gc> **debug_args = NULL;
782 unsigned i = 0;
783 for (tree old_parm = DECL_ARGUMENTS (old_decl);
784 old_parm && i < old_nargs && ((int) i) < m_always_copy_start;
785 old_parm = DECL_CHAIN (old_parm), i++)
786 {
787 if (!is_gimple_reg (old_parm) || kept[i])
788 continue;
789 tree origin = DECL_ORIGIN (old_parm);
790 tree arg = gimple_call_arg (stmt, i);
791
792 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
793 {
794 if (!fold_convertible_p (TREE_TYPE (origin), arg))
795 continue;
796 tree rhs1;
797 if (TREE_CODE (arg) == SSA_NAME
798 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
799 && (rhs1
800 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
801 && useless_type_conversion_p (TREE_TYPE (origin),
802 TREE_TYPE (rhs1)))
803 arg = rhs1;
804 else
805 arg = fold_convert_loc (gimple_location (stmt),
806 TREE_TYPE (origin), arg);
807 }
808 if (debug_args == NULL)
809 debug_args = decl_debug_args_insert (callee_decl);
810 unsigned int ix;
811 tree ddecl = NULL_TREE;
812 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
813 if (ddecl == origin)
814 {
815 ddecl = (**debug_args)[ix + 1];
816 break;
817 }
818 if (ddecl == NULL)
819 {
820 ddecl = make_node (DEBUG_EXPR_DECL);
821 DECL_ARTIFICIAL (ddecl) = 1;
822 TREE_TYPE (ddecl) = TREE_TYPE (origin);
823 SET_DECL_MODE (ddecl, DECL_MODE (origin));
824
825 vec_safe_push (*debug_args, origin);
826 vec_safe_push (*debug_args, ddecl);
827 }
828 gimple *def_temp = gimple_build_debug_bind (ddecl,
829 unshare_expr (arg), stmt);
830 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
831 }
832 }
833
834 if (dump_file && (dump_flags & TDF_DETAILS))
835 {
836 fprintf (dump_file, "replacing stmt:");
837 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
838 }
839
840 gcall *new_stmt = gimple_build_call_vec (callee_decl, vargs);
841
842 if (tree lhs = gimple_call_lhs (stmt))
843 {
844 if (!m_skip_return)
845 gimple_call_set_lhs (new_stmt, lhs);
846 else if (TREE_CODE (lhs) == SSA_NAME)
847 {
848 /* LHS should now be a default-def SSA.  Unfortunately default-def
849 SSA_NAMEs need a backing variable (or at least some code examining
850 SSAs assumes it is non-NULL).  So we either have to re-use the
851 decl we have at hand or introduce a new one.  */
852 tree repl = create_tmp_var (TREE_TYPE (lhs), "removed_return");
853 repl = get_or_create_ssa_default_def (cfun, repl);
854 SSA_NAME_IS_DEFAULT_DEF (repl) = true;
855 imm_use_iterator ui;
856 use_operand_p use_p;
857 gimple *using_stmt;
858 FOR_EACH_IMM_USE_STMT (using_stmt, ui, lhs)
859 {
860 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
861 {
862 SET_USE (use_p, repl);
863 }
864 update_stmt (using_stmt);
865 }
866 }
867 }
868
869 gimple_set_block (new_stmt, gimple_block (stmt));
870 if (gimple_has_location (stmt))
871 gimple_set_location (new_stmt, gimple_location (stmt));
872 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
873 gimple_call_copy_flags (new_stmt, stmt);
874 if (gimple_in_ssa_p (cfun))
875 gimple_move_vops (new_stmt, stmt);
876
877 if (dump_file && (dump_flags & TDF_DETAILS))
878 {
879 fprintf (dump_file, "with stmt:");
880 print_gimple_stmt (dump_file, new_stmt, 0);
881 fprintf (dump_file, "\n");
882 }
883 gsi_replace (&gsi, new_stmt, true);
884 if (update_references)
885 do
886 {
887 current_node->record_stmt_references (gsi_stmt (gsi));
888 gsi_prev (&gsi);
889 }
890 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
891 return new_stmt;
892 }
893
894 /* Dump information contained in the object in textual form to F. */
895
896 void
897 ipa_param_adjustments::dump (FILE *f)
898 {
899 fprintf (f, " m_always_copy_start: %i\n", m_always_copy_start);
900 ipa_dump_adjusted_parameters (f, m_adj_params);
901 if (m_skip_return)
902 fprintf (f, " Will SKIP return.\n");
903 }
904
905 /* Dump information contained in the object in textual form to stderr. */
906
907 void
908 ipa_param_adjustments::debug ()
909 {
910 dump (stderr);
911 }
912
913 /* Register that REPLACEMENT should replace the parameter described in APM.
914 DUMMY, if non-NULL, marks transitive splits across calls.  */
915
916 void
917 ipa_param_body_adjustments::register_replacement (ipa_adjusted_param *apm,
918 tree replacement,
919 tree dummy)
920 {
921 gcc_checking_assert (apm->op == IPA_PARAM_OP_SPLIT
922 || apm->op == IPA_PARAM_OP_NEW);
923 gcc_checking_assert (!apm->prev_clone_adjustment);
924 ipa_param_body_replacement psr;
925 psr.base = m_oparms[apm->prev_clone_index];
926 psr.repl = replacement;
927 psr.dummy = dummy;
928 psr.unit_offset = apm->unit_offset;
929 m_replacements.safe_push (psr);
930 }
931
932 /* Copy or not, as appropriate given m_id and decl context, a pre-existing
933 PARM_DECL T so that it can be included in the parameters of the modified
934 function. */
935
936 tree
937 ipa_param_body_adjustments::carry_over_param (tree t)
938 {
939 tree new_parm;
940 if (m_id)
941 {
942 new_parm = remap_decl (t, m_id);
943 if (TREE_CODE (new_parm) != PARM_DECL)
944 new_parm = m_id->copy_decl (t, m_id);
945 }
946 else if (DECL_CONTEXT (t) != m_fndecl)
947 {
948 new_parm = copy_node (t);
949 DECL_CONTEXT (new_parm) = m_fndecl;
950 }
951 else
952 new_parm = t;
953 return new_parm;
954 }
955
956 /* Common initialization performed by all ipa_param_body_adjustments
957 constructors. OLD_FNDECL is the declaration we take original arguments
958 from (it may be the same as M_FNDECL).  VARS, if non-NULL, is a pointer to
959 a chained list of new local variables. TREE_MAP is the IPA-CP produced
960 mapping of trees to constants.
961
962 The function is rather long but it really only initializes all data members
963 of the class.  It creates new param DECLs and finds their new types.  */
964
965 void
966 ipa_param_body_adjustments::common_initialization (tree old_fndecl,
967 tree *vars,
968 vec<ipa_replace_map *,
969 va_gc> *tree_map)
970 {
971 push_function_arg_decls (&m_oparms, old_fndecl);
972 auto_vec<tree,16> otypes;
973 if (TYPE_ARG_TYPES (TREE_TYPE (old_fndecl)) != NULL_TREE)
974 push_function_arg_types (&otypes, TREE_TYPE (old_fndecl));
975 else
976 {
977 auto_vec<tree,16> oparms;
978 push_function_arg_decls (&oparms, old_fndecl);
979 unsigned ocount = oparms.length ();
980 otypes.reserve_exact (ocount);
981 for (unsigned i = 0; i < ocount; i++)
982 otypes.quick_push (TREE_TYPE (oparms[i]));
983 }
984 fill_vector_of_new_param_types (&m_new_types, &otypes, m_adj_params, true);
985
986 auto_vec<bool, 16> kept;
987 kept.reserve_exact (m_oparms.length ());
988 kept.quick_grow_cleared (m_oparms.length ());
989 auto_vec<tree, 16> isra_dummy_decls;
990 isra_dummy_decls.reserve_exact (m_oparms.length ());
991 isra_dummy_decls.quick_grow_cleared (m_oparms.length ());
992
993 unsigned adj_len = vec_safe_length (m_adj_params);
994 m_method2func = ((TREE_CODE (TREE_TYPE (m_fndecl)) == METHOD_TYPE)
995 && (adj_len == 0
996 || (*m_adj_params)[0].op != IPA_PARAM_OP_COPY
997 || (*m_adj_params)[0].base_index != 0));
998
999 /* The main job of this function is to go over the vector of adjusted
1000 parameters and create declarations or find corresponding old ones and push
1001 them to m_new_decls. For IPA-SRA replacements it also creates
1002 corresponding m_id->dst_node->clone.performed_splits entries. */
1003
1004 m_new_decls.reserve_exact (adj_len);
1005 for (unsigned i = 0; i < adj_len ; i++)
1006 {
1007 ipa_adjusted_param *apm = &(*m_adj_params)[i];
1008 unsigned prev_index = apm->prev_clone_index;
1009 tree new_parm;
1010 if (apm->op == IPA_PARAM_OP_COPY
1011 || apm->prev_clone_adjustment)
1012 {
1013 kept[prev_index] = true;
1014 new_parm = carry_over_param (m_oparms[prev_index]);
1015 m_new_decls.quick_push (new_parm);
1016 }
1017 else if (apm->op == IPA_PARAM_OP_NEW
1018 || apm->op == IPA_PARAM_OP_SPLIT)
1019 {
1020 tree new_type = m_new_types[i];
1021 gcc_checking_assert (new_type);
1022 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
1023 new_type);
1024 const char *prefix = ipa_param_prefixes[apm->param_prefix_index];
1025 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
1026 DECL_ARTIFICIAL (new_parm) = 1;
1027 DECL_ARG_TYPE (new_parm) = new_type;
1028 DECL_CONTEXT (new_parm) = m_fndecl;
1029 TREE_USED (new_parm) = 1;
1030 DECL_IGNORED_P (new_parm) = 1;
1031 layout_decl (new_parm, 0);
1032 m_new_decls.quick_push (new_parm);
1033
1034 if (apm->op == IPA_PARAM_OP_SPLIT)
1035 {
1036 m_split_modifications_p = true;
1037
1038 if (m_id)
1039 {
1040 tree dummy_decl;
1041 if (!isra_dummy_decls[prev_index])
1042 {
1043 dummy_decl = copy_decl_to_var (m_oparms[prev_index],
1044 m_id);
1045 /* Any attempt to remap this dummy in this particular
1046 instance of clone materialization should yield
1047 itself. */
1048 insert_decl_map (m_id, dummy_decl, dummy_decl);
1049
1050 DECL_CHAIN (dummy_decl) = *vars;
1051 *vars = dummy_decl;
1052 isra_dummy_decls[prev_index] = dummy_decl;
1053 }
1054 else
1055 dummy_decl = isra_dummy_decls[prev_index];
1056
1057 register_replacement (apm, new_parm, dummy_decl);
1058 ipa_param_performed_split ps;
1059 ps.dummy_decl = dummy_decl;
1060 ps.unit_offset = apm->unit_offset;
1061 vec_safe_push (m_id->dst_node->clone.performed_splits, ps);
1062 }
1063 else
1064 register_replacement (apm, new_parm);
1065 }
1066 }
1067 else
1068 gcc_unreachable ();
1069 }
1070
1071
1072 /* As part of body modifications, we will also have to replace remaining uses
1073 of removed PARM_DECLs (which do not, however, use the
1074 initial value) with their VAR_DECL copies.
1075
1076 We do this differently with and without m_id. With m_id, we rely on its
1077 mapping and create a replacement straight away. Without it, we have our
1078 own mechanism for which we have to populate m_removed_decls vector. Just
1079 don't mix them, that is why you should not call
1080 replace_removed_params_ssa_names or perform_cfun_body_modifications when
1081 you construct with ID not equal to NULL. */
1082
1083 unsigned op_len = m_oparms.length ();
1084 for (unsigned i = 0; i < op_len; i++)
1085 if (!kept[i])
1086 {
1087 if (m_id)
1088 {
1089 if (!m_id->decl_map->get (m_oparms[i]))
1090 {
1091 /* TODO: Perhaps at least aggregate-type params could re-use
1092 their isra_dummy_decl here? */
1093 tree var = copy_decl_to_var (m_oparms[i], m_id);
1094 insert_decl_map (m_id, m_oparms[i], var);
1095 /* Declare this new variable. */
1096 DECL_CHAIN (var) = *vars;
1097 *vars = var;
1098 }
1099 }
1100 else
1101 {
1102 m_removed_decls.safe_push (m_oparms[i]);
1103 m_removed_map.put (m_oparms[i], m_removed_decls.length () - 1);
1104 }
1105 }
1106
1107 if (!MAY_HAVE_DEBUG_STMTS)
1108 return;
1109
1110 /* Finally, when generating debug info, we fill vector m_reset_debug_decls
1111 with removed parameters declarations. We do this in order to re-map their
1112 debug bind statements and create debug decls for them. */
1113
1114 if (tree_map)
1115 {
1116 /* Do not output debuginfo for parameter declarations as if they vanished
1117 when they were in fact replaced by a constant. */
1118 auto_vec <int, 16> index_mapping;
1119 bool need_remap = false;
1120
1121 if (m_id && m_id->src_node->clone.param_adjustments)
1122 {
1123 ipa_param_adjustments *prev_adjustments
1124 = m_id->src_node->clone.param_adjustments;
1125 prev_adjustments->get_updated_indices (&index_mapping);
1126 need_remap = true;
1127 }
1128
1129 for (unsigned i = 0; i < tree_map->length (); i++)
1130 {
1131 int parm_num = (*tree_map)[i]->parm_num;
1132 gcc_assert (parm_num >= 0);
1133 if (need_remap)
1134 parm_num = index_mapping[parm_num];
1135 kept[parm_num] = true;
1136 }
1137 }
1138
1139 for (unsigned i = 0; i < op_len; i++)
1140 if (!kept[i] && is_gimple_reg (m_oparms[i]))
1141 m_reset_debug_decls.safe_push (m_oparms[i]);
1142 }
1143
1144 /* Constructor of ipa_param_body_adjustments from a simple list of
1145 modifications to parameters listed in ADJ_PARAMS which will prepare the
1146 ground for modification of the parameters of FNDECL.  The return value of
1147 the function will not be removed and the object will assume it does not run
1148 as a part of tree_function_versioning.  */
1149
1150 ipa_param_body_adjustments
1151 ::ipa_param_body_adjustments (vec<ipa_adjusted_param, va_gc> *adj_params,
1152 tree fndecl)
1153 : m_adj_params (adj_params), m_adjustments (NULL), m_reset_debug_decls (),
1154 m_split_modifications_p (false), m_fndecl (fndecl), m_id (NULL),
1155 m_oparms (), m_new_decls (), m_new_types (), m_replacements (),
1156 m_removed_decls (), m_removed_map (), m_method2func (false)
1157 {
1158 common_initialization (fndecl, NULL, NULL);
1159 }
1160
1161 /* Constructor of ipa_param_body_adjustments from ipa_param_adjustments in
1162 ADJUSTMENTS which will prepare the ground for modification of the parameters
1163 of FNDECL.  The object will assume it does not run as a part of
1164 tree_function_versioning.  */
1165
1166 ipa_param_body_adjustments
1167 ::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
1168 tree fndecl)
1169 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
1170 m_reset_debug_decls (), m_split_modifications_p (false), m_fndecl (fndecl),
1171 m_id (NULL), m_oparms (), m_new_decls (), m_new_types (),
1172 m_replacements (), m_removed_decls (), m_removed_map (),
1173 m_method2func (false)
1174 {
1175 common_initialization (fndecl, NULL, NULL);
1176 }
1177
1178 /* Constructor of ipa_param_body_adjustments which sets it up as a part of
1179 running tree_function_versioning. Planned modifications to the function are
1180 in ADJUSTMENTS. FNDECL designates the new function clone which is being
1181 modified. OLD_FNDECL is the function of which FNDECL is a clone (and which
1182 at the time of invocation still shares DECL_ARGUMENTS with it).  ID is the
1183 copy_body_data structure driving the whole body copying process.  VARS is a
1184 pointer to the head of the list of new local variables, TREE_MAP is the map
1185 that drives tree substitution in the cloning process. */
1186
1187 ipa_param_body_adjustments
1188 ::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
1189 tree fndecl, tree old_fndecl,
1190 copy_body_data *id, tree *vars,
1191 vec<ipa_replace_map *, va_gc> *tree_map)
1192 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
1193 m_reset_debug_decls (), m_split_modifications_p (false), m_fndecl (fndecl),
1194 m_id (id), m_oparms (), m_new_decls (), m_new_types (), m_replacements (),
1195 m_removed_decls (), m_removed_map (), m_method2func (false)
1196 {
1197 common_initialization (old_fndecl, vars, tree_map);
1198 }
1199
1200 /* Chain new param decls up and return them. */
1201
1202 tree
1203 ipa_param_body_adjustments::get_new_param_chain ()
1204 {
1205 tree result;
1206 tree *link = &result;
1207
1208 unsigned len = vec_safe_length (m_adj_params);
1209 for (unsigned i = 0; i < len; i++)
1210 {
1211 tree new_decl = m_new_decls[i];
1212 *link = new_decl;
1213 link = &DECL_CHAIN (new_decl);
1214 }
1215 *link = NULL_TREE;
1216 return result;
1217 }
1218
1219 /* Modify the parameters of function FNDECL and its type according to the plan
1220 in ADJUSTMENTS.  This function needs to be called when the decl has not
1221 already been processed with ipa_param_adjustments::adjust_decl; otherwise
1222 just setting DECL_ARGUMENTS to whatever get_new_param_chain returns is enough.  */
1223
1224 void
1225 ipa_param_body_adjustments::modify_formal_parameters ()
1226 {
1227 tree orig_type = TREE_TYPE (m_fndecl);
1228 DECL_ARGUMENTS (m_fndecl) = get_new_param_chain ();
1229
1230 /* When signature changes, we need to clear builtin info. */
1231 if (fndecl_built_in_p (m_fndecl))
1232 set_decl_built_in_function (m_fndecl, NOT_BUILT_IN, 0);
1233
1234 /* At this point, removing return value is only implemented when going
1235 through tree_function_versioning, not when modifying function body
1236 directly. */
1237 gcc_assert (!m_adjustments || !m_adjustments->m_skip_return);
1238 tree new_type = build_adjusted_function_type (orig_type, &m_new_types,
1239 m_method2func, false);
1240
1241 TREE_TYPE (m_fndecl) = new_type;
1242 DECL_VIRTUAL_P (m_fndecl) = 0;
1243 DECL_LANG_SPECIFIC (m_fndecl) = NULL;
1244 if (m_method2func)
1245 DECL_VINDEX (m_fndecl) = NULL_TREE;
1246 }
1247
1248 /* Given BASE and UNIT_OFFSET, find the corresponding record among replacement
1249 structures. */
1250
1251 ipa_param_body_replacement *
1252 ipa_param_body_adjustments::lookup_replacement_1 (tree base,
1253 unsigned unit_offset)
1254 {
1255 unsigned int len = m_replacements.length ();
1256 for (unsigned i = 0; i < len; i++)
1257 {
1258 ipa_param_body_replacement *pbr = &m_replacements[i];
1259
1260 if (pbr->base == base
1261 && (pbr->unit_offset == unit_offset))
1262 return pbr;
1263 }
1264 return NULL;
1265 }
1266
1267 /* Given BASE and UNIT_OFFSET, find the corresponding replacement expression
1268 and return it, assuming it is known it does not hold value by reference or
1269 in reverse storage order. */
1270
1271 tree
1272 ipa_param_body_adjustments::lookup_replacement (tree base, unsigned unit_offset)
1273 {
1274 ipa_param_body_replacement *pbr = lookup_replacement_1 (base, unit_offset);
1275 if (!pbr)
1276 return NULL;
1277 return pbr->repl;
1278 }
1279
1280 /* If T is an SSA_NAME, return NULL if it is not a default def or
1281 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
1282 the base variable is always returned, regardless of whether it is a default
1283 def. Return T if it is not an SSA_NAME. */
1284
1285 static tree
1286 get_ssa_base_param (tree t, bool ignore_default_def)
1287 {
1288 if (TREE_CODE (t) == SSA_NAME)
1289 {
1290 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
1291 return SSA_NAME_VAR (t);
1292 else
1293 return NULL_TREE;
1294 }
1295 return t;
1296 }
1297
1298 /* Given an expression, return the structure describing how it should be
1299 replaced if it accesses a part of a split parameter or NULL otherwise.
1300
1301 Do not free the result, it will be deallocated when the object is destroyed.
1302
1303 If IGNORE_DEFAULT_DEF is cleared, consider only SSA_NAMEs of PARM_DECLs
1304 which are default definitions; if set, consider all SSA_NAMEs of
1305 PARM_DECLs. */
1306
1307 ipa_param_body_replacement *
1308 ipa_param_body_adjustments::get_expr_replacement (tree expr,
1309 bool ignore_default_def)
1310 {
1311 tree base;
1312 unsigned unit_offset;
1313
1314 if (!isra_get_ref_base_and_offset (expr, &base, &unit_offset))
1315 return NULL;
1316
1317 base = get_ssa_base_param (base, ignore_default_def);
1318 if (!base || TREE_CODE (base) != PARM_DECL)
1319 return NULL;
1320 return lookup_replacement_1 (base, unit_offset);
1321 }
1322
1323 /* Given OLD_DECL, which is a PARM_DECL of a parameter that is being removed
1324 (which includes it being split or replaced), return a new variable that
1325 should be used for any SSA names that will remain in the function that
1326 previously belonged to OLD_DECL. */
1327
1328 tree
1329 ipa_param_body_adjustments::get_replacement_ssa_base (tree old_decl)
1330 {
1331 unsigned *idx = m_removed_map.get (old_decl);
1332 if (!idx)
1333 return NULL;
1334
1335 tree repl;
1336 if (TREE_CODE (m_removed_decls[*idx]) == PARM_DECL)
1337 {
1338 gcc_assert (m_removed_decls[*idx] == old_decl);
1339 repl = copy_var_decl (old_decl, DECL_NAME (old_decl),
1340 TREE_TYPE (old_decl));
1341 m_removed_decls[*idx] = repl;
1342 }
1343 else
1344 repl = m_removed_decls[*idx];
1345 return repl;
1346 }
1347
1348 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
1349 parameter which is to be removed because its value is not used, create a new
1350 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
1351 original with it and return it.  If there is no need to re-map, return
1352 NULL.  */
1353
1354 tree
1355 ipa_param_body_adjustments::replace_removed_params_ssa_names (tree old_name,
1356 gimple *stmt)
1357 {
1358 gcc_assert (!m_id);
1359 if (TREE_CODE (old_name) != SSA_NAME)
1360 return NULL;
1361
1362 tree decl = SSA_NAME_VAR (old_name);
1363 if (decl == NULL_TREE
1364 || TREE_CODE (decl) != PARM_DECL)
1365 return NULL;
1366
1367 tree repl = get_replacement_ssa_base (decl);
1368 if (!repl)
1369 return NULL;
1370
1371 tree new_name = make_ssa_name (repl, stmt);
1372 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
1373 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
1374
1375 if (dump_file && (dump_flags & TDF_DETAILS))
1376 {
1377 fprintf (dump_file, "replacing an SSA name of a removed param ");
1378 print_generic_expr (dump_file, old_name);
1379 fprintf (dump_file, " with ");
1380 print_generic_expr (dump_file, new_name);
1381 fprintf (dump_file, "\n");
1382 }
1383
1384 replace_uses_by (old_name, new_name);
1385 return new_name;
1386 }
1387
1388 /* If the expression *EXPR_P should be replaced, do so. CONVERT specifies
1389 whether the function should care about type incompatibility of the current
1390 and new expressions. If it is false, the function will leave
1391 incompatibility issues to the caller - note that when the function
1392 encounters a BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR, it will modify
1393 their bases instead of the expressions themselves and then also perform any
1394 necessary conversions. */
1395
1396 bool
1397 ipa_param_body_adjustments::modify_expression (tree *expr_p, bool convert)
1398 {
1399 tree expr = *expr_p;
1400
1401 if (TREE_CODE (expr) == BIT_FIELD_REF
1402 || TREE_CODE (expr) == IMAGPART_EXPR
1403 || TREE_CODE (expr) == REALPART_EXPR)
1404 {
1405 expr_p = &TREE_OPERAND (expr, 0);
1406 expr = *expr_p;
1407 convert = true;
1408 }
1409
1410 ipa_param_body_replacement *pbr = get_expr_replacement (expr, false);
1411 if (!pbr)
1412 return false;
1413
1414 tree repl = pbr->repl;
1415 if (dump_file && (dump_flags & TDF_DETAILS))
1416 {
1417 fprintf (dump_file, "About to replace expr ");
1418 print_generic_expr (dump_file, expr);
1419 fprintf (dump_file, " with ");
1420 print_generic_expr (dump_file, repl);
1421 fprintf (dump_file, "\n");
1422 }
1423
1424 if (convert && !useless_type_conversion_p (TREE_TYPE (expr),
1425 TREE_TYPE (repl)))
1426 {
1427 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expr), repl);
1428 *expr_p = vce;
1429 }
1430 else
1431 *expr_p = repl;
1432 return true;
1433 }
1434
1435 /* If the assignment statement STMT contains any expressions that need to be
1436 replaced with a different one as noted by ADJUSTMENTS, do so.  Handle any
1437 potential type incompatibilities.  If any conversion statements have to be
1438 pre-pended to STMT, they will be added to EXTRA_STMTS. Return true iff the
1439 statement was modified. */
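/* For instance (an assumed scenario): if a split piece of an aggregate
   parameter is now represented by a scalar replacement whose type differs from
   that of the original access, the VIEW_CONVERT_EXPR path below papers over
   the mismatch, while CONSTRUCTOR right-hand sides are instead rebuilt as an
   empty constructor (or zero) of the left-hand side's type because V_C_Es of
   constructors cause trouble (see PR 42714 referenced below).  */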
1440
1441 bool
1442 ipa_param_body_adjustments::modify_assignment (gimple *stmt,
1443 gimple_seq *extra_stmts)
1444 {
1445 tree *lhs_p, *rhs_p;
1446 bool any;
1447
1448 if (!gimple_assign_single_p (stmt))
1449 return false;
1450
1451 rhs_p = gimple_assign_rhs1_ptr (stmt);
1452 lhs_p = gimple_assign_lhs_ptr (stmt);
1453
1454 any = modify_expression (lhs_p, false);
1455 any |= modify_expression (rhs_p, false);
1456 if (any
1457 && !useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
1458 {
1459 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
1460 {
1461 /* V_C_Es of constructors can cause trouble (PR 42714). */
1462 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
1463 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
1464 else
1465 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
1466 NULL);
1467 }
1468 else
1469 {
1470 tree new_rhs = fold_build1_loc (gimple_location (stmt),
1471 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
1472 *rhs_p);
1473 tree tmp = force_gimple_operand (new_rhs, extra_stmts, true,
1474 NULL_TREE);
1475 gimple_assign_set_rhs1 (stmt, tmp);
1476 }
1477 return true;
1478 }
1479
1480 return any;
1481 }
1482
1483 /* Data passed to remap_split_decl_to_dummy through walk_tree. */
1484
1485 struct simple_tree_swap_info
1486 {
1487 /* Change FROM to TO. */
1488 tree from, to;
1489 /* And set DONE to true when doing so. */
1490 bool done;
1491 };
1492
1493 /* Simple remapper to remap a split parameter to the same expression based on a
1494 special dummy decl so that edge redirections can detect transitive splitting
1495 and finish them. */
1496
1497 static tree
1498 remap_split_decl_to_dummy (tree *tp, int *walk_subtrees, void *data)
1499 {
1500 tree t = *tp;
1501
1502 if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
1503 {
1504 struct simple_tree_swap_info *swapinfo
1505 = (struct simple_tree_swap_info *) data;
1506 if (t == swapinfo->from
1507 || (TREE_CODE (t) == SSA_NAME
1508 && SSA_NAME_VAR (t) == swapinfo->from))
1509 {
1510 *tp = swapinfo->to;
1511 swapinfo->done = true;
1512 }
1513 *walk_subtrees = 0;
1514 }
1515 else if (TYPE_P (t))
1516 *walk_subtrees = 0;
1517 else
1518 *walk_subtrees = 1;
1519 return NULL_TREE;
1520 }
1521
1522
1523 /* If the call statement pointed at by STMT_P contains any expressions that
1524 need to be replaced with a different one as noted by ADJUSTMENTS, do so.  If
1525 the statement needs to be rebuilt, do so.  Return true if any modifications have
1526 been performed.
1527
1528 If the method is invoked as a part of IPA clone materialization and if any
1529 parameter split is transitive, i.e. it applies to the function that is being
1530 modified and also to the callee of the statement, replace the parameter
1531 passed to the old callee with an equivalent expression based on a dummy decl
1532 followed by PARM_DECLs representing the actual replacements. The actual
1533 replacements will be then converted into SSA_NAMEs and then
1534 ipa_param_adjustments::modify_call will find the appropriate ones and leave
1535 only those in the call. */
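/* A hedged example of the transitive case: if parameter P of this clone was
   split into pieces ISRA.1 and ISRA.2 and a statement passes P itself through
   to a callee that is split the same way, the call is temporarily rewritten
   as, roughly, bar (D, ISRA.1, ISRA.2), where D is a dummy decl standing in
   for P and bar is a hypothetical callee; ipa_param_adjustments::modify_call
   later drops D and keeps only the pieces the callee clone actually
   expects.  */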
1536
1537 bool
1538 ipa_param_body_adjustments::modify_call_stmt (gcall **stmt_p)
1539 {
1540 gcall *stmt = *stmt_p;
1541 auto_vec <unsigned, 4> pass_through_args;
1542 auto_vec <unsigned, 4> pass_through_pbr_indices;
1543
1544 if (m_split_modifications_p && m_id)
1545 {
1546 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1547 {
1548 tree t = gimple_call_arg (stmt, i);
1549 gcc_assert (TREE_CODE (t) != BIT_FIELD_REF
1550 && TREE_CODE (t) != IMAGPART_EXPR
1551 && TREE_CODE (t) != REALPART_EXPR);
1552
1553 tree base;
1554 unsigned unit_offset;
1555 if (!isra_get_ref_base_and_offset (t, &base, &unit_offset))
1556 continue;
1557
1558 bool by_ref = false;
1559 if (TREE_CODE (base) == SSA_NAME)
1560 {
1561 if (!SSA_NAME_IS_DEFAULT_DEF (base))
1562 continue;
1563 base = SSA_NAME_VAR (base);
1564 gcc_checking_assert (base);
1565 by_ref = true;
1566 }
1567 if (TREE_CODE (base) != PARM_DECL)
1568 continue;
1569
1570 bool base_among_replacements = false;
1571 unsigned j, repl_list_len = m_replacements.length ();
1572 for (j = 0; j < repl_list_len; j++)
1573 {
1574 ipa_param_body_replacement *pbr = &m_replacements[j];
1575 if (pbr->base == base)
1576 {
1577 base_among_replacements = true;
1578 break;
1579 }
1580 }
1581 if (!base_among_replacements)
1582 continue;
1583
1584 /* We still have to distinguish between an end-use that we have to
1585 transform now and a pass-through, which happens in the following
1586 two cases. */
1587
1588 /* TODO: After we adjust ptr_parm_has_nonarg_uses to also consider
1589 &MEM_REF[ssa_name + offset], we will also have to detect that case
1590 here. */
1591
1592 if (TREE_CODE (t) == SSA_NAME
1593 && SSA_NAME_IS_DEFAULT_DEF (t)
1594 && SSA_NAME_VAR (t)
1595 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL)
1596 {
1597 /* This must be a by_reference pass-through. */
1598 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
1599 pass_through_args.safe_push (i);
1600 pass_through_pbr_indices.safe_push (j);
1601 }
1602 else if (!by_ref && AGGREGATE_TYPE_P (TREE_TYPE (t)))
1603 {
1604 /* Currently IPA-SRA guarantees the aggregate access type
1605 exactly matches in this case. So if it does not match, it is
1606 a pass-through argument that will be sorted out at edge
1607 redirection time. */
1608 ipa_param_body_replacement *pbr
1609 = lookup_replacement_1 (base, unit_offset);
1610
1611 if (!pbr
1612 || (TYPE_MAIN_VARIANT (TREE_TYPE (t))
1613 != TYPE_MAIN_VARIANT (TREE_TYPE (pbr->repl))))
1614 {
1615 pass_through_args.safe_push (i);
1616 pass_through_pbr_indices.safe_push (j);
1617 }
1618 }
1619 }
1620 }
1621
1622 unsigned nargs = gimple_call_num_args (stmt);
1623 if (!pass_through_args.is_empty ())
1624 {
1625 auto_vec<tree, 16> vargs;
1626 unsigned pt_idx = 0;
1627 for (unsigned i = 0; i < nargs; i++)
1628 {
1629 if (pt_idx < pass_through_args.length ()
1630 && i == pass_through_args[pt_idx])
1631 {
1632 unsigned j = pass_through_pbr_indices[pt_idx];
1633 pt_idx++;
1634 tree base = m_replacements[j].base;
1635
1636 /* The base will get mapped to the special transitive-isra marker
1637 dummy decl. */
1638 struct simple_tree_swap_info swapinfo;
1639 swapinfo.from = base;
1640 swapinfo.to = m_replacements[j].dummy;
1641 swapinfo.done = false;
1642 tree arg = gimple_call_arg (stmt, i);
1643 walk_tree (&arg, remap_split_decl_to_dummy, &swapinfo, NULL);
1644 gcc_assert (swapinfo.done);
1645 vargs.safe_push (arg);
1646 /* Now let's push all replacements pertaining to this parameter
1647 so that all gimple register ones get correct SSA_NAMES. Edge
1648 redirection will weed out the dummy argument as well as all
1649 unused replacements later. */
1650 unsigned int repl_list_len = m_replacements.length ();
1651 for (; j < repl_list_len; j++)
1652 {
1653 if (m_replacements[j].base != base)
1654 break;
1655 vargs.safe_push (m_replacements[j].repl);
1656 }
1657 }
1658 else
1659 {
1660 tree t = gimple_call_arg (stmt, i);
1661 modify_expression (&t, true);
1662 vargs.safe_push (t);
1663 }
1664 }
1665 gcall *new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
1666 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
1667 gimple_call_copy_flags (new_stmt, stmt);
1668 if (tree lhs = gimple_call_lhs (stmt))
1669 {
1670 modify_expression (&lhs, false);
1671 gimple_call_set_lhs (new_stmt, lhs);
1672 }
1673 *stmt_p = new_stmt;
1674 return true;
1675 }
1676
1677 /* Otherwise, no need to rebuild the statement, let's just modify arguments
1678 and the LHS if/as appropriate. */
1679 bool modified = false;
1680 for (unsigned i = 0; i < nargs; i++)
1681 {
1682 tree *t = gimple_call_arg_ptr (stmt, i);
1683 modified |= modify_expression (t, true);
1684 }
1685
1686 if (gimple_call_lhs (stmt))
1687 {
1688 tree *t = gimple_call_lhs_ptr (stmt);
1689 modified |= modify_expression (t, false);
1690 }
1691
1692 return modified;
1693 }
1694
1695 /* If the statement STMT contains any expressions that need to be replaced with
1696 a different one as noted by ADJUSTMENTS, do so.  Handle any potential type
1697 incompatibilities.  If any conversion statements have to be pre-pended to
1698 STMT, they will be added to EXTRA_STMTS. Return true iff the statement was
1699 modified. */
1700
1701 bool
1702 ipa_param_body_adjustments::modify_gimple_stmt (gimple **stmt,
1703 gimple_seq *extra_stmts)
1704 {
1705 bool modified = false;
1706 tree *t;
1707
1708 switch (gimple_code (*stmt))
1709 {
1710 case GIMPLE_RETURN:
1711 t = gimple_return_retval_ptr (as_a <greturn *> (*stmt));
1712 if (m_adjustments && m_adjustments->m_skip_return)
1713 *t = NULL_TREE;
1714 else if (*t != NULL_TREE)
1715 modified |= modify_expression (t, true);
1716 break;
1717
1718 case GIMPLE_ASSIGN:
1719 modified |= modify_assignment (*stmt, extra_stmts);
1720 break;
1721
1722 case GIMPLE_CALL:
1723 modified |= modify_call_stmt ((gcall **) stmt);
1724 break;
1725
1726 case GIMPLE_ASM:
1727 {
1728 gasm *asm_stmt = as_a <gasm *> (*stmt);
1729 for (unsigned i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1730 {
1731 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1732 modified |= modify_expression (t, true);
1733 }
1734 for (unsigned i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1735 {
1736 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1737 modified |= modify_expression (t, false);
1738 }
1739 }
1740 break;
1741
1742 default:
1743 break;
1744 }
1745 return modified;
1746 }
1747
1748
1749 /* Traverse body of the current function and perform the requested adjustments
1750 on its statements. Return true iff the CFG has been changed. */
1751
1752 bool
1753 ipa_param_body_adjustments::modify_cfun_body ()
1754 {
1755 bool cfg_changed = false;
1756 basic_block bb;
1757
1758 FOR_EACH_BB_FN (bb, cfun)
1759 {
1760 gimple_stmt_iterator gsi;
1761
1762 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1763 {
1764 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
1765 tree new_lhs, old_lhs = gimple_phi_result (phi);
1766 new_lhs = replace_removed_params_ssa_names (old_lhs, phi);
1767 if (new_lhs)
1768 {
1769 gimple_phi_set_result (phi, new_lhs);
1770 release_ssa_name (old_lhs);
1771 }
1772 }
1773
1774 gsi = gsi_start_bb (bb);
1775 while (!gsi_end_p (gsi))
1776 {
1777 gimple *stmt = gsi_stmt (gsi);
1778 gimple *stmt_copy = stmt;
1779 gimple_seq extra_stmts = NULL;
1780 bool modified = modify_gimple_stmt (&stmt, &extra_stmts);
1781 if (stmt != stmt_copy)
1782 {
1783 gcc_checking_assert (modified);
1784 gsi_replace (&gsi, stmt, false);
1785 }
1786 if (!gimple_seq_empty_p (extra_stmts))
1787 gsi_insert_seq_before (&gsi, extra_stmts, GSI_SAME_STMT);
1788
1789 def_operand_p defp;
1790 ssa_op_iter iter;
1791 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
1792 {
1793 tree old_def = DEF_FROM_PTR (defp);
1794 if (tree new_def = replace_removed_params_ssa_names (old_def,
1795 stmt))
1796 {
1797 SET_DEF (defp, new_def);
1798 release_ssa_name (old_def);
1799 modified = true;
1800 }
1801 }
1802
1803 if (modified)
1804 {
1805 update_stmt (stmt);
1806 if (maybe_clean_eh_stmt (stmt)
1807 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
1808 cfg_changed = true;
1809 }
1810 gsi_next (&gsi);
1811 }
1812 }
1813
1814 return cfg_changed;
1815 }
1816
1817 /* Call gimple_debug_bind_reset_value on all debug statements describing
1818 gimple register parameters that are being removed or replaced. */
1819
1820 void
1821 ipa_param_body_adjustments::reset_debug_stmts ()
1822 {
1823 int i, len;
1824 gimple_stmt_iterator *gsip = NULL, gsi;
1825
1826 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
1827 {
1828 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1829 gsip = &gsi;
1830 }
1831 len = m_reset_debug_decls.length ();
1832 for (i = 0; i < len; i++)
1833 {
1834 imm_use_iterator ui;
1835 gimple *stmt;
1836 gdebug *def_temp;
1837 tree name, vexpr, copy = NULL_TREE;
1838 use_operand_p use_p;
1839 tree decl = m_reset_debug_decls[i];
1840
1841 gcc_checking_assert (is_gimple_reg (decl));
1842 name = ssa_default_def (cfun, decl);
1843 vexpr = NULL;
1844 if (name)
1845 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
1846 {
1847 if (gimple_clobber_p (stmt))
1848 {
1849 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
1850 unlink_stmt_vdef (stmt);
1851 gsi_remove (&cgsi, true);
1852 release_defs (stmt);
1853 continue;
1854 }
1855 /* All other users must have been removed by function body
1856 modification. */
1857 gcc_assert (is_gimple_debug (stmt));
1858 if (vexpr == NULL && gsip != NULL)
1859 {
1860 vexpr = make_node (DEBUG_EXPR_DECL);
1861 def_temp = gimple_build_debug_source_bind (vexpr, decl, NULL);
1862 DECL_ARTIFICIAL (vexpr) = 1;
1863 TREE_TYPE (vexpr) = TREE_TYPE (name);
1864 SET_DECL_MODE (vexpr, DECL_MODE (decl));
1865 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
1866 }
1867 if (vexpr)
1868 {
1869 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
1870 SET_USE (use_p, vexpr);
1871 }
1872 else
1873 gimple_debug_bind_reset_value (stmt);
1874 update_stmt (stmt);
1875 }
1876 /* Create a VAR_DECL for debug info purposes. */
1877 if (!DECL_IGNORED_P (decl))
1878 {
1879 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1880 VAR_DECL, DECL_NAME (decl),
1881 TREE_TYPE (decl));
1882 if (DECL_PT_UID_SET_P (decl))
1883 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
1884 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
1885 TREE_READONLY (copy) = TREE_READONLY (decl);
1886 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
1887 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
1888 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
1889 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
1890 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
1891 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
1892 SET_DECL_RTL (copy, 0);
1893 TREE_USED (copy) = 1;
1894 DECL_CONTEXT (copy) = current_function_decl;
1895 add_local_decl (cfun, copy);
1896 DECL_CHAIN (copy)
1897 = BLOCK_VARS (DECL_INITIAL (current_function_decl));
1898 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
1899 }
1900 if (gsip != NULL && copy && target_for_debug_bind (decl))
1901 {
1902 gcc_assert (TREE_CODE (decl) == PARM_DECL);
1903 if (vexpr)
1904 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
1905 else
1906 def_temp = gimple_build_debug_source_bind (copy, decl,
1907 NULL);
1908 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
1909 }
1910 }
1911 }
1912
1913 /* Perform all necessary changes to the signature, body and debug info of the
1914 function according to the adjustments passed at construction.  Return true if CFG
1915 was changed in any way. The main entry point for modification of standalone
1916 functions that is not part of IPA clone materialization. */
1917
1918 bool
1919 ipa_param_body_adjustments::perform_cfun_body_modifications ()
1920 {
1921 bool cfg_changed;
1922 modify_formal_parameters ();
1923 cfg_changed = modify_cfun_body ();
1924 reset_debug_stmts ();
1925
1926 return cfg_changed;
1927 }
1928