]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ipa-modref.c
IPA: fix one more UBSAN error
[thirdparty/gcc.git] / gcc / ipa-modref.c
/* Search for references that a function loads or stores.
2 Copyright (C) 2020 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls. The summary has a form of decision tree
24 described in ipa-modref-tree.h.
25
   This file contains a tree pass and an IPA pass.  Both perform the same
   analysis; however, the tree pass is executed during early and late
   optimization passes to propagate info downwards in the compilation order.
   The IPA pass
29 propagates across the callgraph and is able to handle recursion and works on
30 whole program during link-time analysis.
31
   LTO mode differs from the local mode by not recording alias sets but types
   that are translated to alias sets later.  This is necessary in order to
   stream the information, because the alias sets are rebuilt at stream-in
   time and may not correspond to the ones seen during analysis.  For this
   reason part of the analysis is duplicated.  */
37
38 #include "config.h"
39 #include "system.h"
40 #include "coretypes.h"
41 #include "backend.h"
42 #include "tree.h"
43 #include "gimple.h"
44 #include "alloc-pool.h"
45 #include "tree-pass.h"
46 #include "gimple-iterator.h"
47 #include "tree-dfa.h"
48 #include "cgraph.h"
49 #include "ipa-utils.h"
50 #include "symbol-summary.h"
51 #include "gimple-pretty-print.h"
52 #include "gimple-walk.h"
53 #include "print-tree.h"
54 #include "tree-streamer.h"
55 #include "alias.h"
56 #include "calls.h"
57 #include "ipa-modref-tree.h"
58 #include "ipa-modref.h"
59 #include "value-range.h"
60 #include "ipa-prop.h"
61 #include "ipa-fnsummary.h"
62
/* Class (from which there is one global instance) that holds modref summaries
   for all analyzed functions.  */
class GTY((user)) modref_summaries
  : public fast_function_summary <modref_summary *, va_gc>
{
public:
  modref_summaries (symbol_table *symtab)
      : fast_function_summary <modref_summary *, va_gc> (symtab) {}
  /* Callgraph hook: a new function was inserted late; (re)analyze it.  */
  virtual void insert (cgraph_node *, modref_summary *state);
  /* Callgraph hook: a function was cloned; copy SRC_DATA into DST_DATA.  */
  virtual void duplicate (cgraph_node *src_node,
			  cgraph_node *dst_node,
			  modref_summary *src_data,
			  modref_summary *dst_data);
  /* Allocate the summary table in GC-managed memory; no destructor is
     registered with the collector, so entries are freed explicitly.  */
  static modref_summaries *create_ggc (symbol_table *symtab)
  {
    return new (ggc_alloc_no_dtor<modref_summaries> ())
	     modref_summaries (symtab);
  }
};
82
class modref_summary_lto;

/* Class (from which there is one global instance) that holds modref summaries
   for all analyzed functions.  This is the LTO variant keyed by types rather
   than alias sets (see modref_summary_lto).  */
class GTY((user)) modref_summaries_lto
  : public fast_function_summary <modref_summary_lto *, va_gc>
{
public:
  modref_summaries_lto (symbol_table *symtab)
      : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
	propagated (false) {}
  /* Callgraph hook: a new function was inserted late; (re)analyze it.  */
  virtual void insert (cgraph_node *, modref_summary_lto *state);
  /* Callgraph hook: a function was cloned; copy SRC_DATA into DST_DATA.  */
  virtual void duplicate (cgraph_node *src_node,
			  cgraph_node *dst_node,
			  modref_summary_lto *src_data,
			  modref_summary_lto *dst_data);
  /* Allocate the summary table in GC-managed memory (no GC destructor).  */
  static modref_summaries_lto *create_ggc (symbol_table *symtab)
  {
    return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
	     modref_summaries_lto (symtab);
  }
  /* Set once IPA propagation has run; inserting new functions after
     that point is not supported (see the insert hook).  */
  bool propagated;
};
106
/* Global variable holding all modref summaries
   (from analysis to IPA propagation time).  */
static GTY(()) fast_function_summary <modref_summary *, va_gc>
	 *summaries;

/* Global variable holding all modref optimization summaries
   (from IPA propagation time or used by local optimization pass).  */
static GTY(()) fast_function_summary <modref_summary *, va_gc>
	 *optimization_summaries;

/* LTO summaries hold info from analysis to LTO streaming or from LTO
   stream-in through propagation to LTO stream-out.  */
static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
	 *summaries_lto;
121
122 /* Summary for a single function which this pass produces. */
123
124 modref_summary::modref_summary ()
125 : loads (NULL), stores (NULL)
126 {
127 }
128
/* Destructor: release the GGC-allocated load and store trees, if any.  */

modref_summary::~modref_summary ()
{
  if (loads)
    ggc_delete (loads);
  if (stores)
    ggc_delete (stores);
}
136
137 /* Return true if summary is potentially useful for optimization. */
138
139 bool
140 modref_summary::useful_p (int ecf_flags)
141 {
142 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
143 return false;
144 if (loads && !loads->every_base)
145 return true;
146 if (ecf_flags & ECF_PURE)
147 return false;
148 return stores && !stores->every_base;
149 }
150
/* Single function summary used for LTO.  */

typedef modref_tree <tree> modref_records_lto;
struct GTY(()) modref_summary_lto
{
  /* Load and stores in functions using types rather than alias sets.

     This is necessary to make the information streamable for LTO but is also
     more verbose and thus more likely to hit the limits.  */
  modref_records_lto *loads;
  modref_records_lto *stores;

  modref_summary_lto ();
  ~modref_summary_lto ();
  void dump (FILE *);
  /* True when the summary can still help optimization given ECF_FLAGS.  */
  bool useful_p (int ecf_flags);
};
168
169 /* Summary for a single function which this pass produces. */
170
171 modref_summary_lto::modref_summary_lto ()
172 : loads (NULL), stores (NULL)
173 {
174 }
175
/* Destructor: release the GGC-allocated load and store trees, if any.  */

modref_summary_lto::~modref_summary_lto ()
{
  if (loads)
    ggc_delete (loads);
  if (stores)
    ggc_delete (stores);
}
183
184
185 /* Return true if lto summary is potentially useful for optimization. */
186
187 bool
188 modref_summary_lto::useful_p (int ecf_flags)
189 {
190 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
191 return false;
192 if (loads && !loads->every_base)
193 return true;
194 if (ecf_flags & ECF_PURE)
195 return false;
196 return stores && !stores->every_base;
197 }
198
/* Dump A to OUT.  */

static void
dump_access (modref_access_node *a, FILE *out)
{
  fprintf (out, " access:");
  /* A parm_index of -1 means the access is not based on a parameter;
     otherwise print the parameter and, if known, the constant offset.  */
  if (a->parm_index != -1)
    {
      fprintf (out, " Parm %i", a->parm_index);
      if (a->parm_offset_known)
	{
	  fprintf (out, " param offset:");
	  print_dec ((poly_int64_pod)a->parm_offset, out, SIGNED);
	}
    }
  /* Offset/size/max_size are printed only when meaningful.  */
  if (a->range_info_useful_p ())
    {
      fprintf (out, " offset:");
      print_dec ((poly_int64_pod)a->offset, out, SIGNED);
      fprintf (out, " size:");
      print_dec ((poly_int64_pod)a->size, out, SIGNED);
      fprintf (out, " max_size:");
      print_dec ((poly_int64_pod)a->max_size, out, SIGNED);
    }
  fprintf (out, "\n");
}
225
226 /* Dump records TT to OUT. */
227
228 static void
229 dump_records (modref_records *tt, FILE *out)
230 {
231 fprintf (out, " Limits: %i bases, %i refs\n",
232 (int)tt->max_bases, (int)tt->max_refs);
233 if (tt->every_base)
234 {
235 fprintf (out, " Every base\n");
236 return;
237 }
238 size_t i;
239 modref_base_node <alias_set_type> *n;
240 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
241 {
242 fprintf (out, " Base %i: alias set %i\n", (int)i, n->base);
243 if (n->every_ref)
244 {
245 fprintf (out, " Every ref\n");
246 continue;
247 }
248 size_t j;
249 modref_ref_node <alias_set_type> *r;
250 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
251 {
252 fprintf (out, " Ref %i: alias set %i\n", (int)j, r->ref);
253 if (r->every_access)
254 {
255 fprintf (out, " Every access\n");
256 continue;
257 }
258 size_t k;
259 modref_access_node *a;
260 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
261 dump_access (a, out);
262 }
263 }
264 }
265
266 /* Dump records TT to OUT. */
267
268 static void
269 dump_lto_records (modref_records_lto *tt, FILE *out)
270 {
271 fprintf (out, " Limits: %i bases, %i refs\n",
272 (int)tt->max_bases, (int)tt->max_refs);
273 if (tt->every_base)
274 {
275 fprintf (out, " Every base\n");
276 return;
277 }
278 size_t i;
279 modref_base_node <tree> *n;
280 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
281 {
282 fprintf (out, " Base %i:", (int)i);
283 print_generic_expr (dump_file, n->base);
284 fprintf (out, " (alias set %i)\n",
285 n->base ? get_alias_set (n->base) : 0);
286 if (n->every_ref)
287 {
288 fprintf (out, " Every ref\n");
289 continue;
290 }
291 size_t j;
292 modref_ref_node <tree> *r;
293 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
294 {
295 fprintf (out, " Ref %i:", (int)j);
296 print_generic_expr (dump_file, r->ref);
297 fprintf (out, " (alias set %i)\n",
298 r->ref ? get_alias_set (r->ref) : 0);
299 if (r->every_access)
300 {
301 fprintf (out, " Every access\n");
302 continue;
303 }
304 size_t k;
305 modref_access_node *a;
306 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
307 dump_access (a, out);
308 }
309 }
310 }
311
/* Dump summary (both load and store trees) to OUT.  */

void
modref_summary::dump (FILE *out)
{
  fprintf (out, " loads:\n");
  dump_records (loads, out);
  fprintf (out, " stores:\n");
  dump_records (stores, out);
}
322
/* Dump LTO summary (both load and store trees) to OUT.  */

void
modref_summary_lto::dump (FILE *out)
{
  fprintf (out, " loads:\n");
  dump_lto_records (loads, out);
  fprintf (out, " stores:\n");
  dump_lto_records (stores, out);
}
333
334 /* Get function summary for FUNC if it exists, return NULL otherwise. */
335
336 modref_summary *
337 get_modref_function_summary (cgraph_node *func)
338 {
339 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
340 if (!optimization_summaries)
341 return NULL;
342
343 /* A single function body may be represented by multiple symbols with
344 different visibility. For example, if FUNC is an interposable alias,
345 we don't want to return anything, even if we have summary for the target
346 function. */
347 enum availability avail;
348 func = func->function_or_virtual_thunk_symbol
349 (&avail, cgraph_node::get (current_function_decl));
350 if (avail <= AVAIL_INTERPOSABLE)
351 return NULL;
352
353 modref_summary *r = optimization_summaries->get (func);
354 return r;
355 }
356
/* Construct modref_access_node from REF.  Fills in the offset/size range
   from REF and, when the base is a dereference of an incoming parameter,
   the parameter index and (for MEM_REF) constant offset.  */
static modref_access_node
get_access (ao_ref *ref)
{
  tree base;

  base = ao_ref_base (ref);
  /* parm_index -1 means "not known to be based on a parameter" until the
     analysis below proves otherwise.  */
  modref_access_node a = {ref->offset, ref->size, ref->max_size,
			  0, -1, false};
  if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree memref = base;
      base = TREE_OPERAND (base, 0);
      /* A default-def SSA name of a PARM_DECL is the incoming value of
	 a parameter; such an access is directly parameter-based.  */
      if (TREE_CODE (base) == SSA_NAME
	  && SSA_NAME_IS_DEFAULT_DEF (base)
	  && TREE_CODE (SSA_NAME_VAR (base)) == PARM_DECL)
	{
	  /* Find the parameter's position in DECL_ARGUMENTS; fall back
	     to -1 if the chain ends without a match.  */
	  a.parm_index = 0;
	  for (tree t = DECL_ARGUMENTS (current_function_decl);
	       t != SSA_NAME_VAR (base); t = DECL_CHAIN (t))
	    {
	      if (!t)
		{
		  a.parm_index = -1;
		  break;
		}
	      a.parm_index++;
	    }
	  if (TREE_CODE (memref) == MEM_REF)
	    {
	      /* MEM_REF operand 1 is the constant offset; record it only
		 when it fits in a poly_int64 (to_shwi reports that).  */
	      a.parm_offset_known
		 = wi::to_poly_wide (TREE_OPERAND
					 (memref, 1)).to_shwi (&a.parm_offset);
	    }
	  else
	    a.parm_offset_known = false;
	}
      else
	a.parm_index = -1;
    }
  else
    a.parm_index = -1;
  return a;
}
401
402 /* Record access into the modref_records data structure. */
403
404 static void
405 record_access (modref_records *tt, ao_ref *ref)
406 {
407 alias_set_type base_set = !flag_strict_aliasing ? 0
408 : ao_ref_base_alias_set (ref);
409 alias_set_type ref_set = !flag_strict_aliasing ? 0
410 : (ao_ref_alias_set (ref));
411 modref_access_node a = get_access (ref);
412 if (dump_file)
413 {
414 fprintf (dump_file, " - Recording base_set=%i ref_set=%i parm=%i\n",
415 base_set, ref_set, a.parm_index);
416 }
417 tt->insert (base_set, ref_set, a);
418 }
419
/* LTO (IPA) version of record_access: record the base and ref TYPES
   rather than alias sets, so the information can be streamed and the
   alias sets recomputed at stream-in time.  */

static void
record_access_lto (modref_records_lto *tt, ao_ref *ref)
{
  /* get_alias_set sometimes use different type to compute the alias set
     than TREE_TYPE (base).  Do same adjustments.  */
  tree base_type = NULL_TREE, ref_type = NULL_TREE;
  if (flag_strict_aliasing)
    {
      tree base;

      /* Strip component references to get at the innermost base.  */
      base = ref->ref;
      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);

      base_type = reference_alias_ptr_type_1 (&base);

      if (!base_type)
	base_type = TREE_TYPE (base);
      else
	/* ref-all pointers alias everything; represent that as no type.  */
	base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
		    ? NULL_TREE : TREE_TYPE (base_type);

      tree ref_expr = ref->ref;
      ref_type = reference_alias_ptr_type_1 (&ref_expr);

      if (!ref_type)
	ref_type = TREE_TYPE (ref_expr);
      else
	ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
		   ? NULL_TREE : TREE_TYPE (ref_type);

      /* Sanity check that we are in sync with what get_alias_set does.  */
      gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
			   || get_alias_set (base_type)
			      == ao_ref_base_alias_set (ref));
      gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
			   || get_alias_set (ref_type)
			      == ao_ref_alias_set (ref));

      /* Do not bother to record types that have no meaningful alias set.
	 Also skip variably modified types since these go to local streams.  */
      if (base_type && (!get_alias_set (base_type)
			|| variably_modified_type_p (base_type, NULL_TREE)))
	base_type = NULL_TREE;
      if (ref_type && (!get_alias_set (ref_type)
		       || variably_modified_type_p (ref_type, NULL_TREE)))
	ref_type = NULL_TREE;
    }
  modref_access_node a = get_access (ref);
  if (dump_file)
    {
      fprintf (dump_file, " - Recording base type:");
      print_generic_expr (dump_file, base_type);
      fprintf (dump_file, " (alias set %i) ref type:",
	       base_type ? get_alias_set (base_type) : 0);
      print_generic_expr (dump_file, ref_type);
      fprintf (dump_file, " (alias set %i) parm:%i\n",
	       ref_type ? get_alias_set (ref_type) : 0,
	       a.parm_index);
    }

  tt->insert (base_type, ref_type, a);
}
485
486 /* Returns true if and only if we should store the access to EXPR.
487 Some accesses, e.g. loads from automatic variables, are not interesting. */
488
489 static bool
490 record_access_p (tree expr)
491 {
492 if (refs_local_or_readonly_memory_p (expr))
493 {
494 if (dump_file)
495 fprintf (dump_file, " - Read-only or local, ignoring.\n");
496 return false;
497 }
498 return true;
499 }
500
501 /* Return true if ECF flags says that stores can be ignored. */
502
503 static bool
504 ignore_stores_p (tree caller, int flags)
505 {
506 if (flags & ECF_PURE)
507 return true;
508 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
509 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
510 return true;
511 return false;
512 }
513
/* Merge side effects of call STMT to function with CALLEE_SUMMARY
   into CUR_SUMMARY.  Return true if something changed.
   If IGNORE_STORES is true, do not merge stores.  */

bool
merge_call_side_effects (modref_summary *cur_summary,
			 gimple *stmt, modref_summary *callee_summary,
			 bool ignore_stores, cgraph_node *callee_node)
{
  auto_vec <modref_parm_map, 32> parm_map;
  bool changed = false;

  if (dump_file)
    fprintf (dump_file, " - Merging side effects of %s with parm map:",
	     callee_node->dump_name ());

  /* Build the map from callee parameter positions to caller parameters:
     for each actual argument record which formal parameter of the current
     function it is based on (if any) plus a known constant offset.  */
  parm_map.safe_grow_cleared (gimple_call_num_args (stmt));
  for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree op = gimple_call_arg (stmt, i);
      bool offset_known;
      poly_int64 offset;

      offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
      if (TREE_CODE (op) == SSA_NAME
	  && SSA_NAME_IS_DEFAULT_DEF (op)
	  && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
	{
	  /* Find OP's position in DECL_ARGUMENTS; -1 when not found.  */
	  int index = 0;
	  for (tree t = DECL_ARGUMENTS (current_function_decl);
	       t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
	    {
	      if (!t)
		{
		  index = -1;
		  break;
		}
	      index++;
	    }
	  parm_map[i].parm_index = index;
	  parm_map[i].parm_offset_known = offset_known;
	  parm_map[i].parm_offset = offset;
	}
      else if (points_to_local_or_readonly_memory_p (op))
	/* -2 marks arguments pointing to local/readonly memory.  */
	parm_map[i].parm_index = -2;
      else
	/* -1 marks arguments with unknown base.  */
	parm_map[i].parm_index = -1;
      if (dump_file)
	{
	  fprintf (dump_file, " %i", parm_map[i].parm_index);
	  if (parm_map[i].parm_offset_known)
	    {
	      fprintf (dump_file, " offset:");
	      print_dec ((poly_int64_pod)parm_map[i].parm_offset,
			 dump_file, SIGNED);
	    }
	}
    }
  if (dump_file)
    fprintf (dump_file, "\n");

  /* Merge with callee's summary.  */
  changed |= cur_summary->loads->merge (callee_summary->loads, &parm_map);
  if (!ignore_stores)
    changed |= cur_summary->stores->merge (callee_summary->stores,
					   &parm_map);
  return changed;
}
582
/* Analyze function call STMT and merge its side effects into CUR_SUMMARY.
   Recursive calls are pushed to RECURSIVE_CALLS for later iteration.
   Return false when the call makes the function unanalyzable (the
   caller then drops the whole summary).  */

static bool
analyze_call (modref_summary *cur_summary,
	      gimple *stmt, vec <gimple *> *recursive_calls)
{
  /* Check flags on the function call.  In certain cases, analysis can be
     simplified.  */
  int flags = gimple_call_flags (stmt);
  if (flags & (ECF_CONST | ECF_NOVOPS))
    {
      if (dump_file)
	fprintf (dump_file,
		 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
		 "except for args.\n");
      return true;
    }

  /* Pure functions do not affect global memory.  Stores by functions which are
     noreturn and do not throw can safely be ignored.  */
  bool ignore_stores = ignore_stores_p (current_function_decl, flags);

  /* Next, we try to get the callee's function declaration.  The goal is to
     merge their summary with ours.  */
  tree callee = gimple_call_fndecl (stmt);

  /* Check if this is an indirect call.  */
  if (!callee)
    {
      /* If the indirect call does not write memory, our store summary is
	 unaffected, but we have to discard our loads summary (we don't know
	 anything about the loads that the called function performs).  */
      if (ignore_stores)
	{
	  if (dump_file)
	    fprintf (dump_file, " - Indirect call which does not write memory, "
		    "discarding loads.\n");
	  cur_summary->loads->collapse ();
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, " - Indirect call.\n");
      return false;
    }

  struct cgraph_node *callee_node = cgraph_node::get_create (callee);

  /* We can not safely optimize based on summary of callee if it does
     not always bind to current def: it is possible that memory load
     was optimized out earlier which may not happen in the interposed
     variant.  */
  if (!callee_node->binds_to_current_def_p ())
    {
      if (dump_file)
	fprintf (dump_file, " - May be interposed: collapsing loads.\n");
      cur_summary->loads->collapse ();
    }

  /* If this is a recursive call, the target summary is the same as ours, so
     there's nothing to do.  */
  if (recursive_call_p (current_function_decl, callee))
    {
      recursive_calls->safe_push (stmt);
      if (dump_file)
	fprintf (dump_file, " - Skipping recursive call.\n");
      return true;
    }

  gcc_assert (callee_node != NULL);

  /* Get the function symbol and its availability.  */
  enum availability avail;
  callee_node = callee_node->function_symbol (&avail);
  if (avail <= AVAIL_INTERPOSABLE)
    {
      /* Keep stores summary, but discard all loads for interposable function
	 symbols.  */
      if (ignore_stores)
	{
	  cur_summary->loads->collapse ();
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, " - Function availability <= AVAIL_INTERPOSABLE.\n");
      return false;
    }

  /* Get callee's modref summary.  As above, if there's no summary, we either
     have to give up or, if stores are ignored, we can just purge loads.  */
  modref_summary *callee_summary = optimization_summaries->get (callee_node);
  if (!callee_summary)
    {
      if (ignore_stores)
	{
	  cur_summary->loads->collapse ();
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, " - No modref summary available for callee.\n");
      return false;
    }

  merge_call_side_effects (cur_summary, stmt, callee_summary, ignore_stores,
			   callee_node);

  return true;
}
691
/* Support analysis in non-lto and lto mode in parallel.  Either pointer
   may be NULL when the corresponding summary is not being computed.  */

struct summary_ptrs
{
  struct modref_summary *nolto;
  struct modref_summary_lto *lto;
};
699
700 /* Helper for analyze_stmt. */
701
702 static bool
703 analyze_load (gimple *, tree, tree op, void *data)
704 {
705 modref_summary *summary = ((summary_ptrs *)data)->nolto;
706 modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;
707
708 if (dump_file)
709 {
710 fprintf (dump_file, " - Analyzing load: ");
711 print_generic_expr (dump_file, op);
712 fprintf (dump_file, "\n");
713 }
714
715 if (!record_access_p (op))
716 return false;
717
718 ao_ref r;
719 ao_ref_init (&r, op);
720
721 if (summary)
722 record_access (summary->loads, &r);
723 if (summary_lto)
724 record_access_lto (summary_lto->loads, &r);
725 return false;
726 }
727
728 /* Helper for analyze_stmt. */
729
730 static bool
731 analyze_store (gimple *, tree, tree op, void *data)
732 {
733 modref_summary *summary = ((summary_ptrs *)data)->nolto;
734 modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;
735
736 if (dump_file)
737 {
738 fprintf (dump_file, " - Analyzing store: ");
739 print_generic_expr (dump_file, op);
740 fprintf (dump_file, "\n");
741 }
742
743 if (!record_access_p (op))
744 return false;
745
746 ao_ref r;
747 ao_ref_init (&r, op);
748
749 if (summary)
750 record_access (summary->stores, &r);
751 if (summary_lto)
752 record_access_lto (summary_lto->stores, &r);
753 return false;
754 }
755
/* Analyze statement STMT of function F.
   If IPA is true do not merge in side effects of calls.
   Return false when STMT makes the function unanalyzable.  */

static bool
analyze_stmt (modref_summary *summary, modref_summary_lto *summary_lto,
	      gimple *stmt, bool ipa, vec <gimple *> *recursive_calls)
{
  /* In general we can not ignore clobbers because they are barriers for code
     motion, however after inlining it is safe to do because local optimization
     passes do not consider clobbers from other functions.
     Similar logic is in ipa-pure-const.  */
  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return true;

  struct summary_ptrs sums = {summary, summary_lto};

  /* Analyze all loads and stores in STMT.  */
  walk_stmt_load_store_ops (stmt, &sums,
			    analyze_load, analyze_store);

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASM:
      /* If the ASM statement does not read nor write memory, there's nothing
	 to do.  Otherwise just give up.  */
      if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	return true;
      if (dump_file)
	fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
	       "which clobbers memory.\n");
      return false;
    case GIMPLE_CALL:
      /* In local mode merge the callee's side effects here; in IPA mode
	 calls are handled by the interprocedural propagation instead.  */
      if (!ipa)
	return analyze_call (summary, stmt, recursive_calls);
      return true;
    default:
      /* Nothing to do for other types of statements.  */
      return true;
    }
}
796
797 /* Remove summary of current function because during the function body
798 scan we determined it is not useful. LTO, NOLTO and IPA determines the
799 mode of scan. */
800
801 static void
802 remove_summary (bool lto, bool nolto, bool ipa)
803 {
804 cgraph_node *fnode = cgraph_node::get (current_function_decl);
805 if (!ipa)
806 optimization_summaries->remove (fnode);
807 else
808 {
809 if (nolto)
810 summaries->remove (fnode);
811 if (lto)
812 summaries_lto->remove (fnode);
813 }
814 if (dump_file)
815 fprintf (dump_file,
816 " - modref done with result: not tracked.\n");
817 }
818
/* Analyze function F.  IPA indicates whether we're running in local mode
   (false) or the IPA mode (true).  */

static void
analyze_function (function *f, bool ipa)
{
  if (dump_file)
    fprintf (dump_file, "modref analyzing '%s' (ipa=%i)%s%s\n",
	     function_name (f), ipa,
	     TREE_READONLY (current_function_decl) ? " (const)" : "",
	     DECL_PURE_P (current_function_decl) ? " (pure)" : "");

  /* Don't analyze this function if modref is disabled (-fno-ipa-modref).  */
  if (!flag_ipa_modref)
    return;

  /* Compute no-LTO summaries when local optimization is going to happen.  */
  bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
		|| (in_lto_p && !flag_wpa
		    && flag_incremental_link != INCREMENTAL_LINK_LTO));
  /* Compute LTO when LTO streaming is going to happen.  */
  bool lto = ipa && ((flag_lto && !in_lto_p)
		     || flag_wpa
		     || flag_incremental_link == INCREMENTAL_LINK_LTO);
  cgraph_node *fnode = cgraph_node::get (current_function_decl);

  modref_summary *summary = NULL;
  modref_summary_lto *summary_lto = NULL;

  /* Initialize the summary.
     If we run in local mode there is possibly pre-existing summary from
     IPA pass.  Dump it so it is easy to compare if mod-ref info has
     improved.  */
  if (!ipa)
    {
      if (!optimization_summaries)
	optimization_summaries = modref_summaries::create_ggc (symtab);
      else /* Remove existing summary if we are re-running the pass.  */
	{
	  if (dump_file
	      && (summary
		  = optimization_summaries->get (cgraph_node::get (f->decl)))
		 != NULL
	      && summary->loads)
	    {
	      fprintf (dump_file, "Past summary:\n");
	      optimization_summaries->get
		 (cgraph_node::get (f->decl))->dump (dump_file);
	    }
	  optimization_summaries->remove (cgraph_node::get (f->decl));
	}
      summary = optimization_summaries->get_create (cgraph_node::get (f->decl));
      gcc_checking_assert (nolto && !lto);
    }
  /* In IPA mode we analyze every function precisely once.  Assert that.  */
  else
    {
      if (nolto)
	{
	  if (!summaries)
	    summaries = modref_summaries::create_ggc (symtab);
	  else
	    summaries->remove (cgraph_node::get (f->decl));
	  summary = summaries->get_create (cgraph_node::get (f->decl));
	}
      if (lto)
	{
	  if (!summaries_lto)
	    summaries_lto = modref_summaries_lto::create_ggc (symtab);
	  else
	    summaries_lto->remove (cgraph_node::get (f->decl));
	  summary_lto = summaries_lto->get_create (cgraph_node::get (f->decl));
	}
    }


  /* Create and initialize summary for F.
     Note that summaries may be already allocated from previous
     run of the pass.  */
  if (nolto)
    {
      gcc_assert (!summary->loads);
      summary->loads = modref_records::create_ggc (param_modref_max_bases,
						   param_modref_max_refs,
						   param_modref_max_accesses);
      gcc_assert (!summary->stores);
      summary->stores = modref_records::create_ggc (param_modref_max_bases,
						    param_modref_max_refs,
						    param_modref_max_accesses);
    }
  if (lto)
    {
      gcc_assert (!summary_lto->loads);
      summary_lto->loads = modref_records_lto::create_ggc
				 (param_modref_max_bases,
				  param_modref_max_refs,
				  param_modref_max_accesses);
      gcc_assert (!summary_lto->stores);
      summary_lto->stores = modref_records_lto::create_ggc
				 (param_modref_max_bases,
				  param_modref_max_refs,
				  param_modref_max_accesses);
    }
  int ecf_flags = flags_from_decl_or_type (current_function_decl);
  auto_vec <gimple *, 32> recursive_calls;

  /* Analyze each statement in each basic block of the function.  If the
     statement cannot be analyzed (for any reason), the entire function cannot
     be analyzed by modref.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, f)
    {
      gimple_stmt_iterator si;
      for (si = gsi_after_labels (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  if (!analyze_stmt (summary, summary_lto,
			     gsi_stmt (si), ipa, &recursive_calls)
	      || ((!summary || !summary->useful_p (ecf_flags))
		  && (!summary_lto || !summary_lto->useful_p (ecf_flags))))
	    {
	      remove_summary (lto, nolto, ipa);
	      return;
	    }
	}
    }

  /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
     This needs to be done after all other side effects are computed.  */
  if (!ipa)
    {
      bool changed = true;
      while (changed)
	{
	  changed = false;
	  for (unsigned i = 0; i < recursive_calls.length (); i++)
	    {
	      changed |= merge_call_side_effects
			  (summary, recursive_calls[i], summary,
			   ignore_stores_p (current_function_decl,
					    gimple_call_flags
						 (recursive_calls[i])),
			   fnode);
	      if (!summary->useful_p (ecf_flags))
		{
		  remove_summary (lto, nolto, ipa);
		  return;
		}
	    }
	}
    }
  /* Drop summaries that ended up carrying no useful information.  */
  if (summary && !summary->useful_p (ecf_flags))
    {
      if (!ipa)
	optimization_summaries->remove (fnode);
      else
	summaries->remove (fnode);
      summary = NULL;
    }
  if (summary_lto && !summary_lto->useful_p (ecf_flags))
    {
      summaries_lto->remove (fnode);
      summary_lto = NULL;
    }

  if (dump_file)
    {
      fprintf (dump_file, " - modref done with result: tracked.\n");
      if (summary)
	summary->dump (dump_file);
      if (summary_lto)
	summary_lto->dump (dump_file);
    }
}
992
993 /* Callback for generate_summary. */
994
995 static void
996 modref_generate (void)
997 {
998 struct cgraph_node *node;
999 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1000 {
1001 function *f = DECL_STRUCT_FUNCTION (node->decl);
1002 if (!f)
1003 continue;
1004 push_cfun (f);
1005 analyze_function (f, true);
1006 pop_cfun ();
1007 }
1008 }
1009
/* Called when a new function is inserted to callgraph late.  */

void
modref_summaries::insert (struct cgraph_node *node, modref_summary *)
{
  /* Local passes ought to be executed by the pass manager.  */
  if (this == optimization_summaries)
    {
      optimization_summaries->remove (node);
      return;
    }
  /* Without a gimple body there is nothing to analyze.  */
  if (!DECL_STRUCT_FUNCTION (node->decl))
    {
      summaries->remove (node);
      return;
    }
  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  analyze_function (DECL_STRUCT_FUNCTION (node->decl), true);
  pop_cfun ();
}
1030
/* Called when a new function is inserted to callgraph late.  */

void
modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
{
  /* We do not support adding new function when IPA information is already
     propagated.  This is done only by SIMD cloning that is not very
     critical.  */
  if (!DECL_STRUCT_FUNCTION (node->decl)
      || propagated)
    {
      summaries_lto->remove (node);
      return;
    }
  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  analyze_function (DECL_STRUCT_FUNCTION (node->decl), true);
  pop_cfun ();
}
1049
/* Called when new clone is inserted to callgraph late.  */

void
modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
			     modref_summary *src_data,
			     modref_summary *dst_data)
{
  /* Do not duplicate optimization summaries; we do not handle parameter
     transforms on them.  */
  if (this == optimization_summaries)
    {
      optimization_summaries->remove (dst);
      return;
    }
  /* Deep-copy the load and store trees, preserving the source's limits.  */
  dst_data->stores = modref_records::create_ggc
			 (src_data->stores->max_bases,
			  src_data->stores->max_refs,
			  src_data->stores->max_accesses);
  dst_data->stores->copy_from (src_data->stores);
  dst_data->loads = modref_records::create_ggc
			 (src_data->loads->max_bases,
			  src_data->loads->max_refs,
			  src_data->loads->max_accesses);
  dst_data->loads->copy_from (src_data->loads);
}
1075
/* Called when new clone is inserted to callgraph late.  */

void
modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
				 modref_summary_lto *src_data,
				 modref_summary_lto *dst_data)
{
  /* Deep-copy the load and store trees, preserving the source's limits.  */
  dst_data->stores = modref_records_lto::create_ggc
			 (src_data->stores->max_bases,
			  src_data->stores->max_refs,
			  src_data->stores->max_accesses);
  dst_data->stores->copy_from (src_data->stores);
  dst_data->loads = modref_records_lto::create_ggc
			 (src_data->loads->max_bases,
			  src_data->loads->max_refs,
			  src_data->loads->max_accesses);
  dst_data->loads->copy_from (src_data->loads);
}
1094
1095 namespace
1096 {
/* Definition of the modref pass on GIMPLE.  */
const pass_data pass_data_modref = {
  GIMPLE_PASS,			/* type */
  "modref",			/* name */
  OPTGROUP_IPA,			/* optinfo_flags */
  TV_TREE_MODREF,		/* tv_id */
  (PROP_cfg | PROP_ssa),	/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  0,				/* todo_flags_finish */
};
1109
1110 class pass_modref : public gimple_opt_pass
1111 {
1112 public:
1113 pass_modref (gcc::context *ctxt)
1114 : gimple_opt_pass (pass_data_modref, ctxt) {}
1115
1116 /* opt_pass methods: */
1117 opt_pass *clone ()
1118 {
1119 return new pass_modref (m_ctxt);
1120 }
1121 virtual bool gate (function *)
1122 {
1123 return flag_ipa_modref;
1124 }
1125 virtual unsigned int execute (function *);
1126 };
1127
/* Encode TT to the output block OB using the summary streaming API.
   The layout written here must stay in sync with read_modref_records:
   limits first, then the base/ref/access tree pre-order with explicit
   counts and "every_*" collapse flags.  */

static void
write_modref_records (modref_records_lto *tt, struct output_block *ob)
{
  /* Stream the size limits so the reader re-creates the tree with the
     same bounds.  */
  streamer_write_uhwi (ob, tt->max_bases);
  streamer_write_uhwi (ob, tt->max_refs);
  streamer_write_uhwi (ob, tt->max_accesses);

  streamer_write_uhwi (ob, tt->every_base);
  streamer_write_uhwi (ob, vec_safe_length (tt->bases));
  size_t i;
  modref_base_node <tree> *base_node;
  FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
    {
      /* In LTO mode bases are types (not alias sets); alias sets are
	 recomputed at stream-in time.  */
      stream_write_tree (ob, base_node->base, true);

      streamer_write_uhwi (ob, base_node->every_ref);
      streamer_write_uhwi (ob, vec_safe_length (base_node->refs));

      size_t j;
      modref_ref_node <tree> *ref_node;
      FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	{
	  stream_write_tree (ob, ref_node->ref, true);
	  streamer_write_uhwi (ob, ref_node->every_access);
	  streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));

	  size_t k;
	  modref_access_node *access_node;
	  FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	    {
	      streamer_write_hwi (ob, access_node->parm_index);
	      /* Offsets are only meaningful when the access is relative
		 to a known parameter (parm_index != -1).  */
	      if (access_node->parm_index != -1)
		{
		  streamer_write_uhwi (ob, access_node->parm_offset_known);
		  if (access_node->parm_offset_known)
		    {
		      streamer_write_poly_int64 (ob, access_node->parm_offset);
		      streamer_write_poly_int64 (ob, access_node->offset);
		      streamer_write_poly_int64 (ob, access_node->size);
		      streamer_write_poly_int64 (ob, access_node->max_size);
		    }
		}
	    }
	}
    }
}
1176
/* Read a modref_tree from the input block IB using the data from DATA_IN.
   This assumes that the tree was encoded using write_modref_tree.
   Either nolto_ret or lto_ret is initialized by the tree depending whether
   LTO streaming is expected or not.  */

void
read_modref_records (lto_input_block *ib, struct data_in *data_in,
		     modref_records **nolto_ret,
		     modref_records_lto **lto_ret)
{
  /* Limits written first by write_modref_records.  */
  size_t max_bases = streamer_read_uhwi (ib);
  size_t max_refs = streamer_read_uhwi (ib);
  size_t max_accesses = streamer_read_uhwi (ib);

  /* Depending on the compilation stage we may populate the non-LTO tree
     (alias sets), the LTO tree (types), or both from one stream.  */
  if (lto_ret)
    *lto_ret = modref_records_lto::create_ggc (max_bases, max_refs,
					       max_accesses);
  if (nolto_ret)
    *nolto_ret = modref_records::create_ggc (max_bases, max_refs,
					     max_accesses);
  gcc_checking_assert (lto_ret || nolto_ret);

  size_t every_base = streamer_read_uhwi (ib);
  size_t nbase = streamer_read_uhwi (ib);

  /* A collapsed tree streams no individual bases.  */
  gcc_assert (!every_base || nbase == 0);
  if (every_base)
    {
      if (nolto_ret)
	(*nolto_ret)->collapse ();
      if (lto_ret)
	(*lto_ret)->collapse ();
    }
  for (size_t i = 0; i < nbase; i++)
    {
      tree base_tree = stream_read_tree (ib, data_in);
      modref_base_node <alias_set_type> *nolto_base_node = NULL;
      modref_base_node <tree> *lto_base_node = NULL;

      /* At stream in time we have LTO alias info.  Check if we streamed in
	 something obviously unnecessary.  Do not glob types by alias sets;
	 it is not 100% clear that ltrans types will get merged same way.
	 Types may get refined based on ODR type conflicts.  */
      if (base_tree && !get_alias_set (base_tree))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "Streamed in alias set 0 type ");
	      print_generic_expr (dump_file, base_tree);
	      fprintf (dump_file, "\n");
	    }
	  base_tree = NULL;
	}

      /* NULL base_tree inserts the "unknown" base (alias set 0).  */
      if (nolto_ret)
	nolto_base_node = (*nolto_ret)->insert_base (base_tree
						     ? get_alias_set (base_tree)
						     : 0);
      if (lto_ret)
	lto_base_node = (*lto_ret)->insert_base (base_tree);
      size_t every_ref = streamer_read_uhwi (ib);
      size_t nref = streamer_read_uhwi (ib);

      gcc_assert (!every_ref || nref == 0);
      if (every_ref)
	{
	  if (nolto_base_node)
	    nolto_base_node->collapse ();
	  if (lto_base_node)
	    lto_base_node->collapse ();
	}
      for (size_t j = 0; j < nref; j++)
	{
	  tree ref_tree = stream_read_tree (ib, data_in);

	  /* Same alias-set-0 sanity check as for bases above.  */
	  if (ref_tree && !get_alias_set (ref_tree))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Streamed in alias set 0 type ");
		  print_generic_expr (dump_file, ref_tree);
		  fprintf (dump_file, "\n");
		}
	      ref_tree = NULL;
	    }

	  modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
	  modref_ref_node <tree> *lto_ref_node = NULL;

	  if (nolto_base_node)
	    nolto_ref_node
	      = nolto_base_node->insert_ref (ref_tree
					     ? get_alias_set (ref_tree) : 0,
					     max_refs);
	  if (lto_base_node)
	    lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);

	  size_t every_access = streamer_read_uhwi (ib);
	  size_t naccesses = streamer_read_uhwi (ib);

	  if (nolto_ref_node)
	    nolto_ref_node->every_access = every_access;
	  if (lto_ref_node)
	    lto_ref_node->every_access = every_access;

	  for (size_t k = 0; k < naccesses; k++)
	    {
	      int parm_index = streamer_read_hwi (ib);
	      /* Initialize all fields up front: the stream omits them
		 when the access is not parameter based, and the
		 modref_access_node below must not be built from
		 uninitialized values.  */
	      bool parm_offset_known = false;
	      poly_int64 parm_offset = 0;
	      poly_int64 offset = 0;
	      poly_int64 size = -1;
	      poly_int64 max_size = -1;

	      if (parm_index != -1)
		{
		  parm_offset_known = streamer_read_uhwi (ib);
		  if (parm_offset_known)
		    {
		      parm_offset = streamer_read_poly_int64 (ib);
		      offset = streamer_read_poly_int64 (ib);
		      size = streamer_read_poly_int64 (ib);
		      max_size = streamer_read_poly_int64 (ib);
		    }
		}
	      modref_access_node a = {offset, size, max_size, parm_offset,
				      parm_index, parm_offset_known};
	      if (nolto_ref_node)
		nolto_ref_node->insert_access (a, max_accesses);
	      if (lto_ref_node)
		lto_ref_node->insert_access (a, max_accesses);
	    }
	}
    }
  /* NOTE(review): cleanup presumably prunes nodes made redundant by the
     collapses above — confirm against ipa-modref-tree.h.  */
  if (lto_ret)
    (*lto_ret)->cleanup ();
  if (nolto_ret)
    (*nolto_ret)->cleanup ();
}
1316
1317 /* Callback for write_summary. */
1318
1319 static void
1320 modref_write ()
1321 {
1322 struct output_block *ob = create_output_block (LTO_section_ipa_modref);
1323 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
1324 unsigned int count = 0;
1325 int i;
1326
1327 if (!summaries_lto)
1328 {
1329 streamer_write_uhwi (ob, 0);
1330 streamer_write_char_stream (ob->main_stream, 0);
1331 produce_asm (ob, NULL);
1332 destroy_output_block (ob);
1333 return;
1334 }
1335
1336 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
1337 {
1338 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
1339 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
1340 modref_summary_lto *r;
1341
1342 if (cnode && cnode->definition && !cnode->alias
1343 && (r = summaries_lto->get (cnode))
1344 && r->useful_p (flags_from_decl_or_type (cnode->decl)))
1345 count++;
1346 }
1347 streamer_write_uhwi (ob, count);
1348
1349 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
1350 {
1351 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
1352 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
1353
1354 if (cnode && cnode->definition && !cnode->alias)
1355 {
1356
1357 modref_summary_lto *r = summaries_lto->get (cnode);
1358
1359 if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
1360 continue;
1361
1362 streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));
1363
1364 write_modref_records (r->loads, ob);
1365 write_modref_records (r->stores, ob);
1366 }
1367 }
1368 streamer_write_char_stream (ob->main_stream, 0);
1369 produce_asm (ob, NULL);
1370 destroy_output_block (ob);
1371 }
1372
/* Read the modref summary section DATA of length LEN belonging to
   FILE_DATA and populate the active summary tables.  */

static void
read_section (struct lto_file_decl_data *file_data, const char *data,
	      size_t len)
{
  /* The section starts with the standard LTO function header followed
     by the main stream and the string table.  */
  const struct lto_function_header *header
    = (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int f_count;

  lto_input_block ib ((const char *) data + main_offset, header->main_size,
		      file_data->mode_table);

  data_in
    = lto_data_in_create (file_data, (const char *) data + string_offset,
			  header->string_size, vNULL);
  /* Number of function summaries in this section (see modref_write).  */
  f_count = streamer_read_uhwi (&ib);
  for (i = 0; i < f_count; i++)
    {
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      unsigned int index = streamer_read_uhwi (&ib);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));

      /* Depending on the compilation stage, the records go into the
	 GIMPLE summaries, the LTO summaries, or both (see modref_read).  */
      modref_summary *modref_sum = summaries
				   ? summaries->get_create (node) : NULL;
      modref_summary_lto *modref_sum_lto = summaries_lto
					   ? summaries_lto->get_create (node)
					   : NULL;

      if (optimization_summaries)
	modref_sum = optimization_summaries->get_create (node);

      /* Each node's summary must be freshly created; reading into an
	 already-populated summary would be a bug.  */
      gcc_assert (!modref_sum || (!modref_sum->loads
				  && !modref_sum->stores));
      gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads
				      && !modref_sum_lto->stores));
      read_modref_records (&ib, data_in,
			   modref_sum ? &modref_sum->loads : NULL,
			   modref_sum_lto ? &modref_sum_lto->loads : NULL);
      read_modref_records (&ib, data_in,
			   modref_sum ? &modref_sum->stores : NULL,
			   modref_sum_lto ? &modref_sum_lto->stores : NULL);
      if (dump_file)
	{
	  fprintf (dump_file, "Read modref for %s\n",
		   node->dump_name ());
	  if (modref_sum)
	    modref_sum->dump (dump_file);
	  if (modref_sum_lto)
	    modref_sum_lto->dump (dump_file);
	}
    }

  lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
1437
1438 /* Callback for read_summary. */
1439
1440 static void
1441 modref_read (void)
1442 {
1443 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1444 struct lto_file_decl_data *file_data;
1445 unsigned int j = 0;
1446
1447 gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto);
1448 if (flag_ltrans)
1449 optimization_summaries = modref_summaries::create_ggc (symtab);
1450 else
1451 {
1452 if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
1453 summaries_lto = modref_summaries_lto::create_ggc (symtab);
1454 if (!flag_wpa
1455 || (flag_incremental_link == INCREMENTAL_LINK_LTO
1456 && flag_fat_lto_objects))
1457 summaries = modref_summaries::create_ggc (symtab);
1458 }
1459
1460 while ((file_data = file_data_vec[j++]))
1461 {
1462 size_t len;
1463 const char *data = lto_get_summary_section_data (file_data,
1464 LTO_section_ipa_modref,
1465 &len);
1466 if (data)
1467 read_section (file_data, data, len);
1468 else
1469 /* Fatal error here. We do not want to support compiling ltrans units
1470 with different version of compiler or different flags than the WPA
1471 unit, so this should never happen. */
1472 fatal_error (input_location,
1473 "IPA modref summary is missing in input file");
1474 }
1475 }
1476
1477 /* Update parameter indexes in TT according to MAP. */
1478
1479 void
1480 remap_arguments (vec <int> *map, modref_records *tt)
1481 {
1482 size_t i;
1483 modref_base_node <alias_set_type> *base_node;
1484 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
1485 {
1486 size_t j;
1487 modref_ref_node <alias_set_type> *ref_node;
1488 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
1489 {
1490 size_t k;
1491 modref_access_node *access_node;
1492 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
1493 if (access_node->parm_index > 0)
1494 {
1495 if (access_node->parm_index < (int)map->length ())
1496 access_node->parm_index = (*map)[access_node->parm_index];
1497 else
1498 access_node->parm_index = -1;
1499 }
1500 }
1501 }
1502 }
1503
/* If signature changed, update the summary.  */

static unsigned int
modref_transform (struct cgraph_node *node)
{
  /* Only clones with adjusted parameters need any work, and only when
     optimization summaries are live.  */
  if (!node->clone.param_adjustments || !optimization_summaries)
    return 0;
  modref_summary *r = optimization_summaries->get (node);
  if (!r)
    return 0;
  if (dump_file)
    {
      fprintf (dump_file, "Updating summary for %s from:\n",
	       node->dump_name ());
      r->dump (dump_file);
    }

  size_t i, max = 0;
  ipa_adjusted_param *p;

  /* First determine the highest original parameter index referenced so
     the remap vector can be sized.  */
  FOR_EACH_VEC_SAFE_ELT (node->clone.param_adjustments->m_adj_params, i, p)
    {
      int idx = node->clone.param_adjustments->get_original_index (i);
      if (idx > (int)max)
	max = idx;
    }

  auto_vec <int, 32> map;

  /* Build map[original index] = new index; parameters not surviving in
     the clone stay at -1.  */
  map.reserve (max + 1);
  for (i = 0; i <= max; i++)
    map.quick_push (-1);
  FOR_EACH_VEC_SAFE_ELT (node->clone.param_adjustments->m_adj_params, i, p)
    {
      int idx = node->clone.param_adjustments->get_original_index (i);
      if (idx >= 0)
	map[idx] = i;
    }
  remap_arguments (&map, r->loads);
  remap_arguments (&map, r->stores);
  if (dump_file)
    {
      fprintf (dump_file, "to:\n");
      r->dump (dump_file);
    }
  return 0;
}
1551
/* Definition of the modref IPA pass (summary streaming and the clone
   transform hook are registered by pass_ipa_modref below).  */
const pass_data pass_data_ipa_modref =
{
  IPA_PASS,			/* type */
  "modref",			/* name */
  OPTGROUP_IPA,			/* optinfo_flags */
  TV_IPA_MODREF,		/* tv_id */
  0,				/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  ( TODO_dump_symtab ),		/* todo_flags_finish */
};
1565
1566 class pass_ipa_modref : public ipa_opt_pass_d
1567 {
1568 public:
1569 pass_ipa_modref (gcc::context *ctxt)
1570 : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
1571 modref_generate, /* generate_summary */
1572 modref_write, /* write_summary */
1573 modref_read, /* read_summary */
1574 modref_write, /* write_optimization_summary */
1575 modref_read, /* read_optimization_summary */
1576 NULL, /* stmt_fixup */
1577 0, /* function_transform_todo_flags_start */
1578 modref_transform,/* function_transform */
1579 NULL) /* variable_transform */
1580 {}
1581
1582 /* opt_pass methods: */
1583 opt_pass *clone () { return new pass_ipa_modref (m_ctxt); }
1584 virtual bool gate (function *)
1585 {
1586 return true;
1587 }
1588 virtual unsigned int execute (function *);
1589
1590 };
1591
1592 }
1593
1594 unsigned int pass_modref::execute (function *f)
1595 {
1596 analyze_function (f, false);
1597 return 0;
1598 }
1599
1600 gimple_opt_pass *
1601 make_pass_modref (gcc::context *ctxt)
1602 {
1603 return new pass_modref (ctxt);
1604 }
1605
1606 ipa_opt_pass_d *
1607 make_pass_ipa_modref (gcc::context *ctxt)
1608 {
1609 return new pass_ipa_modref (ctxt);
1610 }
1611
1612 /* Skip edges from and to nodes without ipa_pure_const enabled.
1613 Ignore not available symbols. */
1614
1615 static bool
1616 ignore_edge (struct cgraph_edge *e)
1617 {
1618 /* We merge summaries of inline clones into summaries of functions they
1619 are inlined to. For that reason the complete function bodies must
1620 act as unit. */
1621 if (!e->inline_failed)
1622 return false;
1623 enum availability avail;
1624 cgraph_node *callee = e->callee->function_or_virtual_thunk_symbol
1625 (&avail, e->caller);
1626
1627 return (avail <= AVAIL_INTERPOSABLE
1628 || ((!optimization_summaries || !optimization_summaries->get (callee))
1629 && (!summaries_lto || !summaries_lto->get (callee)))
1630 || flags_from_decl_or_type (e->callee->decl)
1631 & (ECF_CONST | ECF_NOVOPS));
1632 }
1633
/* Compute parm_map for CALLEE_EDGE.  Each entry maps an argument of the
   call to the caller's parameter index it is based on; -1 means unknown
   and -2 means the argument points to local or readonly memory whose
   accesses can be ignored.  */

static void
compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
{
  class ipa_edge_args *args;
  /* Jump functions are only available when IPA parameter analysis ran
     and the call statement is analyzable; otherwise PARM_MAP stays
     empty.  */
  if (ipa_node_params_sum
      && !callee_edge->call_stmt_cannot_inline_p
      && (args = IPA_EDGE_REF (callee_edge)) != NULL)
    {
      int i, count = ipa_get_cs_argument_count (args);
      class ipa_node_params *caller_parms_info, *callee_pi;
      class ipa_call_summary *es
	 = ipa_call_summaries->get (callee_edge);
      cgraph_node *callee
	 = callee_edge->callee->function_or_virtual_thunk_symbol
			 (NULL, callee_edge->caller);

      /* For edges from inline clones use parameter info of the function
	 they were inlined into.  */
      caller_parms_info = IPA_NODE_REF (callee_edge->caller->inlined_to
					? callee_edge->caller->inlined_to
					: callee_edge->caller);
      callee_pi = IPA_NODE_REF (callee);

      (*parm_map).safe_grow_cleared (count);

      for (i = 0; i < count; i++)
	{
	  /* The call summary may already know the argument points to
	     local/readonly memory.  */
	  if (es && es->param[i].points_to_local_or_readonly_memory)
	    {
	      (*parm_map)[i].parm_index = -2;
	      continue;
	    }

	  struct ipa_jump_func *jf
	     = ipa_get_ith_jump_func (args, i);
	  if (jf && callee_pi)
	    {
	      tree cst = ipa_value_from_jfunc (caller_parms_info,
					       jf,
					       ipa_get_type
						 (callee_pi, i));
	      if (cst && points_to_local_or_readonly_memory_p (cst))
		{
		  (*parm_map)[i].parm_index = -2;
		  continue;
		}
	    }
	  if (jf && jf->type == IPA_JF_PASS_THROUGH)
	    {
	      (*parm_map)[i].parm_index
		= ipa_get_jf_pass_through_formal_id (jf);
	      /* A plain copy passes the pointer unchanged; pointer
		 arithmetic is usable only for known constant offsets.  */
	      if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
		{
		  (*parm_map)[i].parm_offset_known = true;
		  (*parm_map)[i].parm_offset = 0;
		}
	      else if (ipa_get_jf_pass_through_operation (jf)
		       == POINTER_PLUS_EXPR
		       && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
					  &(*parm_map)[i].parm_offset))
		(*parm_map)[i].parm_offset_known = true;
	      else
		(*parm_map)[i].parm_offset_known = false;
	      continue;
	    }
	  if (jf && jf->type == IPA_JF_ANCESTOR)
	    {
	      (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
	      (*parm_map)[i].parm_offset_known = true;
	      /* Ancestor offsets are in bits and must be byte aligned.  */
	      gcc_checking_assert
		(!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)));
	      (*parm_map)[i].parm_offset
		= ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT;
	    }
	  else
	    (*parm_map)[i].parm_index = -1;
	}
      if (dump_file)
	{
	  fprintf (dump_file, "  Parm map: ");
	  for (i = 0; i < count; i++)
	    fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
	  fprintf (dump_file, "\n");
	}
    }
}
1720
/* Call EDGE was inlined; merge summary from callee to the caller.  */

void
ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
{
  if (!summaries && !summaries_lto)
    return;

  /* For nested inlining merge into the summary of the function
     everything was ultimately inlined to.  */
  struct cgraph_node *to = (edge->caller->inlined_to
			    ? edge->caller->inlined_to : edge->caller);
  class modref_summary *to_info = summaries ? summaries->get (to) : NULL;
  class modref_summary_lto *to_info_lto = summaries_lto
					  ? summaries_lto->get (to) : NULL;

  /* Caller has no summary: nothing to merge into; the callee's summary
     is dead now.  */
  if (!to_info && !to_info_lto)
    {
      if (summaries)
	summaries->remove (edge->callee);
      if (summaries_lto)
	summaries_lto->remove (edge->callee);
      return;
    }

  class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
				      : NULL;
  class modref_summary_lto *callee_info_lto
    = summaries_lto ? summaries_lto->get (edge->callee) : NULL;
  int flags = flags_from_decl_or_type (edge->callee->decl);

  /* Callee has no summary: if its stores can be ignored we merely lose
     track of loads; otherwise the caller's summary is no longer valid.  */
  if (!callee_info && to_info)
    {
      if (ignore_stores_p (edge->caller->decl, flags))
	to_info->loads->collapse ();
      else
	{
	  summaries->remove (to);
	  to_info = NULL;
	}
    }
  if (!callee_info_lto && to_info_lto)
    {
      if (ignore_stores_p (edge->caller->decl, flags))
	to_info_lto->loads->collapse ();
      else
	{
	  summaries_lto->remove (to);
	  to_info_lto = NULL;
	}
    }
  if (callee_info || callee_info_lto)
    {
      auto_vec <modref_parm_map, 32> parm_map;

      /* Translate callee parameter indexes into the caller's.  */
      compute_parm_map (edge, &parm_map);

      if (!ignore_stores_p (edge->caller->decl, flags))
	{
	  if (to_info && callee_info)
	    to_info->stores->merge (callee_info->stores, &parm_map);
	  if (to_info_lto && callee_info_lto)
	    to_info_lto->stores->merge (callee_info_lto->stores, &parm_map);
	}
      if (to_info && callee_info)
	to_info->loads->merge (callee_info->loads, &parm_map);
      if (to_info_lto && callee_info_lto)
	to_info_lto->loads->merge (callee_info_lto->loads, &parm_map);
    }
  /* Drop the merged summary when it became useless; in any case the
     callee's standalone summary is no longer needed.  */
  if (summaries)
    {
      if (to_info && !to_info->useful_p (flags))
	{
	  if (dump_file)
	    fprintf (dump_file, "Removed mod-ref summary for %s\n",
		     to->dump_name ());
	  summaries->remove (to);
	}
      else if (to_info && dump_file)
	{
	  if (dump_file)
	    fprintf (dump_file, "Updated mod-ref summary for %s\n",
		     to->dump_name ());
	  to_info->dump (dump_file);
	}
      if (callee_info)
	summaries->remove (edge->callee);
    }
  if (summaries_lto)
    {
      if (to_info_lto && !to_info_lto->useful_p (flags))
	{
	  if (dump_file)
	    fprintf (dump_file, "Removed mod-ref summary for %s\n",
		     to->dump_name ());
	  summaries_lto->remove (to);
	}
      else if (to_info_lto && dump_file)
	{
	  if (dump_file)
	    fprintf (dump_file, "Updated mod-ref summary for %s\n",
		     to->dump_name ());
	  to_info_lto->dump (dump_file);
	}
      if (callee_info_lto)
	summaries_lto->remove (edge->callee);
    }
  return;
}
1828
1829 /* Collapse loads and return true if something changed. */
1830
1831 bool
1832 collapse_loads (modref_summary *cur_summary,
1833 modref_summary_lto *cur_summary_lto)
1834 {
1835 bool changed = false;
1836
1837 if (cur_summary && !cur_summary->loads->every_base)
1838 {
1839 cur_summary->loads->collapse ();
1840 changed = true;
1841 }
1842 if (cur_summary_lto
1843 && !cur_summary_lto->loads->every_base)
1844 {
1845 cur_summary_lto->loads->collapse ();
1846 changed = true;
1847 }
1848 return changed;
1849 }
1850
/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE.  */

static void
modref_propagate_in_scc (cgraph_node *component_node)
{
  bool changed = true;
  int iteration = 0;

  /* Iterate to a fixpoint: each pass merges callee summaries into the
     callers within the component.  */
  while (changed)
    {
      changed = false;
      for (struct cgraph_node *cur = component_node; cur;
	   cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
	{
	  /* Inline clones share the summary of the function they were
	     inlined to.  */
	  cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
	  modref_summary *cur_summary = optimization_summaries
					? optimization_summaries->get (node)
					: NULL;
	  modref_summary_lto *cur_summary_lto = summaries_lto
						? summaries_lto->get (node)
						: NULL;

	  if (!cur_summary && !cur_summary_lto)
	    continue;

	  if (dump_file)
	    fprintf (dump_file, "  Processing %s%s%s\n",
		     cur->dump_name (),
		     TREE_READONLY (cur->decl) ? " (const)" : "",
		     DECL_PURE_P (cur->decl) ? " (pure)" : "");

	  /* Indirect calls have no callee summary; be conservative.  */
	  for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
	    {
	      if (e->indirect_info->ecf_flags & (ECF_CONST | ECF_NOVOPS))
		continue;
	      if (ignore_stores_p (cur->decl, e->indirect_info->ecf_flags))
		{
		  if (dump_file)
		    fprintf (dump_file, "    Indirect call: "
			     "collapsing loads\n");
		  changed |= collapse_loads (cur_summary, cur_summary_lto);
		}
	      else
		{
		  /* Stores cannot be ignored: the summary is useless.  */
		  if (dump_file)
		    fprintf (dump_file, "    Indirect call: giving up\n");
		  if (optimization_summaries)
		    optimization_summaries->remove (node);
		  if (summaries_lto)
		    summaries_lto->remove (node);
		  changed = true;
		  cur_summary = NULL;
		  cur_summary_lto = NULL;
		  break;
		}
	    }

	  if (!cur_summary && !cur_summary_lto)
	    continue;

	  for (cgraph_edge *callee_edge = cur->callees; callee_edge;
	       callee_edge = callee_edge->next_callee)
	    {
	      int flags = flags_from_decl_or_type (callee_edge->callee->decl);
	      modref_summary *callee_summary = NULL;
	      modref_summary_lto *callee_summary_lto = NULL;
	      struct cgraph_node *callee;

	      /* Const/novops calls touch no tracked memory; calls to
		 inline clones were merged already.  */
	      if (flags & (ECF_CONST | ECF_NOVOPS)
		  || !callee_edge->inline_failed)
		continue;

	      /* Get the callee and its summary.  */
	      enum availability avail;
	      callee = callee_edge->callee->function_or_virtual_thunk_symbol
			 (&avail, cur);

	      /* It is not necessary to re-process calls outside of the
		 SCC component.  */
	      if (iteration > 0
		  && (!callee->aux
		      || ((struct ipa_dfs_info *)cur->aux)->scc_no
			  != ((struct ipa_dfs_info *)callee->aux)->scc_no))
		continue;

	      if (dump_file)
		fprintf (dump_file, "    Call to %s\n",
			 callee_edge->callee->dump_name ());

	      bool ignore_stores = ignore_stores_p (cur->decl, flags);

	      if (avail <= AVAIL_INTERPOSABLE)
		{
		  if (!ignore_stores)
		    {
		      if (dump_file)
			fprintf (dump_file, "      Call target interposable"
				 " or not available\n");

		      if (optimization_summaries)
			optimization_summaries->remove (node);
		      if (summaries_lto)
			summaries_lto->remove (node);
		      cur_summary = NULL;
		      cur_summary_lto = NULL;
		      changed = true;
		      break;
		    }
		  else
		    {
		      if (dump_file)
			fprintf (dump_file, "      Call target interposable"
				 " or not available; collapsing loads\n");

		      changed |= collapse_loads (cur_summary, cur_summary_lto);
		      continue;
		    }
		}

	      /* We don't know anything about CALLEE, hence we cannot tell
		 anything about the entire component.  */

	      if (cur_summary
		  && !(callee_summary = optimization_summaries->get (callee)))
		{
		  if (!ignore_stores)
		    {
		      if (dump_file)
			fprintf (dump_file, "      No call target summary\n");

		      optimization_summaries->remove (node);
		      cur_summary = NULL;
		      changed = true;
		    }
		  else
		    {
		      if (dump_file)
			fprintf (dump_file, "      No call target summary;"
				 " collapsing loads\n");

		      if (!cur_summary->loads->every_base)
			{
			  cur_summary->loads->collapse ();
			  changed = true;
			}
		    }
		}
	      if (cur_summary_lto
		  && !(callee_summary_lto = summaries_lto->get (callee)))
		{
		  if (!ignore_stores)
		    {
		      if (dump_file)
			fprintf (dump_file, "      No call target summary\n");

		      summaries_lto->remove (node);
		      cur_summary_lto = NULL;
		      changed = true;
		    }
		  else
		    {
		      if (dump_file)
			fprintf (dump_file, "      No call target summary;"
				 " collapsing loads\n");

		      if (!cur_summary_lto->loads->every_base)
			{
			  cur_summary_lto->loads->collapse ();
			  changed = true;
			}
		    }
		}

	      /* We can not safely optimize based on summary of callee if it
		 does not always bind to current def: it is possible that
		 memory load was optimized out earlier which may not happen in
		 the interposed variant.  */
	      if (!callee_edge->binds_to_current_def_p ())
		{
		  changed |= collapse_loads (cur_summary, cur_summary_lto);
		  if (dump_file)
		    fprintf (dump_file, "      May not bind local;"
			     " collapsing loads\n");
		}


	      auto_vec <modref_parm_map, 32> parm_map;

	      compute_parm_map (callee_edge, &parm_map);

	      /* Merge in callee's information.  */
	      if (callee_summary)
		{
		  changed |= cur_summary->loads->merge
			      (callee_summary->loads, &parm_map);
		  if (!ignore_stores)
		    changed |= cur_summary->stores->merge
				(callee_summary->stores, &parm_map);
		}
	      if (callee_summary_lto)
		{
		  changed |= cur_summary_lto->loads->merge
			      (callee_summary_lto->loads, &parm_map);
		  if (!ignore_stores)
		    changed |= cur_summary_lto->stores->merge
				(callee_summary_lto->stores, &parm_map);
		}
	      if (dump_file && changed)
		{
		  if (cur_summary)
		    cur_summary->dump (dump_file);
		  if (cur_summary_lto)
		    cur_summary_lto->dump (dump_file);
		}
	    }
	}
      iteration++;
    }
  if (dump_file)
    {
      fprintf (dump_file,
	       "Propagation finished in %i iterations\n", iteration);
      for (struct cgraph_node *cur = component_node; cur;
	   cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
	if (!cur->inlined_to)
	  {
	    modref_summary *cur_summary = optimization_summaries
					  ? optimization_summaries->get (cur)
					  : NULL;
	    modref_summary_lto *cur_summary_lto = summaries_lto
						  ? summaries_lto->get (cur)
						  : NULL;

	    fprintf (dump_file, "Propagated modref for %s%s%s\n",
		     cur->dump_name (),
		     TREE_READONLY (cur->decl) ? " (const)" : "",
		     DECL_PURE_P (cur->decl) ? " (pure)" : "");
	    if (optimization_summaries)
	      {
		if (cur_summary)
		  cur_summary->dump (dump_file);
		else
		  fprintf (dump_file, "  Not tracked\n");
	      }
	    if (summaries_lto)
	      {
		if (cur_summary_lto)
		  cur_summary_lto->dump (dump_file);
		else
		  fprintf (dump_file, "  Not tracked (lto)\n");
	      }
	  }
    }
}
2105
2106 /* Run the IPA pass. This will take a function's summaries and calls and
2107 construct new summaries which represent a transitive closure. So that
2108 summary of an analyzed function contains information about the loads and
2109 stores that the function or any function that it calls does. */
2110
2111 unsigned int
2112 pass_ipa_modref::execute (function *)
2113 {
2114 if (!summaries && !summaries_lto)
2115 return 0;
2116
2117 if (optimization_summaries)
2118 ggc_delete (optimization_summaries);
2119 optimization_summaries = summaries;
2120 summaries = NULL;
2121
2122 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
2123 symtab->cgraph_count);
2124 int order_pos;
2125 order_pos = ipa_reduced_postorder (order, true, ignore_edge);
2126 int i;
2127
2128 /* Iterate over all strongly connected components in post-order. */
2129 for (i = 0; i < order_pos; i++)
2130 {
2131 /* Get the component's representative. That's just any node in the
2132 component from which we can traverse the entire component. */
2133 struct cgraph_node *component_node = order[i];
2134
2135 if (dump_file)
2136 fprintf (dump_file, "\n\nStart of SCC component\n");
2137
2138 modref_propagate_in_scc (component_node);
2139 }
2140 if (summaries_lto)
2141 ((modref_summaries_lto *)summaries_lto)->propagated = true;
2142 ipa_free_postorder_info ();
2143 free (order);
2144 return 0;
2145 }
2146
2147 /* Summaries must stay alive until end of compilation. */
2148
2149 void
2150 ipa_modref_c_finalize ()
2151 {
2152 if (optimization_summaries)
2153 ggc_delete (optimization_summaries);
2154 optimization_summaries = NULL;
2155 gcc_checking_assert (!summaries);
2156 if (summaries_lto)
2157 {
2158 ggc_delete (summaries_lto);
2159 summaries_lto = NULL;
2160 }
2161 }
2162
2163 #include "gt-ipa-modref.h"