/* Search for references that a function loads or stores.
   Copyright (C) 2020 Free Software Foundation, Inc.
   Contributed by David Cepelik and Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The mod/ref pass records a summary of the loads and stores performed by
   each function.  This is later used by alias analysis to disambiguate
   memory accesses across function calls.  The summary has the form of a
   decision tree described in ipa-modref-tree.h.

   This file contains a tree pass and an IPA pass.  Both perform the same
   analysis; however, the tree pass is executed during early and late
   optimization passes to propagate info downwards in the compilation order,
   while the IPA pass propagates across the callgraph, is able to handle
   recursion, and works on the whole program during link-time analysis.

   LTO mode differs from the local mode by recording types rather than alias
   sets; the types are translated to alias sets later.  This is necessary in
   order to stream the information, because alias sets are rebuilt at
   stream-in time and may not correspond to the ones seen during analysis.
   For this reason part of the analysis is duplicated.  */
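
/* As a hedged illustration of what the summaries capture (example only,
   not part of the pass):

     int global;
     void f (int *p) { *p = global; }

   For F the pass records a load from GLOBAL (keyed by alias set, or by type
   in LTO mode) and a store whose base is parameter 0 at offset 0.  Alias
   analysis can later use the summary to prove that a call to F does not
   touch memory that cannot alias *P or GLOBAL.  */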

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "gimple-iterator.h"
#include "tree-dfa.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "gimple-pretty-print.h"
#include "gimple-walk.h"
#include "print-tree.h"
#include "tree-streamer.h"
#include "alias.h"
#include "calls.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "value-range.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"

/* Class (of which there is one global instance) that holds modref summaries
   for all analyzed functions.  */
class GTY((user)) modref_summaries
  : public fast_function_summary <modref_summary *, va_gc>
{
public:
  modref_summaries (symbol_table *symtab)
      : fast_function_summary <modref_summary *, va_gc> (symtab) {}
  virtual void insert (cgraph_node *, modref_summary *state);
  virtual void duplicate (cgraph_node *src_node,
                          cgraph_node *dst_node,
                          modref_summary *src_data,
                          modref_summary *dst_data);
  /* This flag controls whether newly inserted functions should be analyzed
     in IPA or normal mode.  Functions inserted between IPA analysis and
     ipa-modref pass execution need to be analyzed in IPA mode while all
     other insertions lead to normal analysis.  */
  bool ipa;
  static modref_summaries *create_ggc (symbol_table *symtab)
  {
    return new (ggc_alloc_no_dtor<modref_summaries> ())
             modref_summaries (symtab);
  }
};

/* Global variable holding all modref summaries.  */
static GTY(()) fast_function_summary <modref_summary *, va_gc> *summaries;

/* Summary for a single function which this pass produces.  */

modref_summary::modref_summary ()
  : loads (NULL), stores (NULL), loads_lto (NULL),
    stores_lto (NULL), finished (0)
{
}

modref_summary::~modref_summary ()
{
  if (loads)
    ggc_delete (loads);
  if (stores)
    ggc_delete (stores);
  if (loads_lto)
    ggc_delete (loads_lto);
  if (stores_lto)
    ggc_delete (stores_lto);
}

/* Return true if lto summary is potentially useful for optimization.  */

bool
modref_summary::lto_useful_p (int ecf_flags)
{
  if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
    return false;
  if (loads_lto && !loads_lto->every_base)
    return true;
  if (ecf_flags & ECF_PURE)
    return false;
  return stores_lto && !stores_lto->every_base;
}

/* Return true if summary is potentially useful for optimization.  */

bool
modref_summary::useful_p (int ecf_flags)
{
  if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
    return false;
  if (lto_useful_p (ecf_flags))
    return true;
  if (loads && !loads->every_base)
    return true;
  if (ecf_flags & ECF_PURE)
    return false;
  return stores && !stores->every_base;
}

/* Dump A to OUT.  */

static void
dump_access (modref_access_node *a, FILE *out)
{
  fprintf (out, " access:");
  if (a->parm_index != -1)
    {
      fprintf (out, " Parm %i", a->parm_index);
      if (a->parm_offset_known)
        {
          fprintf (out, " param offset:");
          print_dec ((poly_int64_pod)a->parm_offset, out, SIGNED);
        }
    }
  if (a->range_info_useful_p ())
    {
      fprintf (out, " offset:");
      print_dec ((poly_int64_pod)a->offset, out, SIGNED);
      fprintf (out, " size:");
      print_dec ((poly_int64_pod)a->size, out, SIGNED);
      fprintf (out, " max_size:");
      print_dec ((poly_int64_pod)a->max_size, out, SIGNED);
    }
  fprintf (out, "\n");
}

/* Dump records TT to OUT.  */

static void
dump_records (modref_records *tt, FILE *out)
{
  fprintf (out, " Limits: %i bases, %i refs\n",
           (int)tt->max_bases, (int)tt->max_refs);
  if (tt->every_base)
    {
      fprintf (out, " Every base\n");
      return;
    }
  size_t i;
  modref_base_node <alias_set_type> *n;
  FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
    {
      fprintf (out, " Base %i: alias set %i\n", (int)i, n->base);
      if (n->every_ref)
        {
          fprintf (out, " Every ref\n");
          continue;
        }
      size_t j;
      modref_ref_node <alias_set_type> *r;
      FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
        {
          fprintf (out, " Ref %i: alias set %i\n", (int)j, r->ref);
          if (r->every_access)
            {
              fprintf (out, " Every access\n");
              continue;
            }
          size_t k;
          modref_access_node *a;
          FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
            dump_access (a, out);
        }
    }
}

/* Dump records TT to OUT.  */

static void
dump_lto_records (modref_records_lto *tt, FILE *out)
{
  fprintf (out, " Limits: %i bases, %i refs\n",
           (int)tt->max_bases, (int)tt->max_refs);
  if (tt->every_base)
    {
      fprintf (out, " Every base\n");
      return;
    }
  size_t i;
  modref_base_node <tree> *n;
  FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
    {
      fprintf (out, " Base %i:", (int)i);
      print_generic_expr (out, n->base);
      fprintf (out, " (alias set %i)\n",
               n->base ? get_alias_set (n->base) : 0);
      if (n->every_ref)
        {
          fprintf (out, " Every ref\n");
          continue;
        }
      size_t j;
      modref_ref_node <tree> *r;
      FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
        {
          fprintf (out, " Ref %i:", (int)j);
          print_generic_expr (out, r->ref);
          fprintf (out, " (alias set %i)\n",
                   r->ref ? get_alias_set (r->ref) : 0);
          if (r->every_access)
            {
              fprintf (out, " Every access\n");
              continue;
            }
          size_t k;
          modref_access_node *a;
          FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
            dump_access (a, out);
        }
    }
}

/* Dump summary.  */

void
modref_summary::dump (FILE *out)
{
  if (loads)
    {
      fprintf (out, " loads:\n");
      dump_records (loads, out);
    }
  if (stores)
    {
      fprintf (out, " stores:\n");
      dump_records (stores, out);
    }
  if (loads_lto)
    {
      fprintf (out, " LTO loads:\n");
      dump_lto_records (loads_lto, out);
    }
  if (stores_lto)
    {
      fprintf (out, " LTO stores:\n");
      dump_lto_records (stores_lto, out);
    }
}


/* Get function summary for FUNC if it exists, return NULL otherwise.  */

modref_summary *
get_modref_function_summary (cgraph_node *func)
{
  /* Avoid creation of the summary too early (e.g. when front-end calls us).  */
  if (!summaries)
    return NULL;

  /* A single function body may be represented by multiple symbols with
     different visibility.  For example, if FUNC is an interposable alias,
     we don't want to return anything, even if we have summary for the target
     function.  */
  enum availability avail;
  func = func->function_or_virtual_thunk_symbol
             (&avail, cgraph_node::get (current_function_decl));
  if (avail <= AVAIL_INTERPOSABLE)
    return NULL;

  /* Attempt to get summary for FUNC.  If analysis of FUNC hasn't finished yet,
     don't return anything.  */
  modref_summary *r = summaries->get (func);
  if (r && r->finished)
    return r;

  return NULL;
}

/* Construct modref_access_node from REF.  */

static modref_access_node
get_access (ao_ref *ref)
{
  tree base;

  base = ao_ref_base (ref);
  modref_access_node a = {ref->offset, ref->size, ref->max_size,
                          0, -1, false};
  if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree memref = base;
      base = TREE_OPERAND (base, 0);
      if (TREE_CODE (base) == SSA_NAME
          && SSA_NAME_IS_DEFAULT_DEF (base)
          && TREE_CODE (SSA_NAME_VAR (base)) == PARM_DECL)
        {
          a.parm_index = 0;
          for (tree t = DECL_ARGUMENTS (current_function_decl);
               t != SSA_NAME_VAR (base); t = DECL_CHAIN (t))
            {
              if (!t)
                {
                  a.parm_index = -1;
                  break;
                }
              a.parm_index++;
            }
          if (TREE_CODE (memref) == MEM_REF)
            {
              a.parm_offset_known
                 = wi::to_poly_wide (TREE_OPERAND
                                        (memref, 1)).to_shwi (&a.parm_offset);
            }
          else
            a.parm_offset_known = false;
        }
      else
        a.parm_index = -1;
    }
  else
    a.parm_index = -1;
  return a;
}
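
/* A hedged example of the mapping above (illustration only): for

     void f (char *p) { p[4] = 0; }

   the store's base is a MEM_REF whose pointer operand is the default
   definition of P, so the access node gets parm_index 0; because the
   MEM_REF offset is a compile-time constant, parm_offset_known is true
   with parm_offset 4.  */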

/* Record access into the modref_records data structure.  */

static void
record_access (modref_records *tt, ao_ref *ref)
{
  alias_set_type base_set = !flag_strict_aliasing ? 0
                            : ao_ref_base_alias_set (ref);
  alias_set_type ref_set = !flag_strict_aliasing ? 0
                           : (ao_ref_alias_set (ref));
  modref_access_node a = get_access (ref);
  if (dump_file)
    {
      fprintf (dump_file, " - Recording base_set=%i ref_set=%i parm=%i\n",
               base_set, ref_set, a.parm_index);
    }
  tt->insert (base_set, ref_set, a);
}

/* IPA version of record_access.  */

static void
record_access_lto (modref_records_lto *tt, ao_ref *ref)
{
  /* get_alias_set sometimes uses a different type to compute the alias set
     than TREE_TYPE (base).  Do the same adjustments here.  */
  tree base_type = NULL_TREE, ref_type = NULL_TREE;
  if (flag_strict_aliasing)
    {
      tree base;

      base = ref->ref;
      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);

      base_type = reference_alias_ptr_type_1 (&base);

      if (!base_type)
        base_type = TREE_TYPE (base);
      else
        base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
                    ? NULL_TREE : TREE_TYPE (base_type);

      tree ref_expr = ref->ref;
      ref_type = reference_alias_ptr_type_1 (&ref_expr);

      if (!ref_type)
        ref_type = TREE_TYPE (ref_expr);
      else
        ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
                   ? NULL_TREE : TREE_TYPE (ref_type);

      /* Sanity check that we are in sync with what get_alias_set does.  */
      gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
                           || get_alias_set (base_type)
                              == ao_ref_base_alias_set (ref));
      gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
                           || get_alias_set (ref_type)
                              == ao_ref_alias_set (ref));

      /* Do not bother to record types that have no meaningful alias set.
         Also skip variably modified types since these go to local streams.  */
      if (base_type && (!get_alias_set (base_type)
                        || variably_modified_type_p (base_type, NULL_TREE)))
        base_type = NULL_TREE;
      if (ref_type && (!get_alias_set (ref_type)
                       || variably_modified_type_p (ref_type, NULL_TREE)))
        ref_type = NULL_TREE;
    }
  modref_access_node a = get_access (ref);
  if (dump_file)
    {
      fprintf (dump_file, " - Recording base type:");
      print_generic_expr (dump_file, base_type);
      fprintf (dump_file, " (alias set %i) ref type:",
               base_type ? get_alias_set (base_type) : 0);
      print_generic_expr (dump_file, ref_type);
      fprintf (dump_file, " (alias set %i) parm:%i\n",
               ref_type ? get_alias_set (ref_type) : 0,
               a.parm_index);
    }

  tt->insert (base_type, ref_type, a);
}

/* Returns true if and only if we should store the access to EXPR.
   Some accesses, e.g. loads from automatic variables, are not interesting.  */

static bool
record_access_p (tree expr)
{
  if (refs_local_or_readonly_memory_p (expr))
    {
      if (dump_file)
        fprintf (dump_file, " - Read-only or local, ignoring.\n");
      return false;
    }
  return true;
}

/* Return true if ECF flags say that stores can be ignored.  */

static bool
ignore_stores_p (tree caller, int flags)
{
  if (flags & ECF_PURE)
    return true;
  if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
      || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
    return true;
  return false;
}
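
/* A hedged example of the noreturn case above (illustration only):

     void fail (char *buf) __attribute__ ((noreturn, nothrow));

   Stores performed by FAIL can be disregarded in the caller's summary:
   control never returns past the call and, being nothrow, no exception
   handler in the caller can observe the stored values either.  */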

/* Merge side effects of call STMT to function with CALLEE_SUMMARY
   into CUR_SUMMARY.  Return true if something changed.
   If IGNORE_STORES is true, do not merge stores.  */

bool
merge_call_side_effects (modref_summary *cur_summary,
                         gimple *stmt, modref_summary *callee_summary,
                         bool ignore_stores)
{
  auto_vec <modref_parm_map, 32> parm_map;
  bool changed = false;

  parm_map.safe_grow (gimple_call_num_args (stmt));
  for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree op = gimple_call_arg (stmt, i);
      STRIP_NOPS (op);
      if (TREE_CODE (op) == SSA_NAME
          && SSA_NAME_IS_DEFAULT_DEF (op)
          && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
        {
          int index = 0;
          for (tree t = DECL_ARGUMENTS (current_function_decl);
               t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
            {
              if (!t)
                {
                  index = -1;
                  break;
                }
              index++;
            }
          parm_map[i].parm_index = index;
          parm_map[i].parm_offset_known = true;
          parm_map[i].parm_offset = 0;
        }
      else if (points_to_local_or_readonly_memory_p (op))
        parm_map[i].parm_index = -2;
      else
        parm_map[i].parm_index = -1;
    }

  /* Merge with callee's summary.  */
  if (cur_summary->loads)
    changed |= cur_summary->loads->merge (callee_summary->loads, &parm_map);
  if (cur_summary->loads_lto)
    changed |= cur_summary->loads_lto->merge (callee_summary->loads_lto,
                                              &parm_map);
  if (!ignore_stores)
    {
      if (cur_summary->stores)
        changed |= cur_summary->stores->merge (callee_summary->stores,
                                               &parm_map);
      if (cur_summary->stores_lto)
        changed |= cur_summary->stores_lto->merge (callee_summary->stores_lto,
                                                   &parm_map);
    }
  return changed;
}
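
/* A hedged example of the parameter mapping built above (illustration
   only):

     void callee (int *q);
     void caller (int *p) { callee (p); }

   The call passes CALLER's own parameter 0 through unchanged, so
   PARM_MAP[0] becomes {parm_index 0, parm_offset 0} and CALLEE's accesses
   through Q are remapped to accesses through P.  An argument pointing to
   non-escaping local or read-only memory gets parm_index -2 (accesses
   through it are ignorable), and anything else gets -1 (unknown base).  */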

/* Analyze function call STMT and merge its side effects into CUR_SUMMARY.
   Remember recursive calls in RECURSIVE_CALLS.  */

static bool
analyze_call (modref_summary *cur_summary,
              gimple *stmt, vec <gimple *> *recursive_calls)
{
  /* Check flags on the function call.  In certain cases, analysis can be
     simplified.  */
  int flags = gimple_call_flags (stmt);
  if (flags & (ECF_CONST | ECF_NOVOPS))
    {
      if (dump_file)
        fprintf (dump_file,
                 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
                 "except for args.\n");
      return true;
    }

  /* Pure functions do not affect global memory.  Stores by functions which are
     noreturn and do not throw can safely be ignored.  */
  bool ignore_stores = ignore_stores_p (current_function_decl, flags);

  /* Next, we try to get the callee's function declaration.  The goal is to
     merge their summary with ours.  */
  tree callee = gimple_call_fndecl (stmt);

  /* Check if this is an indirect call.  */
  if (!callee)
    {
      /* If the indirect call does not write memory, our store summary is
         unaffected, but we have to discard our loads summary (we don't know
         anything about the loads that the called function performs).  */
      if (ignore_stores)
        {
          if (dump_file)
            fprintf (dump_file, " - Indirect call which does not write memory, "
                     "discarding loads.\n");
          if (cur_summary->loads)
            cur_summary->loads->collapse ();
          if (cur_summary->loads_lto)
            cur_summary->loads_lto->collapse ();
          return true;
        }
      if (dump_file)
        fprintf (dump_file, " - Indirect call.\n");
      return false;
    }

  struct cgraph_node *callee_node = cgraph_node::get_create (callee);

  /* We cannot safely optimize based on summary of the callee if it does
     not always bind to current def: it is possible that a memory load
     was optimized out earlier which may not happen in the interposed
     variant.  */
  if (!callee_node->binds_to_current_def_p ())
    {
      if (dump_file)
        fprintf (dump_file, " - May be interposed: collapsing loads.\n");
      if (cur_summary->loads)
        cur_summary->loads->collapse ();
      if (cur_summary->loads_lto)
        cur_summary->loads_lto->collapse ();
    }

  /* If this is a recursive call, the target summary is the same as ours, so
     there's nothing to do.  */
  if (recursive_call_p (current_function_decl, callee))
    {
      recursive_calls->safe_push (stmt);
      if (dump_file)
        fprintf (dump_file, " - Skipping recursive call.\n");
      return true;
    }

  gcc_assert (callee_node != NULL);

  /* Get the function symbol and its availability.  */
  enum availability avail;
  callee_node = callee_node->function_symbol (&avail);
  if (avail <= AVAIL_INTERPOSABLE)
    {
      /* Keep stores summary, but discard all loads for interposable function
         symbols.  */
      if (ignore_stores)
        {
          if (cur_summary->loads)
            cur_summary->loads->collapse ();
          if (cur_summary->loads_lto)
            cur_summary->loads_lto->collapse ();
          return true;
        }
      if (dump_file)
        fprintf (dump_file, " - Function availability <= AVAIL_INTERPOSABLE.\n");
      return false;
    }

  /* Get callee's modref summary.  As above, if there's no summary, we either
     have to give up or, if stores are ignored, we can just purge loads.  */
  modref_summary *callee_summary = summaries->get (callee_node);
  if (!callee_summary)
    {
      if (ignore_stores)
        {
          if (cur_summary->loads)
            cur_summary->loads->collapse ();
          if (cur_summary->loads_lto)
            cur_summary->loads_lto->collapse ();
          return true;
        }
      if (dump_file)
        fprintf (dump_file, " - No modref summary available for callee.\n");
      return false;
    }

  merge_call_side_effects (cur_summary, stmt, callee_summary, ignore_stores);

  return true;
}

/* Helper for analyze_stmt.  */

static bool
analyze_load (gimple *, tree, tree op, void *data)
{
  modref_summary *summary = (modref_summary *)data;

  if (dump_file)
    {
      fprintf (dump_file, " - Analyzing load: ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, "\n");
    }

  if (!record_access_p (op))
    return false;

  ao_ref r;
  ao_ref_init (&r, op);

  if (summary->loads)
    record_access (summary->loads, &r);
  if (summary->loads_lto)
    record_access_lto (summary->loads_lto, &r);
  return false;
}

/* Helper for analyze_stmt.  */

static bool
analyze_store (gimple *, tree, tree op, void *data)
{
  modref_summary *summary = (modref_summary *)data;

  if (dump_file)
    {
      fprintf (dump_file, " - Analyzing store: ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, "\n");
    }

  if (!record_access_p (op))
    return false;

  ao_ref r;
  ao_ref_init (&r, op);

  if (summary->stores)
    record_access (summary->stores, &r);
  if (summary->stores_lto)
    record_access_lto (summary->stores_lto, &r);
  return false;
}

/* Analyze statement STMT of the current function.
   If IPA is true do not merge in side effects of calls.  */

static bool
analyze_stmt (modref_summary *summary, gimple *stmt, bool ipa,
              vec <gimple *> *recursive_calls)
{
  /* In general we cannot ignore clobbers because they are barriers for code
     motion; however, after inlining it is safe to do so because local
     optimization passes do not consider clobbers from other functions.
     Similar logic is in ipa-pure-const.  */
  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return true;

  /* Analyze all loads and stores in STMT.  */
  walk_stmt_load_store_ops (stmt, summary,
                            analyze_load, analyze_store);

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASM:
      /* If the ASM statement does not read nor write memory, there's nothing
         to do.  Otherwise just give up.  */
      if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
        return true;
      if (dump_file)
        fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
                 "which clobbers memory.\n");
      return false;
    case GIMPLE_CALL:
      if (!ipa)
        return analyze_call (summary, stmt, recursive_calls);
      return true;
    default:
      /* Nothing to do for other types of statements.  */
      return true;
    }
}

/* Analyze function F.  IPA indicates whether we're running in local mode
   (false) or the IPA mode (true).  */

static void
analyze_function (function *f, bool ipa)
{
  if (dump_file)
    fprintf (dump_file, "modref analyzing '%s' (ipa=%i)%s%s\n",
             function_name (f), ipa,
             TREE_READONLY (current_function_decl) ? " (const)" : "",
             DECL_PURE_P (current_function_decl) ? " (pure)" : "");

  /* Don't analyze this function if modref analysis is disabled
     (-fno-ipa-modref).  */
  if (!flag_ipa_modref)
    return;

  /* Initialize the summary.  */
  if (!summaries)
    summaries = modref_summaries::create_ggc (symtab);
  else /* Remove existing summary if we are re-running the pass.  */
    summaries->remove (cgraph_node::get (f->decl));

  ((modref_summaries *)summaries)->ipa = ipa;

  modref_summary *summary = summaries->get_create (cgraph_node::get (f->decl));

  /* Compute no-LTO summaries when local optimization is going to happen.  */
  bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
                || (in_lto_p && !flag_wpa
                    && flag_incremental_link != INCREMENTAL_LINK_LTO));

  /* Compute LTO summaries when LTO streaming is going to happen.  */
  bool lto = ipa && ((flag_lto && !in_lto_p)
                     || flag_wpa
                     || flag_incremental_link == INCREMENTAL_LINK_LTO);
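
  /* A hedged reading of the two conditions above (not exhaustive): a plain
     -flto compile computes only the LTO (type based) summaries since
     everything is re-streamed; -flto -ffat-lto-objects computes both kinds;
     a non-LTO compile and an ltrans unit compute only the no-LTO (alias set
     based) summaries.  */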

  /* Create and initialize summary for F.
     Note that summaries may already be allocated from a previous
     run of the pass.  */
  if (nolto)
    {
      gcc_assert (!summary->loads);
      summary->loads = modref_records::create_ggc (param_modref_max_bases,
                                                   param_modref_max_refs,
                                                   param_modref_max_accesses);
      gcc_assert (!summary->stores);
      summary->stores = modref_records::create_ggc (param_modref_max_bases,
                                                    param_modref_max_refs,
                                                    param_modref_max_accesses);
    }
  if (lto)
    {
      gcc_assert (!summary->loads_lto);
      summary->loads_lto = modref_records_lto::create_ggc
                                (param_modref_max_bases,
                                 param_modref_max_refs,
                                 param_modref_max_accesses);
      gcc_assert (!summary->stores_lto);
      summary->stores_lto = modref_records_lto::create_ggc
                                (param_modref_max_bases,
                                 param_modref_max_refs,
                                 param_modref_max_accesses);
    }
  summary->finished = false;
  int ecf_flags = flags_from_decl_or_type (current_function_decl);
  auto_vec <gimple *, 32> recursive_calls;

  /* Analyze each statement in each basic block of the function.  If the
     statement cannot be analyzed (for any reason), the entire function cannot
     be analyzed by modref.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, f)
    {
      gimple_stmt_iterator si;
      for (si = gsi_after_labels (bb); !gsi_end_p (si); gsi_next (&si))
        {
          if (!analyze_stmt (summary, gsi_stmt (si), ipa, &recursive_calls)
              || !summary->useful_p (ecf_flags))
            {
              cgraph_node *fnode = cgraph_node::get (current_function_decl);
              summaries->remove (fnode);
              if (dump_file)
                fprintf (dump_file,
                         " - modref done with result: not tracked.\n");
              return;
            }
        }
    }

  /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
     This needs to be done after all other side effects are computed.  */
  if (!ipa)
    {
      bool changed = true;
      while (changed)
        {
          changed = false;
          for (unsigned i = 0; i < recursive_calls.length (); i++)
            {
              changed |= merge_call_side_effects
                          (summary, recursive_calls[i], summary,
                           ignore_stores_p (current_function_decl,
                                            gimple_call_flags
                                                 (recursive_calls[i])));
              if (!summary->useful_p (ecf_flags))
                {
                  cgraph_node *fnode = cgraph_node::get (current_function_decl);
                  summaries->remove (fnode);
                  if (dump_file)
                    fprintf (dump_file,
                             " - modref done with result: not tracked.\n");
                  return;
                }
            }
        }
    }

  if (!ipa)
    summary->finished = true;

  if (dump_file)
    {
      fprintf (dump_file, " - modref done with result: tracked.\n");
      summary->dump (dump_file);
    }
}

/* Callback for generate_summary.  */

static void
modref_generate (void)
{
  struct cgraph_node *node;
  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      function *f = DECL_STRUCT_FUNCTION (node->decl);
      if (!f)
        continue;
      push_cfun (f);
      analyze_function (f, true);
      pop_cfun ();
    }
}

/* Called when a new function is inserted to callgraph late.  */

void
modref_summaries::insert (struct cgraph_node *node, modref_summary *)
{
  if (!DECL_STRUCT_FUNCTION (node->decl))
    return;
  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  analyze_function (DECL_STRUCT_FUNCTION (node->decl), ipa);
  pop_cfun ();
}

/* Called when a new clone is inserted to callgraph late.  */

void
modref_summaries::duplicate (cgraph_node *, cgraph_node *,
                             modref_summary *src_data,
                             modref_summary *dst_data)
{
  dst_data->finished = src_data->finished;
  if (src_data->stores)
    {
      dst_data->stores = modref_records::create_ggc
                            (src_data->stores->max_bases,
                             src_data->stores->max_refs,
                             src_data->stores->max_accesses);
      dst_data->stores->copy_from (src_data->stores);
    }
  if (src_data->loads)
    {
      dst_data->loads = modref_records::create_ggc
                            (src_data->loads->max_bases,
                             src_data->loads->max_refs,
                             src_data->loads->max_accesses);
      dst_data->loads->copy_from (src_data->loads);
    }
  if (src_data->stores_lto)
    {
      dst_data->stores_lto = modref_records_lto::create_ggc
                                (src_data->stores_lto->max_bases,
                                 src_data->stores_lto->max_refs,
                                 src_data->stores_lto->max_accesses);
      dst_data->stores_lto->copy_from (src_data->stores_lto);
    }
  if (src_data->loads_lto)
    {
      dst_data->loads_lto = modref_records_lto::create_ggc
                                (src_data->loads_lto->max_bases,
                                 src_data->loads_lto->max_refs,
                                 src_data->loads_lto->max_accesses);
      dst_data->loads_lto->copy_from (src_data->loads_lto);
    }
}

namespace
{
/* Definition of the modref pass on GIMPLE.  */
const pass_data pass_data_modref = {
  GIMPLE_PASS,
  "modref",
  OPTGROUP_IPA,
  TV_TREE_MODREF,
  (PROP_cfg | PROP_ssa),
  0,
  0,
  0,
  0,
};

class pass_modref : public gimple_opt_pass
{
public:
  pass_modref (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_modref, ctxt) {}

  /* opt_pass methods: */
  opt_pass *clone ()
  {
    return new pass_modref (m_ctxt);
  }
  virtual bool gate (function *)
  {
    return flag_ipa_modref;
  }
  virtual unsigned int execute (function *);
};

/* Encode TT to the output block OB using the summary streaming API.  */

static void
write_modref_records (modref_records_lto *tt, struct output_block *ob)
{
  streamer_write_uhwi (ob, tt->max_bases);
  streamer_write_uhwi (ob, tt->max_refs);
  streamer_write_uhwi (ob, tt->max_accesses);

  streamer_write_uhwi (ob, tt->every_base);
  streamer_write_uhwi (ob, vec_safe_length (tt->bases));
  size_t i;
  modref_base_node <tree> *base_node;
  FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
    {
      stream_write_tree (ob, base_node->base, true);

      streamer_write_uhwi (ob, base_node->every_ref);
      streamer_write_uhwi (ob, vec_safe_length (base_node->refs));

      size_t j;
      modref_ref_node <tree> *ref_node;
      FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
        {
          stream_write_tree (ob, ref_node->ref, true);
          streamer_write_uhwi (ob, ref_node->every_access);
          streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));

          size_t k;
          modref_access_node *access_node;
          FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
            {
              streamer_write_uhwi (ob, access_node->parm_index);
              if (access_node->parm_index != -1)
                {
                  streamer_write_uhwi (ob, access_node->parm_offset_known);
                  if (access_node->parm_offset_known)
                    {
                      streamer_write_poly_int64 (ob, access_node->parm_offset);
                      streamer_write_poly_int64 (ob, access_node->offset);
                      streamer_write_poly_int64 (ob, access_node->size);
                      streamer_write_poly_int64 (ob, access_node->max_size);
                    }
                }
            }
        }
    }
}

/* Read a modref_tree from the input block IB using the data from DATA_IN.
   This assumes that the tree was encoded using write_modref_tree.
   Either NOLTO_RET or LTO_RET is initialized by the tree depending on
   whether LTO streaming is expected or not.  */

void
read_modref_records (lto_input_block *ib, struct data_in *data_in,
                     modref_records **nolto_ret,
                     modref_records_lto **lto_ret)
{
  size_t max_bases = streamer_read_uhwi (ib);
  size_t max_refs = streamer_read_uhwi (ib);
  size_t max_accesses = streamer_read_uhwi (ib);

  /* Decide whether we want to turn LTO data types to non-LTO (i.e. when
     LTO re-streaming is not going to happen).  */
  if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
    *lto_ret = modref_records_lto::create_ggc (max_bases, max_refs,
                                               max_accesses);
  else
    *nolto_ret = modref_records::create_ggc (max_bases, max_refs,
                                             max_accesses);

  size_t every_base = streamer_read_uhwi (ib);
  size_t nbase = streamer_read_uhwi (ib);

  gcc_assert (!every_base || nbase == 0);
  if (every_base)
    {
      if (*nolto_ret)
        (*nolto_ret)->collapse ();
      if (*lto_ret)
        (*lto_ret)->collapse ();
    }
  for (size_t i = 0; i < nbase; i++)
    {
      tree base_tree = stream_read_tree (ib, data_in);
      modref_base_node <alias_set_type> *nolto_base_node = NULL;
      modref_base_node <tree> *lto_base_node = NULL;

      /* At stream in time we have LTO alias info.  Check if we streamed in
         something obviously unnecessary.  Do not glob types by alias sets;
         it is not 100% clear that ltrans types will get merged the same way.
         Types may get refined based on ODR type conflicts.  */
      if (base_tree && !get_alias_set (base_tree))
        {
          if (dump_file)
            {
              fprintf (dump_file, "Streamed in alias set 0 type ");
              print_generic_expr (dump_file, base_tree);
              fprintf (dump_file, "\n");
            }
          base_tree = NULL;
        }

      if (*nolto_ret)
        nolto_base_node = (*nolto_ret)->insert_base (base_tree
                                                     ? get_alias_set (base_tree)
                                                     : 0);
      if (*lto_ret)
        lto_base_node = (*lto_ret)->insert_base (base_tree);
      size_t every_ref = streamer_read_uhwi (ib);
      size_t nref = streamer_read_uhwi (ib);

      gcc_assert (!every_ref || nref == 0);
      if (every_ref)
        {
          if (nolto_base_node)
            nolto_base_node->collapse ();
          if (lto_base_node)
            lto_base_node->collapse ();
        }
      for (size_t j = 0; j < nref; j++)
        {
          tree ref_tree = stream_read_tree (ib, data_in);

          if (ref_tree && !get_alias_set (ref_tree))
            {
              if (dump_file)
                {
                  fprintf (dump_file, "Streamed in alias set 0 type ");
                  print_generic_expr (dump_file, ref_tree);
                  fprintf (dump_file, "\n");
                }
              ref_tree = NULL;
            }

          modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
          modref_ref_node <tree> *lto_ref_node = NULL;

          if (nolto_base_node)
            nolto_ref_node
              = nolto_base_node->insert_ref (ref_tree
                                             ? get_alias_set (ref_tree) : 0,
                                             max_refs);
          if (lto_base_node)
            lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);

          size_t every_access = streamer_read_uhwi (ib);
          size_t naccesses = streamer_read_uhwi (ib);

          if (nolto_ref_node)
            nolto_ref_node->every_access = every_access;
          if (lto_ref_node)
            lto_ref_node->every_access = every_access;

          for (size_t k = 0; k < naccesses; k++)
            {
              int parm_index = streamer_read_uhwi (ib);
              bool parm_offset_known = false;
              poly_int64 parm_offset = 0;
              poly_int64 offset = 0;
              poly_int64 size = -1;
              poly_int64 max_size = -1;

              if (parm_index != -1)
                {
                  parm_offset_known = streamer_read_uhwi (ib);
                  if (parm_offset_known)
                    {
                      parm_offset = streamer_read_poly_int64 (ib);
                      offset = streamer_read_poly_int64 (ib);
                      size = streamer_read_poly_int64 (ib);
                      max_size = streamer_read_poly_int64 (ib);
                    }
                }
              modref_access_node a = {offset, size, max_size, parm_offset,
                                      parm_index, parm_offset_known};
              if (nolto_ref_node)
                nolto_ref_node->insert_access (a, max_accesses);
              if (lto_ref_node)
                lto_ref_node->insert_access (a, max_accesses);
            }
        }
    }
  if (*lto_ret)
    (*lto_ret)->cleanup ();
  if (*nolto_ret)
    (*nolto_ret)->cleanup ();
}

/* Callback for write_summary.  */

static void
modref_write ()
{
  struct output_block *ob = create_output_block (LTO_section_ipa_modref);
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  unsigned int count = 0;
  int i;

  if (!summaries)
    {
      streamer_write_uhwi (ob, 0);
      streamer_write_char_stream (ob->main_stream, 0);
      produce_asm (ob, NULL);
      destroy_output_block (ob);
      return;
    }

  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
      modref_summary *r;

      if (cnode && cnode->definition && !cnode->alias
          && (r = summaries->get (cnode))
          && r->lto_useful_p (flags_from_decl_or_type (cnode->decl)))
        count++;
    }
  streamer_write_uhwi (ob, count);

  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);

      if (cnode && cnode->definition && !cnode->alias)
        {
          modref_summary *r = summaries->get (cnode);

          if (!r || !r->lto_useful_p (flags_from_decl_or_type (cnode->decl)))
            continue;

          streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));

          streamer_write_uhwi (ob, r->loads_lto ? 1 : 0);
          streamer_write_uhwi (ob, r->stores_lto ? 1 : 0);
          if (r->loads_lto)
            write_modref_records (r->loads_lto, ob);
          if (r->stores_lto)
            write_modref_records (r->stores_lto, ob);
        }
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read summary section in file FILE_DATA of length LEN with data DATA.  */

static void
read_section (struct lto_file_decl_data *file_data, const char *data,
              size_t len)
{
  const struct lto_function_header *header
    = (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int f_count;

  lto_input_block ib ((const char *) data + main_offset, header->main_size,
                      file_data->mode_table);

  data_in
    = lto_data_in_create (file_data, (const char *) data + string_offset,
                          header->string_size, vNULL);
  f_count = streamer_read_uhwi (&ib);
  for (i = 0; i < f_count; i++)
    {
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      unsigned int index = streamer_read_uhwi (&ib);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                 index));

      modref_summary *modref_sum = summaries->get_create (node);
      modref_sum->finished = false;
      int have_loads = streamer_read_uhwi (&ib);
      int have_stores = streamer_read_uhwi (&ib);
      gcc_assert (!modref_sum->loads_lto
                  && !modref_sum->stores_lto
                  && !modref_sum->loads
                  && !modref_sum->stores);
      if (have_loads)
        read_modref_records (&ib, data_in,
                             &modref_sum->loads,
                             &modref_sum->loads_lto);
      if (have_stores)
        read_modref_records (&ib, data_in,
                             &modref_sum->stores,
                             &modref_sum->stores_lto);
      if (dump_file)
        {
          fprintf (dump_file, "Read modref for %s\n",
                   node->dump_name ());
          modref_sum->dump (dump_file);
        }
      if (flag_ltrans)
        modref_sum->finished = true;
    }

  lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}

/* Callback for read_summary.  */

static void
modref_read (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  if (!summaries)
    summaries = modref_summaries::create_ggc (symtab);
  ((modref_summaries *)summaries)->ipa = true;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_summary_section_data (file_data,
                                                       LTO_section_ipa_modref,
                                                       &len);
      if (data)
        read_section (file_data, data, len);
      else
        /* Fatal error here.  We do not want to support compiling ltrans units
           with different version of compiler or different flags than the WPA
           unit, so this should never happen.  */
        fatal_error (input_location,
                     "IPA modref summary is missing in input file");
    }
}

/* Definition of the modref IPA pass.  */
const pass_data pass_data_ipa_modref =
{
  IPA_PASS,              /* type */
  "modref",              /* name */
  OPTGROUP_IPA,          /* optinfo_flags */
  TV_IPA_MODREF,         /* tv_id */
  0,                     /* properties_required */
  0,                     /* properties_provided */
  0,                     /* properties_destroyed */
  0,                     /* todo_flags_start */
  ( TODO_dump_symtab ),  /* todo_flags_finish */
};

class pass_ipa_modref : public ipa_opt_pass_d
{
public:
  pass_ipa_modref (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
                      modref_generate, /* generate_summary */
                      modref_write,    /* write_summary */
                      modref_read,     /* read_summary */
                      modref_write,    /* write_optimization_summary */
                      modref_read,     /* read_optimization_summary */
                      NULL,            /* stmt_fixup */
                      0,               /* function_transform_todo_flags_start */
                      NULL,            /* function_transform */
                      NULL)            /* variable_transform */
  {}

  /* opt_pass methods: */
  opt_pass *clone () { return new pass_ipa_modref (m_ctxt); }
  virtual bool gate (function *)
  {
    return true;
  }
  virtual unsigned int execute (function *);

};

}
unsigned int pass_modref::execute (function *f)
{
  /* If a new function is being added during IPA, we can skip its local
     analysis.  */
  if (summaries && ((modref_summaries *)summaries)->ipa)
    return 0;
  analyze_function (f, false);
  return 0;
}

gimple_opt_pass *
make_pass_modref (gcc::context *ctxt)
{
  return new pass_modref (ctxt);
}

ipa_opt_pass_d *
make_pass_ipa_modref (gcc::context *ctxt)
{
  return new pass_ipa_modref (ctxt);
}

/* Skip edges to ECF_CONST or ECF_NOVOPS callees and to nodes without a
   modref summary attached.  Ignore symbols that are not available.  */

static bool
ignore_edge (struct cgraph_edge *e)
{
  enum availability avail;
  cgraph_node *callee = e->callee->function_or_virtual_thunk_symbol
                           (&avail, e->caller);

  return (avail <= AVAIL_INTERPOSABLE
          || !summaries->get (callee)
          || flags_from_decl_or_type (e->callee->decl)
              & (ECF_CONST | ECF_NOVOPS));
}

/* Compute parm_map for CALLEE_EDGE.  */

static void
compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
{
  class ipa_edge_args *args;
  if (ipa_node_params_sum
      && !callee_edge->call_stmt_cannot_inline_p
      && (args = IPA_EDGE_REF (callee_edge)) != NULL)
    {
      int i, count = ipa_get_cs_argument_count (args);
      class ipa_node_params *caller_parms_info, *callee_pi;
      class ipa_call_summary *es
         = ipa_call_summaries->get (callee_edge);
      cgraph_node *callee
         = callee_edge->callee->function_or_virtual_thunk_symbol
                          (NULL, callee_edge->caller);

      caller_parms_info = IPA_NODE_REF (callee_edge->caller->inlined_to
                                        ? callee_edge->caller->inlined_to
                                        : callee_edge->caller);
      callee_pi = IPA_NODE_REF (callee);

      (*parm_map).safe_grow (count);

      for (i = 0; i < count; i++)
        {
          if (es && es->param[i].points_to_local_or_readonly_memory)
            {
              (*parm_map)[i].parm_index = -2;
              continue;
            }

          struct ipa_jump_func *jf
             = ipa_get_ith_jump_func (args, i);
          if (jf && callee_pi)
            {
              tree cst = ipa_value_from_jfunc (caller_parms_info,
                                               jf,
                                               ipa_get_type
                                                 (callee_pi, i));
              if (cst && points_to_local_or_readonly_memory_p (cst))
                {
                  (*parm_map)[i].parm_index = -2;
                  continue;
                }
            }
          if (jf && jf->type == IPA_JF_PASS_THROUGH)
            {
              (*parm_map)[i].parm_index
                 = ipa_get_jf_pass_through_formal_id (jf);
              (*parm_map)[i].parm_offset_known
                 = ipa_get_jf_pass_through_operation (jf) == NOP_EXPR;
              (*parm_map)[i].parm_offset = 0;
              continue;
            }
          if (jf && jf->type == IPA_JF_ANCESTOR)
            {
              (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
              (*parm_map)[i].parm_offset_known = true;
              (*parm_map)[i].parm_offset = ipa_get_jf_ancestor_offset (jf);
            }
          else
            (*parm_map)[i].parm_index = -1;
        }
      if (dump_file)
        {
          fprintf (dump_file, " Parm map: ");
          for (i = 0; i < count; i++)
            fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
          fprintf (dump_file, "\n");
        }
    }
}
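
/* A hedged example of the jump functions consumed above (illustration
   only):

     struct s { int a; int b; };
     void use (int *p);
     void caller (struct s *x) { use (&x->b); }

   The argument of USE is described by an IPA_JF_ANCESTOR jump function
   based on CALLER's parameter 0, so the parm map entry gets parm_index 0
   and parm_offset equal to the offset of field B; passing plain X would
   instead yield an IPA_JF_PASS_THROUGH with offset 0.  */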

/* Call EDGE was inlined; merge summary from callee to the caller.  */

void
ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
{
  if (!summaries)
    return;

  struct cgraph_node *to = (edge->caller->inlined_to
                            ? edge->caller->inlined_to : edge->caller);
  class modref_summary *to_info = summaries->get (to);

  if (!to_info)
    return;

  class modref_summary *callee_info = summaries->get (edge->callee);
  int flags = flags_from_decl_or_type (edge->callee->decl);

  if (!callee_info)
    {
      if (ignore_stores_p (edge->callee->decl, flags))
        {
          if (to_info->loads)
            to_info->loads->collapse ();
          if (to_info->loads_lto)
            to_info->loads_lto->collapse ();
        }
      else
        {
          summaries->remove (to);
          summaries->remove (edge->callee);
          return;
        }
    }
  else
    {
      auto_vec <modref_parm_map, 32> parm_map;

      compute_parm_map (edge, &parm_map);

      if (to_info->loads)
        to_info->loads->merge (callee_info->loads, &parm_map);
      if (to_info->stores)
        to_info->stores->merge (callee_info->stores, &parm_map);
      if (to_info->loads_lto)
        to_info->loads_lto->merge (callee_info->loads_lto, &parm_map);
      if (to_info->stores_lto)
        to_info->stores_lto->merge (callee_info->stores_lto, &parm_map);
    }
  if (!to_info->useful_p (flags))
    summaries->remove (to);
  summaries->remove (edge->callee);
  return;
}

/* Collapse loads and return true if something changed.  */

bool
collapse_loads (modref_summary *cur_summary)
{
  bool changed = false;

  if (cur_summary->loads && !cur_summary->loads->every_base)
    {
      cur_summary->loads->collapse ();
      changed = true;
    }
  if (cur_summary->loads_lto
      && !cur_summary->loads_lto->every_base)
    {
      cur_summary->loads_lto->collapse ();
      changed = true;
    }
  return changed;
}

/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE.  */

static void
modref_propagate_in_scc (cgraph_node *component_node)
{
  bool changed = true;
  int iteration = 0;

  while (changed)
    {
      changed = false;
      for (struct cgraph_node *cur = component_node; cur;
           cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
        {
          cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
          modref_summary *cur_summary = summaries->get (node);

          if (!cur_summary)
            continue;

          if (dump_file)
            fprintf (dump_file, " Processing %s%s%s\n",
                     cur->dump_name (),
                     TREE_READONLY (cur->decl) ? " (const)" : "",
                     DECL_PURE_P (cur->decl) ? " (pure)" : "");

          for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
            {
              if (e->indirect_info->ecf_flags & (ECF_CONST | ECF_NOVOPS))
                continue;
              if (ignore_stores_p (cur->decl, e->indirect_info->ecf_flags))
                {
                  if (dump_file)
                    fprintf (dump_file, " Indirect call: "
                             "collapsing loads\n");
                  changed |= collapse_loads (cur_summary);
                }
              else
                {
                  if (dump_file)
                    fprintf (dump_file, " Indirect call: giving up\n");
                  summaries->remove (node);
                  changed = true;
                  cur_summary = NULL;
                  break;
                }
            }

          if (!cur_summary)
            continue;

          for (cgraph_edge *callee_edge = cur->callees; callee_edge;
               callee_edge = callee_edge->next_callee)
            {
              int flags = flags_from_decl_or_type (callee_edge->callee->decl);
              modref_summary *callee_summary;
              struct cgraph_node *callee;

              if (flags & (ECF_CONST | ECF_NOVOPS)
                  || !callee_edge->inline_failed)
                continue;

              /* Get the callee and its summary.  */
              enum availability avail;
              callee = callee_edge->callee->function_or_virtual_thunk_symbol
                         (&avail, cur);

              /* It is not necessary to re-process calls outside of the
                 SCC component.  */
              if (iteration > 0
                  && (!callee->aux
                      || ((struct ipa_dfs_info *)cur->aux)->scc_no
                         != ((struct ipa_dfs_info *)callee->aux)->scc_no))
                continue;

              if (dump_file)
                fprintf (dump_file, " Call to %s\n",
                         callee_edge->callee->dump_name ());

              bool ignore_stores = ignore_stores_p (cur->decl, flags);

              /* We don't know anything about CALLEE, hence we cannot tell
                 anything about the entire component.  */

              if (avail <= AVAIL_INTERPOSABLE
                  || !(callee_summary = summaries->get (callee)))
                {
                  if (!ignore_stores)
                    {
                      if (dump_file && avail <= AVAIL_INTERPOSABLE)
                        fprintf (dump_file, " Call target interposable"
                                 " or not available\n");
                      else if (dump_file)
                        fprintf (dump_file, " No call target summary\n");

                      summaries->remove (node);
                      changed = true;
                      break;
                    }
                  else
                    {
                      if (dump_file && avail <= AVAIL_INTERPOSABLE)
                        fprintf (dump_file, " Call target interposable"
                                 " or not available; collapsing loads\n");
                      else if (dump_file)
                        fprintf (dump_file, " No call target summary;"
                                 " collapsing loads\n");

                      changed |= collapse_loads (cur_summary);
                      continue;
                    }
                }

              /* We cannot safely optimize based on summary of the callee if
                 it does not always bind to current def: it is possible that
                 a memory load was optimized out earlier which may not happen
                 in the interposed variant.  */
              if (!callee_edge->binds_to_current_def_p ())
                {
                  changed |= collapse_loads (cur_summary);
                  if (dump_file)
                    fprintf (dump_file, " May not bind local;"
                             " collapsing loads\n");
                }

              auto_vec <modref_parm_map, 32> parm_map;

              compute_parm_map (callee_edge, &parm_map);

              /* Merge in callee's information.  */
              if (callee_summary->loads)
                changed |= cur_summary->loads->merge
                                (callee_summary->loads, &parm_map);
              if (callee_summary->stores)
                changed |= cur_summary->stores->merge
                                (callee_summary->stores, &parm_map);
              if (callee_summary->loads_lto)
                changed |= cur_summary->loads_lto->merge
                                (callee_summary->loads_lto, &parm_map);
              if (callee_summary->stores_lto)
                changed |= cur_summary->stores_lto->merge
                                (callee_summary->stores_lto, &parm_map);
              if (dump_file && changed)
                cur_summary->dump (dump_file);
            }
        }
      iteration++;
    }
  for (struct cgraph_node *cur = component_node; cur;
       cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
    {
      modref_summary *cur_summary = summaries->get (cur);
      if (cur_summary)
        cur_summary->finished = true;
    }
  if (dump_file)
    {
      fprintf (dump_file,
               "Propagation finished in %i iterations\n", iteration);
      for (struct cgraph_node *cur = component_node; cur;
           cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
        if (!cur->inlined_to)
          {
            modref_summary *cur_summary = summaries->get (cur);

            fprintf (dump_file, "Propagated modref for %s%s%s\n",
                     cur->dump_name (),
                     TREE_READONLY (cur->decl) ? " (const)" : "",
                     DECL_PURE_P (cur->decl) ? " (pure)" : "");
            if (cur_summary)
              cur_summary->dump (dump_file);
            else
              fprintf (dump_file, " Not tracked\n");
          }
    }
}

/* Run the IPA pass.  This will take a function's summaries and calls and
   construct new summaries which represent a transitive closure, so that the
   summary of an analyzed function contains information about the loads and
   stores done by the function itself or by any function it calls.  */

unsigned int
pass_ipa_modref::execute (function *)
{
  if (!summaries)
    return 0;

  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
                                         symtab->cgraph_count);
  int order_pos;
  order_pos = ipa_reduced_postorder (order, true, ignore_edge);
  int i;

  /* Iterate over all strongly connected components in post-order.  */
  for (i = 0; i < order_pos; i++)
    {
      /* Get the component's representative.  That's just any node in the
         component from which we can traverse the entire component.  */
      struct cgraph_node *component_node = order[i];

      if (dump_file)
        fprintf (dump_file, "\n\nStart of SCC component\n");

      modref_propagate_in_scc (component_node);
    }
  ((modref_summaries *)summaries)->ipa = false;
  ipa_free_postorder_info ();
  free (order);
  return 0;
}

/* Summaries must stay alive until end of compilation.  */

void
ipa_modref_c_finalize ()
{
  if (summaries)
    ggc_delete (summaries);
  summaries = NULL;
}

#include "gt-ipa-modref.h"