1 /* Search for references that a functions loads or stores.
2 Copyright (C) 2020 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls. The summary has a form of decision tree
24 described in ipa-modref-tree.h.
26 This file contains a tree pass and an IPA pass.  Both perform the same
27 analysis; however, the tree pass is executed during early and late optimization
28 passes to propagate info downwards in the compilation order.  The IPA pass
29 propagates across the callgraph and is able to handle recursion and works on
30 the whole program during link-time analysis.
32 LTO mode differs from the local mode by not recording alias sets but types
33 that are translated to alias sets later.  This is necessary in order to stream
34 the information, because the alias sets are rebuilt at stream-in time and may
35 not correspond to ones seen during analysis.  For this reason part of analysis
40 #include "coretypes.h"
44 #include "alloc-pool.h"
45 #include "tree-pass.h"
46 #include "gimple-iterator.h"
49 #include "ipa-utils.h"
50 #include "symbol-summary.h"
51 #include "gimple-pretty-print.h"
52 #include "gimple-walk.h"
53 #include "print-tree.h"
54 #include "tree-streamer.h"
57 #include "ipa-modref-tree.h"
58 #include "ipa-modref.h"
59 #include "value-range.h"
61 #include "ipa-fnsummary.h"
63 /* Class (from which there is one global instance) that holds modref summaries
64 for all analyzed functions. */
65 class GTY((user
)) modref_summaries
66 : public fast_function_summary
<modref_summary
*, va_gc
>
69 modref_summaries (symbol_table
*symtab
)
70 : fast_function_summary
<modref_summary
*, va_gc
> (symtab
) {}
71 virtual void insert (cgraph_node
*, modref_summary
*state
);
72 virtual void duplicate (cgraph_node
*src_node
,
73 cgraph_node
*dst_node
,
74 modref_summary
*src_data
,
75 modref_summary
*dst_data
);
76 /* This flag controls whether newly inserted functions should be analyzed
77 in IPA or normal mode. Functions inserted between IPA analysis and
78 ipa-modref pass execution needs to be analyzed in IPA mode while all
79 other insertions leads to normal analysis. */
81 static modref_summaries
*create_ggc (symbol_table
*symtab
)
83 return new (ggc_alloc_no_dtor
<modref_summaries
> ())
84 modref_summaries (symtab
);
88 /* Global variable holding all modref summaries. */
89 static GTY(()) fast_function_summary
<modref_summary
*, va_gc
> *summaries
;
91 /* Summary for a single function which this pass produces. */
93 modref_summary::modref_summary ()
94 : loads (NULL
), stores (NULL
), loads_lto (NULL
),
95 stores_lto (NULL
), finished (0)
99 modref_summary::~modref_summary ()
106 ggc_delete (loads_lto
);
108 ggc_delete (stores_lto
);
111 /* Return true if lto summary is potentially useful for optimization. */
114 modref_summary::lto_useful_p (int ecf_flags
)
116 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
118 if (loads_lto
&& !loads_lto
->every_base
)
120 if (ecf_flags
& ECF_PURE
)
122 return stores_lto
&& !stores_lto
->every_base
;
125 /* Return true if summary is potentially useful for optimization. */
128 modref_summary::useful_p (int ecf_flags
)
130 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
132 if (lto_useful_p (ecf_flags
))
134 if (loads
&& !loads
->every_base
)
136 if (ecf_flags
& ECF_PURE
)
138 return stores
&& !stores
->every_base
;
144 dump_access (modref_access_node
*a
, FILE *out
)
146 fprintf (out
, " access:");
147 if (a
->parm_index
!= -1)
149 fprintf (out
, " Parm %i", a
->parm_index
);
150 if (a
->parm_offset_known
)
152 fprintf (out
, " param offset:");
153 print_dec ((poly_int64_pod
)a
->parm_offset
, out
, SIGNED
);
156 if (a
->range_info_useful_p ())
158 fprintf (out
, " offset:");
159 print_dec ((poly_int64_pod
)a
->offset
, out
, SIGNED
);
160 fprintf (out
, " size:");
161 print_dec ((poly_int64_pod
)a
->size
, out
, SIGNED
);
162 fprintf (out
, " max_size:");
163 print_dec ((poly_int64_pod
)a
->max_size
, out
, SIGNED
);
168 /* Dump records TT to OUT. */
171 dump_records (modref_records
*tt
, FILE *out
)
173 fprintf (out
, " Limits: %i bases, %i refs\n",
174 (int)tt
->max_bases
, (int)tt
->max_refs
);
177 fprintf (out
, " Every base\n");
181 modref_base_node
<alias_set_type
> *n
;
182 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
184 fprintf (out
, " Base %i: alias set %i\n", (int)i
, n
->base
);
187 fprintf (out
, " Every ref\n");
191 modref_ref_node
<alias_set_type
> *r
;
192 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
194 fprintf (out
, " Ref %i: alias set %i\n", (int)j
, r
->ref
);
197 fprintf (out
, " Every access\n");
201 modref_access_node
*a
;
202 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
203 dump_access (a
, out
);
208 /* Dump records TT to OUT. */
211 dump_lto_records (modref_records_lto
*tt
, FILE *out
)
213 fprintf (out
, " Limits: %i bases, %i refs\n",
214 (int)tt
->max_bases
, (int)tt
->max_refs
);
217 fprintf (out
, " Every base\n");
221 modref_base_node
<tree
> *n
;
222 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
224 fprintf (out
, " Base %i:", (int)i
);
225 print_generic_expr (dump_file
, n
->base
);
226 fprintf (out
, " (alias set %i)\n",
227 n
->base
? get_alias_set (n
->base
) : 0);
230 fprintf (out
, " Every ref\n");
234 modref_ref_node
<tree
> *r
;
235 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
237 fprintf (out
, " Ref %i:", (int)j
);
238 print_generic_expr (dump_file
, r
->ref
);
239 fprintf (out
, " (alias set %i)\n",
240 r
->ref
? get_alias_set (r
->ref
) : 0);
243 fprintf (out
, " Every access\n");
247 modref_access_node
*a
;
248 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
249 dump_access (a
, out
);
257 modref_summary::dump (FILE *out
)
261 fprintf (out
, " loads:\n");
262 dump_records (loads
, out
);
266 fprintf (out
, " stores:\n");
267 dump_records (stores
, out
);
271 fprintf (out
, " LTO loads:\n");
272 dump_lto_records (loads_lto
, out
);
276 fprintf (out
, " LTO stores:\n");
277 dump_lto_records (stores_lto
, out
);
282 /* Get function summary for FUNC if it exists, return NULL otherwise. */
285 get_modref_function_summary (cgraph_node
*func
)
287 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
291 /* A single function body may be represented by multiple symbols with
292 different visibility. For example, if FUNC is an interposable alias,
293 we don't want to return anything, even if we have summary for the target
295 enum availability avail
;
296 func
= func
->function_or_virtual_thunk_symbol
297 (&avail
, cgraph_node::get (current_function_decl
));
298 if (avail
<= AVAIL_INTERPOSABLE
)
301 /* Attempt to get summary for FUNC. If analysis of FUNC hasn't finished yet,
302 don't return anything. */
303 modref_summary
*r
= summaries
->get (func
);
304 if (r
&& r
->finished
)
310 /* Construct modref_access_node from REF. */
311 static modref_access_node
312 get_access (ao_ref
*ref
)
316 base
= ao_ref_base (ref
);
317 modref_access_node a
= {ref
->offset
, ref
->size
, ref
->max_size
,
319 if (TREE_CODE (base
) == MEM_REF
|| TREE_CODE (base
) == TARGET_MEM_REF
)
322 base
= TREE_OPERAND (base
, 0);
323 if (TREE_CODE (base
) == SSA_NAME
324 && SSA_NAME_IS_DEFAULT_DEF (base
)
325 && TREE_CODE (SSA_NAME_VAR (base
)) == PARM_DECL
)
328 for (tree t
= DECL_ARGUMENTS (current_function_decl
);
329 t
!= SSA_NAME_VAR (base
); t
= DECL_CHAIN (t
))
338 if (TREE_CODE (memref
) == MEM_REF
)
341 = wi::to_poly_wide (TREE_OPERAND
342 (memref
, 1)).to_shwi (&a
.parm_offset
);
345 a
.parm_offset_known
= false;
355 /* Record access into the modref_records data structure. */
358 record_access (modref_records
*tt
, ao_ref
*ref
)
360 alias_set_type base_set
= !flag_strict_aliasing
? 0
361 : ao_ref_base_alias_set (ref
);
362 alias_set_type ref_set
= !flag_strict_aliasing
? 0
363 : (ao_ref_alias_set (ref
));
364 modref_access_node a
= get_access (ref
);
367 fprintf (dump_file
, " - Recording base_set=%i ref_set=%i parm=%i\n",
368 base_set
, ref_set
, a
.parm_index
);
370 tt
->insert (base_set
, ref_set
, a
);
373 /* IPA version of record_access_tree. */
376 record_access_lto (modref_records_lto
*tt
, ao_ref
*ref
)
378 /* get_alias_set sometimes use different type to compute the alias set
379 than TREE_TYPE (base). Do same adjustments. */
380 tree base_type
= NULL_TREE
, ref_type
= NULL_TREE
;
381 if (flag_strict_aliasing
)
386 while (handled_component_p (base
))
387 base
= TREE_OPERAND (base
, 0);
389 base_type
= reference_alias_ptr_type_1 (&base
);
392 base_type
= TREE_TYPE (base
);
394 base_type
= TYPE_REF_CAN_ALIAS_ALL (base_type
)
395 ? NULL_TREE
: TREE_TYPE (base_type
);
397 tree ref_expr
= ref
->ref
;
398 ref_type
= reference_alias_ptr_type_1 (&ref_expr
);
401 ref_type
= TREE_TYPE (ref_expr
);
403 ref_type
= TYPE_REF_CAN_ALIAS_ALL (ref_type
)
404 ? NULL_TREE
: TREE_TYPE (ref_type
);
406 /* Sanity check that we are in sync with what get_alias_set does. */
407 gcc_checking_assert ((!base_type
&& !ao_ref_base_alias_set (ref
))
408 || get_alias_set (base_type
)
409 == ao_ref_base_alias_set (ref
));
410 gcc_checking_assert ((!ref_type
&& !ao_ref_alias_set (ref
))
411 || get_alias_set (ref_type
)
412 == ao_ref_alias_set (ref
));
414 /* Do not bother to record types that have no meaningful alias set.
415 Also skip variably modified types since these go to local streams. */
416 if (base_type
&& (!get_alias_set (base_type
)
417 || variably_modified_type_p (base_type
, NULL_TREE
)))
418 base_type
= NULL_TREE
;
419 if (ref_type
&& (!get_alias_set (ref_type
)
420 || variably_modified_type_p (ref_type
, NULL_TREE
)))
421 ref_type
= NULL_TREE
;
423 modref_access_node a
= get_access (ref
);
426 fprintf (dump_file
, " - Recording base type:");
427 print_generic_expr (dump_file
, base_type
);
428 fprintf (dump_file
, " (alias set %i) ref type:",
429 base_type
? get_alias_set (base_type
) : 0);
430 print_generic_expr (dump_file
, ref_type
);
431 fprintf (dump_file
, " (alias set %i) parm:%i\n",
432 ref_type
? get_alias_set (ref_type
) : 0,
436 tt
->insert (base_type
, ref_type
, a
);
439 /* Returns true if and only if we should store the access to EXPR.
440 Some accesses, e.g. loads from automatic variables, are not interesting. */
443 record_access_p (tree expr
)
445 if (refs_local_or_readonly_memory_p (expr
))
448 fprintf (dump_file
, " - Read-only or local, ignoring.\n");
454 /* Return true if ECF flags says that stores can be ignored. */
457 ignore_stores_p (tree caller
, int flags
)
459 if (flags
& ECF_PURE
)
461 if ((flags
& (ECF_NORETURN
| ECF_NOTHROW
)) == (ECF_NORETURN
| ECF_NOTHROW
)
462 || (!opt_for_fn (caller
, flag_exceptions
) && (flags
& ECF_NORETURN
)))
467 /* Merge side effects of call STMT to function with CALLEE_SUMMARY
468 int CUR_SUMMARY. Return true if something changed.
469 If IGNORE_STORES is true, do not merge stores. */
472 merge_call_side_effects (modref_summary
*cur_summary
,
473 gimple
*stmt
, modref_summary
*callee_summary
,
476 auto_vec
<modref_parm_map
, 32> parm_map
;
477 bool changed
= false;
479 parm_map
.safe_grow (gimple_call_num_args (stmt
));
480 for (unsigned i
= 0; i
< gimple_call_num_args (stmt
); i
++)
482 tree op
= gimple_call_arg (stmt
, i
);
484 if (TREE_CODE (op
) == SSA_NAME
485 && SSA_NAME_IS_DEFAULT_DEF (op
)
486 && TREE_CODE (SSA_NAME_VAR (op
)) == PARM_DECL
)
489 for (tree t
= DECL_ARGUMENTS (current_function_decl
);
490 t
!= SSA_NAME_VAR (op
); t
= DECL_CHAIN (t
))
499 parm_map
[i
].parm_index
= index
;
500 parm_map
[i
].parm_offset_known
= true;
501 parm_map
[i
].parm_offset
= 0;
503 else if (points_to_local_or_readonly_memory_p (op
))
504 parm_map
[i
].parm_index
= -2;
506 parm_map
[i
].parm_index
= -1;
509 /* Merge with callee's summary. */
510 if (cur_summary
->loads
)
511 changed
|= cur_summary
->loads
->merge (callee_summary
->loads
, &parm_map
);
512 if (cur_summary
->loads_lto
)
513 changed
|= cur_summary
->loads_lto
->merge (callee_summary
->loads_lto
,
517 if (cur_summary
->stores
)
518 changed
|= cur_summary
->stores
->merge (callee_summary
->stores
,
520 if (cur_summary
->stores_lto
)
521 changed
|= cur_summary
->stores_lto
->merge (callee_summary
->stores_lto
,
527 /* Analyze function call STMT in function F.
528 Remember recursive calls in RECURSIVE_CALLS. */
531 analyze_call (modref_summary
*cur_summary
,
532 gimple
*stmt
, vec
<gimple
*> *recursive_calls
)
534 /* Check flags on the function call. In certain cases, analysis can be
536 int flags
= gimple_call_flags (stmt
);
537 if (flags
& (ECF_CONST
| ECF_NOVOPS
))
541 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
542 "except for args.\n");
546 /* Pure functions do not affect global memory. Stores by functions which are
547 noreturn and do not throw can safely be ignored. */
548 bool ignore_stores
= ignore_stores_p (current_function_decl
, flags
);
550 /* Next, we try to get the callee's function declaration. The goal is to
551 merge their summary with ours. */
552 tree callee
= gimple_call_fndecl (stmt
);
554 /* Check if this is an indirect call. */
557 /* If the indirect call does not write memory, our store summary is
558 unaffected, but we have to discard our loads summary (we don't know
559 anything about the loads that the called function performs). */
563 fprintf (dump_file
, " - Indirect call which does not write memory, "
564 "discarding loads.\n");
565 if (cur_summary
->loads
)
566 cur_summary
->loads
->collapse ();
567 if (cur_summary
->loads_lto
)
568 cur_summary
->loads_lto
->collapse ();
572 fprintf (dump_file
, " - Indirect call.\n");
576 struct cgraph_node
*callee_node
= cgraph_node::get_create (callee
);
578 /* We can not safely optimize based on summary of callee if it does
579 not always bind to current def: it is possible that memory load
580 was optimized out earlier which may not happen in the interposed
582 if (!callee_node
->binds_to_current_def_p ())
585 fprintf (dump_file
, " - May be interposed: collapsing loads.\n");
586 if (cur_summary
->loads
)
587 cur_summary
->loads
->collapse ();
588 if (cur_summary
->loads_lto
)
589 cur_summary
->loads_lto
->collapse ();
592 /* If this is a recursive call, the target summary is the same as ours, so
593 there's nothing to do. */
594 if (recursive_call_p (current_function_decl
, callee
))
596 recursive_calls
->safe_push (stmt
);
598 fprintf (dump_file
, " - Skipping recursive call.\n");
602 gcc_assert (callee_node
!= NULL
);
604 /* Get the function symbol and its availability. */
605 enum availability avail
;
606 callee_node
= callee_node
->function_symbol (&avail
);
607 if (avail
<= AVAIL_INTERPOSABLE
)
609 /* Keep stores summary, but discard all loads for interposable function
613 if (cur_summary
->loads
)
614 cur_summary
->loads
->collapse ();
615 if (cur_summary
->loads_lto
)
616 cur_summary
->loads_lto
->collapse ();
620 fprintf (dump_file
, " - Function availability <= AVAIL_INTERPOSABLE.\n");
624 /* Get callee's modref summary. As above, if there's no summary, we either
625 have to give up or, if stores are ignored, we can just purge loads. */
626 modref_summary
*callee_summary
= summaries
->get (callee_node
);
631 if (cur_summary
->loads
)
632 cur_summary
->loads
->collapse ();
633 if (cur_summary
->loads_lto
)
634 cur_summary
->loads_lto
->collapse ();
638 fprintf (dump_file
, " - No modref summary available for callee.\n");
642 merge_call_side_effects (cur_summary
, stmt
, callee_summary
, ignore_stores
);
647 /* Helper for analyze_stmt. */
650 analyze_load (gimple
*, tree
, tree op
, void *data
)
652 modref_summary
*summary
= (modref_summary
*)data
;
656 fprintf (dump_file
, " - Analyzing load: ");
657 print_generic_expr (dump_file
, op
);
658 fprintf (dump_file
, "\n");
661 if (!record_access_p (op
))
665 ao_ref_init (&r
, op
);
668 record_access (summary
->loads
, &r
);
669 if (summary
->loads_lto
)
670 record_access_lto (summary
->loads_lto
, &r
);
674 /* Helper for analyze_stmt. */
677 analyze_store (gimple
*, tree
, tree op
, void *data
)
679 modref_summary
*summary
= (modref_summary
*)data
;
683 fprintf (dump_file
, " - Analyzing store: ");
684 print_generic_expr (dump_file
, op
);
685 fprintf (dump_file
, "\n");
688 if (!record_access_p (op
))
692 ao_ref_init (&r
, op
);
695 record_access (((modref_summary
*)data
)->stores
, &r
);
696 if (summary
->stores_lto
)
697 record_access_lto (((modref_summary
*)data
)->stores_lto
, &r
);
701 /* Analyze statement STMT of function F.
702 If IPA is true do not merge in side effects of calls. */
705 analyze_stmt (modref_summary
*summary
, gimple
*stmt
, bool ipa
,
706 vec
<gimple
*> *recursive_calls
)
708 /* In general we can not ignore clobbers because they are barries for code
709 motion, however after inlining it is safe to do becuase local optimization
710 passes do not consider clobbers from other functions.
711 Similar logic is in ipa-pure-consts. */
712 if ((ipa
|| cfun
->after_inlining
) && gimple_clobber_p (stmt
))
715 /* Analyze all loads and stores in STMT. */
716 walk_stmt_load_store_ops (stmt
, summary
,
717 analyze_load
, analyze_store
);
719 switch (gimple_code (stmt
))
722 /* If the ASM statement does not read nor write memory, there's nothing
723 to do. Otherwise just give up. */
724 if (!gimple_asm_clobbers_memory_p (as_a
<gasm
*> (stmt
)))
727 fprintf (dump_file
, " - Function contains GIMPLE_ASM statement "
728 "which clobbers memory.\n");
732 return analyze_call (summary
, stmt
, recursive_calls
);
735 /* Nothing to do for other types of statements. */
740 /* Analyze function F. IPA indicates whether we're running in local mode (false)
741 or the IPA mode (true). */
744 analyze_function (function
*f
, bool ipa
)
747 fprintf (dump_file
, "modref analyzing '%s' (ipa=%i)%s%s\n",
748 function_name (f
), ipa
,
749 TREE_READONLY (current_function_decl
) ? " (const)" : "",
750 DECL_PURE_P (current_function_decl
) ? " (pure)" : "");
752 /* Don't analyze this function if it's compiled with -fno-strict-aliasing. */
753 if (!flag_ipa_modref
)
756 /* Initialize the summary. */
758 summaries
= modref_summaries::create_ggc (symtab
);
759 else /* Remove existing summary if we are re-running the pass. */
760 summaries
->remove (cgraph_node::get (f
->decl
));
762 ((modref_summaries
*)summaries
)->ipa
= ipa
;
764 modref_summary
*summary
= summaries
->get_create (cgraph_node::get (f
->decl
));
766 /* Compute no-LTO summaries when local optimization is going to happen. */
767 bool nolto
= (!ipa
|| ((!flag_lto
|| flag_fat_lto_objects
) && !in_lto_p
)
768 || (in_lto_p
&& !flag_wpa
769 && flag_incremental_link
!= INCREMENTAL_LINK_LTO
));
771 /* Compute LTO when LTO streaming is going to happen. */
772 bool lto
= ipa
&& ((flag_lto
&& !in_lto_p
)
774 || flag_incremental_link
== INCREMENTAL_LINK_LTO
);
776 /* Create and initialize summary for F.
777 Note that summaries may be already allocated from previous
781 gcc_assert (!summary
->loads
);
782 summary
->loads
= modref_records::create_ggc (param_modref_max_bases
,
783 param_modref_max_refs
,
784 param_modref_max_accesses
);
785 gcc_assert (!summary
->stores
);
786 summary
->stores
= modref_records::create_ggc (param_modref_max_bases
,
787 param_modref_max_refs
,
788 param_modref_max_accesses
);
792 gcc_assert (!summary
->loads_lto
);
793 summary
->loads_lto
= modref_records_lto::create_ggc
794 (param_modref_max_bases
,
795 param_modref_max_refs
,
796 param_modref_max_accesses
);
797 gcc_assert (!summary
->stores_lto
);
798 summary
->stores_lto
= modref_records_lto::create_ggc
799 (param_modref_max_bases
,
800 param_modref_max_refs
,
801 param_modref_max_accesses
);
803 summary
->finished
= false;
804 int ecf_flags
= flags_from_decl_or_type (current_function_decl
);
805 auto_vec
<gimple
*, 32> recursive_calls
;
807 /* Analyze each statement in each basic block of the function. If the
808 statement cannot be analyzed (for any reason), the entire function cannot
809 be analyzed by modref. */
811 FOR_EACH_BB_FN (bb
, f
)
813 gimple_stmt_iterator si
;
814 for (si
= gsi_after_labels (bb
); !gsi_end_p (si
); gsi_next (&si
))
816 if (!analyze_stmt (summary
, gsi_stmt (si
), ipa
, &recursive_calls
)
817 || !summary
->useful_p (ecf_flags
))
819 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
820 summaries
->remove (fnode
);
823 " - modref done with result: not tracked.\n");
829 /* In non-IPA mode we need to perform iterative datafow on recursive calls.
830 This needs to be done after all other side effects are computed. */
837 for (unsigned i
= 0; i
< recursive_calls
.length (); i
++)
839 changed
|= merge_call_side_effects
840 (summary
, recursive_calls
[i
], summary
,
841 ignore_stores_p (current_function_decl
,
843 (recursive_calls
[i
])));
844 if (!summary
->useful_p (ecf_flags
))
846 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
847 summaries
->remove (fnode
);
850 " - modref done with result: not tracked.\n");
858 summary
->finished
= true;
862 fprintf (dump_file
, " - modref done with result: tracked.\n");
863 summary
->dump (dump_file
);
867 /* Callback for generate_summary. */
870 modref_generate (void)
872 struct cgraph_node
*node
;
873 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node
)
875 function
*f
= DECL_STRUCT_FUNCTION (node
->decl
);
879 analyze_function (f
, true);
884 /* Called when a new function is inserted to callgraph late. */
887 modref_summaries::insert (struct cgraph_node
*node
, modref_summary
*)
889 if (!DECL_STRUCT_FUNCTION (node
->decl
))
891 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
892 analyze_function (DECL_STRUCT_FUNCTION (node
->decl
), ipa
);
896 /* Called when new clone is inserted to callgraph late. */
899 modref_summaries::duplicate (cgraph_node
*, cgraph_node
*,
900 modref_summary
*src_data
,
901 modref_summary
*dst_data
)
903 dst_data
->finished
= src_data
->finished
;
904 if (src_data
->stores
)
906 dst_data
->stores
= modref_records::create_ggc
907 (src_data
->stores
->max_bases
,
908 src_data
->stores
->max_refs
,
909 src_data
->stores
->max_accesses
);
910 dst_data
->stores
->copy_from (src_data
->stores
);
914 dst_data
->loads
= modref_records::create_ggc
915 (src_data
->loads
->max_bases
,
916 src_data
->loads
->max_refs
,
917 src_data
->loads
->max_accesses
);
918 dst_data
->loads
->copy_from (src_data
->loads
);
920 if (src_data
->stores_lto
)
922 dst_data
->stores_lto
= modref_records_lto::create_ggc
923 (src_data
->stores_lto
->max_bases
,
924 src_data
->stores_lto
->max_refs
,
925 src_data
->stores_lto
->max_accesses
);
926 dst_data
->stores_lto
->copy_from (src_data
->stores_lto
);
928 if (src_data
->loads_lto
)
930 dst_data
->loads_lto
= modref_records_lto::create_ggc
931 (src_data
->loads_lto
->max_bases
,
932 src_data
->loads_lto
->max_refs
,
933 src_data
->stores_lto
->max_accesses
);
934 dst_data
->loads_lto
->copy_from (src_data
->loads_lto
);
940 /* Definition of the modref pass on GIMPLE. */
941 const pass_data pass_data_modref
= {
946 (PROP_cfg
| PROP_ssa
),
953 class pass_modref
: public gimple_opt_pass
956 pass_modref (gcc::context
*ctxt
)
957 : gimple_opt_pass (pass_data_modref
, ctxt
) {}
959 /* opt_pass methods: */
962 return new pass_modref (m_ctxt
);
964 virtual bool gate (function
*)
966 return flag_ipa_modref
;
968 virtual unsigned int execute (function
*);
971 /* Encode TT to the output block OB using the summary streaming API. */
974 write_modref_records (modref_records_lto
*tt
, struct output_block
*ob
)
976 streamer_write_uhwi (ob
, tt
->max_bases
);
977 streamer_write_uhwi (ob
, tt
->max_refs
);
978 streamer_write_uhwi (ob
, tt
->max_accesses
);
980 streamer_write_uhwi (ob
, tt
->every_base
);
981 streamer_write_uhwi (ob
, vec_safe_length (tt
->bases
));
983 modref_base_node
<tree
> *base_node
;
984 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, base_node
)
986 stream_write_tree (ob
, base_node
->base
, true);
988 streamer_write_uhwi (ob
, base_node
->every_ref
);
989 streamer_write_uhwi (ob
, vec_safe_length (base_node
->refs
));
992 modref_ref_node
<tree
> *ref_node
;
993 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
995 stream_write_tree (ob
, ref_node
->ref
, true);
996 streamer_write_uhwi (ob
, ref_node
->every_access
);
997 streamer_write_uhwi (ob
, vec_safe_length (ref_node
->accesses
));
1000 modref_access_node
*access_node
;
1001 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
1003 streamer_write_uhwi (ob
, access_node
->parm_index
);
1004 if (access_node
->parm_index
!= -1)
1006 streamer_write_uhwi (ob
, access_node
->parm_offset_known
);
1007 if (access_node
->parm_offset_known
)
1009 streamer_write_poly_int64 (ob
, access_node
->parm_offset
);
1010 streamer_write_poly_int64 (ob
, access_node
->offset
);
1011 streamer_write_poly_int64 (ob
, access_node
->size
);
1012 streamer_write_poly_int64 (ob
, access_node
->max_size
);
1020 /* Read a modref_tree from the input block IB using the data from DATA_IN.
1021 This assumes that the tree was encoded using write_modref_tree.
1022 Either nolto_ret or lto_ret is initialized by the tree depending whether
1023 LTO streaming is expected or not. */
1026 read_modref_records (lto_input_block
*ib
, struct data_in
*data_in
,
1027 modref_records
**nolto_ret
,
1028 modref_records_lto
**lto_ret
)
1030 size_t max_bases
= streamer_read_uhwi (ib
);
1031 size_t max_refs
= streamer_read_uhwi (ib
);
1032 size_t max_accesses
= streamer_read_uhwi (ib
);
1034 /* Decide whether we want to turn LTO data types to non-LTO (i.e. when
1035 LTO re-streaming is not going to happen). */
1036 if (flag_wpa
|| flag_incremental_link
== INCREMENTAL_LINK_LTO
)
1037 *lto_ret
= modref_records_lto::create_ggc (max_bases
, max_refs
,
1040 *nolto_ret
= modref_records::create_ggc (max_bases
, max_refs
,
1043 size_t every_base
= streamer_read_uhwi (ib
);
1044 size_t nbase
= streamer_read_uhwi (ib
);
1046 gcc_assert (!every_base
|| nbase
== 0);
1050 (*nolto_ret
)->collapse ();
1052 (*lto_ret
)->collapse ();
1054 for (size_t i
= 0; i
< nbase
; i
++)
1056 tree base_tree
= stream_read_tree (ib
, data_in
);
1057 modref_base_node
<alias_set_type
> *nolto_base_node
= NULL
;
1058 modref_base_node
<tree
> *lto_base_node
= NULL
;
1060 /* At stream in time we have LTO alias info. Check if we streamed in
1061 something obviously unnecessary. Do not glob types by alias sets;
1062 it is not 100% clear that ltrans types will get merged same way.
1063 Types may get refined based on ODR type conflicts. */
1064 if (base_tree
&& !get_alias_set (base_tree
))
1068 fprintf (dump_file
, "Streamed in alias set 0 type ");
1069 print_generic_expr (dump_file
, base_tree
);
1070 fprintf (dump_file
, "\n");
1076 nolto_base_node
= (*nolto_ret
)->insert_base (base_tree
1077 ? get_alias_set (base_tree
)
1080 lto_base_node
= (*lto_ret
)->insert_base (base_tree
);
1081 size_t every_ref
= streamer_read_uhwi (ib
);
1082 size_t nref
= streamer_read_uhwi (ib
);
1084 gcc_assert (!every_ref
|| nref
== 0);
1087 if (nolto_base_node
)
1088 nolto_base_node
->collapse ();
1090 lto_base_node
->collapse ();
1092 for (size_t j
= 0; j
< nref
; j
++)
1094 tree ref_tree
= stream_read_tree (ib
, data_in
);
1096 if (ref_tree
&& !get_alias_set (ref_tree
))
1100 fprintf (dump_file
, "Streamed in alias set 0 type ");
1101 print_generic_expr (dump_file
, ref_tree
);
1102 fprintf (dump_file
, "\n");
1107 modref_ref_node
<alias_set_type
> *nolto_ref_node
= NULL
;
1108 modref_ref_node
<tree
> *lto_ref_node
= NULL
;
1110 if (nolto_base_node
)
1112 = nolto_base_node
->insert_ref (ref_tree
1113 ? get_alias_set (ref_tree
) : 0,
1116 lto_ref_node
= lto_base_node
->insert_ref (ref_tree
, max_refs
);
1118 size_t every_access
= streamer_read_uhwi (ib
);
1119 size_t naccesses
= streamer_read_uhwi (ib
);
1122 nolto_ref_node
->every_access
= every_access
;
1124 lto_ref_node
->every_access
= every_access
;
1126 for (size_t k
= 0; k
< naccesses
; k
++)
1128 int parm_index
= streamer_read_uhwi (ib
);
1129 bool parm_offset_known
= false;
1130 poly_int64 parm_offset
= 0;
1131 poly_int64 offset
= 0;
1132 poly_int64 size
= -1;
1133 poly_int64 max_size
= -1;
1135 if (parm_index
!= -1)
1137 parm_offset_known
= streamer_read_uhwi (ib
);
1138 if (parm_offset_known
)
1140 parm_offset
= streamer_read_poly_int64 (ib
);
1141 offset
= streamer_read_poly_int64 (ib
);
1142 size
= streamer_read_poly_int64 (ib
);
1143 max_size
= streamer_read_poly_int64 (ib
);
1146 modref_access_node a
= {offset
, size
, max_size
, parm_offset
,
1147 parm_index
, parm_offset_known
};
1149 nolto_ref_node
->insert_access (a
, max_accesses
);
1151 lto_ref_node
->insert_access (a
, max_accesses
);
1156 (*lto_ret
)->cleanup ();
1158 (*nolto_ret
)->cleanup ();
1161 /* Callback for write_summary. */
1166 struct output_block
*ob
= create_output_block (LTO_section_ipa_modref
);
1167 lto_symtab_encoder_t encoder
= ob
->decl_state
->symtab_node_encoder
;
1168 unsigned int count
= 0;
1173 streamer_write_uhwi (ob
, 0);
1174 streamer_write_char_stream (ob
->main_stream
, 0);
1175 produce_asm (ob
, NULL
);
1176 destroy_output_block (ob
);
1180 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
1182 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
1183 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
1186 if (cnode
&& cnode
->definition
&& !cnode
->alias
1187 && (r
= summaries
->get (cnode
))
1188 && r
->lto_useful_p (flags_from_decl_or_type (cnode
->decl
)))
1191 streamer_write_uhwi (ob
, count
);
1193 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
1195 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
1196 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
1198 if (cnode
&& cnode
->definition
&& !cnode
->alias
)
1201 modref_summary
*r
= summaries
->get (cnode
);
1203 if (!r
|| !r
->lto_useful_p (flags_from_decl_or_type (cnode
->decl
)))
1206 streamer_write_uhwi (ob
, lto_symtab_encoder_encode (encoder
, cnode
));
1208 streamer_write_uhwi (ob
, r
->loads_lto
? 1 : 0);
1209 streamer_write_uhwi (ob
, r
->stores_lto
? 1 : 0);
1211 write_modref_records (r
->loads_lto
, ob
);
1213 write_modref_records (r
->stores_lto
, ob
);
1216 streamer_write_char_stream (ob
->main_stream
, 0);
1217 produce_asm (ob
, NULL
);
1218 destroy_output_block (ob
);
1222 read_section (struct lto_file_decl_data
*file_data
, const char *data
,
1225 const struct lto_function_header
*header
1226 = (const struct lto_function_header
*) data
;
1227 const int cfg_offset
= sizeof (struct lto_function_header
);
1228 const int main_offset
= cfg_offset
+ header
->cfg_size
;
1229 const int string_offset
= main_offset
+ header
->main_size
;
1230 struct data_in
*data_in
;
1232 unsigned int f_count
;
1234 lto_input_block
ib ((const char *) data
+ main_offset
, header
->main_size
,
1235 file_data
->mode_table
);
1238 = lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
1239 header
->string_size
, vNULL
);
1240 f_count
= streamer_read_uhwi (&ib
);
1241 for (i
= 0; i
< f_count
; i
++)
1243 struct cgraph_node
*node
;
1244 lto_symtab_encoder_t encoder
;
1246 unsigned int index
= streamer_read_uhwi (&ib
);
1247 encoder
= file_data
->symtab_node_encoder
;
1248 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
1251 modref_summary
*modref_sum
= summaries
->get_create (node
);
1252 modref_sum
->finished
= false;
1253 int have_loads
= streamer_read_uhwi (&ib
);
1254 int have_stores
= streamer_read_uhwi (&ib
);
1255 gcc_assert (!modref_sum
->loads_lto
1256 && !modref_sum
->stores_lto
1257 && !modref_sum
->loads
1258 && !modref_sum
->stores
);
1260 read_modref_records (&ib
, data_in
,
1262 &modref_sum
->loads_lto
);
1264 read_modref_records (&ib
, data_in
,
1265 &modref_sum
->stores
,
1266 &modref_sum
->stores_lto
);
1269 fprintf (dump_file
, "Read modref for %s\n",
1270 node
->dump_name ());
1271 modref_sum
->dump (dump_file
);
1274 modref_sum
->finished
= true;
1277 lto_free_section_data (file_data
, LTO_section_ipa_modref
, NULL
, data
,
1279 lto_data_in_delete (data_in
);
1282 /* Callback for read_summary. */
1287 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
1288 struct lto_file_decl_data
*file_data
;
1292 summaries
= modref_summaries::create_ggc (symtab
);
1293 ((modref_summaries
*)summaries
)->ipa
= true;
1295 while ((file_data
= file_data_vec
[j
++]))
1298 const char *data
= lto_get_summary_section_data (file_data
,
1299 LTO_section_ipa_modref
,
1302 read_section (file_data
, data
, len
);
1304 /* Fatal error here. We do not want to support compiling ltrans units
1305 with different version of compiler or different flags than the WPA
1306 unit, so this should never happen. */
1307 fatal_error (input_location
,
1308 "IPA modref summary is missing in input file");
1312 /* Definition of the modref IPA pass. */
1313 const pass_data pass_data_ipa_modref
=
1315 IPA_PASS
, /* type */
1316 "modref", /* name */
1317 OPTGROUP_IPA
, /* optinfo_flags */
1318 TV_IPA_MODREF
, /* tv_id */
1319 0, /* properties_required */
1320 0, /* properties_provided */
1321 0, /* properties_destroyed */
1322 0, /* todo_flags_start */
1323 ( TODO_dump_symtab
), /* todo_flags_finish */
1326 class pass_ipa_modref
: public ipa_opt_pass_d
1329 pass_ipa_modref (gcc::context
*ctxt
)
1330 : ipa_opt_pass_d (pass_data_ipa_modref
, ctxt
,
1331 modref_generate
, /* generate_summary */
1332 modref_write
, /* write_summary */
1333 modref_read
, /* read_summary */
1334 modref_write
, /* write_optimization_summary */
1335 modref_read
, /* read_optimization_summary */
1336 NULL
, /* stmt_fixup */
1337 0, /* function_transform_todo_flags_start */
1338 NULL
, /* function_transform */
1339 NULL
) /* variable_transform */
1342 /* opt_pass methods: */
1343 opt_pass
*clone () { return new pass_ipa_modref (m_ctxt
); }
1344 virtual bool gate (function
*)
1348 virtual unsigned int execute (function
*);
1354 unsigned int pass_modref::execute (function
*f
)
1356 /* If new function is being added during IPA, we can skip analysis. */
1357 if (summaries
&& ((modref_summaries
*)summaries
)->ipa
)
1359 analyze_function (f
, false);
1364 make_pass_modref (gcc::context
*ctxt
)
1366 return new pass_modref (ctxt
);
1370 make_pass_ipa_modref (gcc::context
*ctxt
)
1372 return new pass_ipa_modref (ctxt
);
1375 /* Skip edges from and to nodes without ipa_pure_const enabled.
1376 Ignore not available symbols. */
1379 ignore_edge (struct cgraph_edge
*e
)
1381 enum availability avail
;
1382 cgraph_node
*callee
= e
->callee
->function_or_virtual_thunk_symbol
1383 (&avail
, e
->caller
);
1385 return (avail
<= AVAIL_INTERPOSABLE
1386 || !summaries
->get (callee
)
1387 || flags_from_decl_or_type (e
->callee
->decl
)
1388 & (ECF_CONST
| ECF_NOVOPS
));
1391 /* Compute parm_map for CALLE_EDGE. */
1394 compute_parm_map (cgraph_edge
*callee_edge
, vec
<modref_parm_map
> *parm_map
)
1396 class ipa_edge_args
*args
;
1397 if (ipa_node_params_sum
1398 && !callee_edge
->call_stmt_cannot_inline_p
1399 && (args
= IPA_EDGE_REF (callee_edge
)) != NULL
)
1401 int i
, count
= ipa_get_cs_argument_count (args
);
1402 class ipa_node_params
*caller_parms_info
, *callee_pi
;
1403 class ipa_call_summary
*es
1404 = ipa_call_summaries
->get (callee_edge
);
1406 = callee_edge
->callee
->function_or_virtual_thunk_symbol
1407 (NULL
, callee_edge
->caller
);
1409 caller_parms_info
= IPA_NODE_REF (callee_edge
->caller
->inlined_to
1410 ? callee_edge
->caller
->inlined_to
1411 : callee_edge
->caller
);
1412 callee_pi
= IPA_NODE_REF (callee
);
1414 (*parm_map
).safe_grow (count
);
1416 for (i
= 0; i
< count
; i
++)
1418 if (es
&& es
->param
[i
].points_to_local_or_readonly_memory
)
1420 (*parm_map
)[i
].parm_index
= -2;
1424 struct ipa_jump_func
*jf
1425 = ipa_get_ith_jump_func (args
, i
);
1426 if (jf
&& callee_pi
)
1428 tree cst
= ipa_value_from_jfunc (caller_parms_info
,
1432 if (cst
&& points_to_local_or_readonly_memory_p (cst
))
1434 (*parm_map
)[i
].parm_index
= -2;
1438 if (jf
&& jf
->type
== IPA_JF_PASS_THROUGH
)
1440 (*parm_map
)[i
].parm_index
1441 = ipa_get_jf_pass_through_formal_id (jf
);
1442 (*parm_map
)[i
].parm_offset_known
1443 = ipa_get_jf_pass_through_operation (jf
) == NOP_EXPR
;
1444 (*parm_map
)[i
].parm_offset
= 0;
1447 if (jf
&& jf
->type
== IPA_JF_ANCESTOR
)
1449 (*parm_map
)[i
].parm_index
= ipa_get_jf_ancestor_formal_id (jf
);
1450 (*parm_map
)[i
].parm_offset_known
= true;
1451 (*parm_map
)[i
].parm_offset
= ipa_get_jf_ancestor_offset (jf
);
1454 (*parm_map
)[i
].parm_index
= -1;
1458 fprintf (dump_file
, " Parm map: ");
1459 for (i
= 0; i
< count
; i
++)
1460 fprintf (dump_file
, " %i", (*parm_map
)[i
].parm_index
);
1461 fprintf (dump_file
, "\n");
1466 /* Call EDGE was inlined; merge summary from callee to the caller. */
1469 ipa_merge_modref_summary_after_inlining (cgraph_edge
*edge
)
1474 struct cgraph_node
*to
= (edge
->caller
->inlined_to
1475 ? edge
->caller
->inlined_to
: edge
->caller
);
1476 class modref_summary
*to_info
= summaries
->get (to
);
1481 class modref_summary
*callee_info
= summaries
->get (edge
->callee
);
1482 int flags
= flags_from_decl_or_type (edge
->callee
->decl
);
1486 if (ignore_stores_p (edge
->callee
->decl
, flags
))
1489 to_info
->loads
->collapse ();
1490 if (to_info
->loads_lto
)
1491 to_info
->loads_lto
->collapse ();
1495 summaries
->remove (to
);
1496 summaries
->remove (edge
->callee
);
1502 auto_vec
<modref_parm_map
, 32> parm_map
;
1504 compute_parm_map (edge
, &parm_map
);
1507 to_info
->loads
->merge (callee_info
->loads
, &parm_map
);
1508 if (to_info
->stores
)
1509 to_info
->stores
->merge (callee_info
->stores
, &parm_map
);
1510 if (to_info
->loads_lto
)
1511 to_info
->loads_lto
->merge (callee_info
->loads_lto
, &parm_map
);
1512 if (to_info
->stores_lto
)
1513 to_info
->stores_lto
->merge (callee_info
->stores_lto
, &parm_map
);
1515 if (!to_info
->useful_p (flags
))
1516 summaries
->remove (to
);
1517 summaries
->remove (edge
->callee
);
1521 /* Collapse loads and return true if something changed. */
1524 collapse_loads (modref_summary
*cur_summary
)
1526 bool changed
= false;
1528 if (cur_summary
->loads
&& !cur_summary
->loads
->every_base
)
1530 cur_summary
->loads
->collapse ();
1533 if (cur_summary
->loads_lto
1534 && !cur_summary
->loads_lto
->every_base
)
1536 cur_summary
->loads_lto
->collapse ();
1542 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE. */
1545 modref_propagate_in_scc (cgraph_node
*component_node
)
1547 bool changed
= true;
1553 for (struct cgraph_node
*cur
= component_node
; cur
;
1554 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
1556 cgraph_node
*node
= cur
->inlined_to
? cur
->inlined_to
: cur
;
1557 modref_summary
*cur_summary
= summaries
->get (node
);
1563 fprintf (dump_file
, " Processing %s%s%s\n",
1565 TREE_READONLY (cur
->decl
) ? " (const)" : "",
1566 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
1568 for (cgraph_edge
*e
= cur
->indirect_calls
; e
; e
= e
->next_callee
)
1570 if (e
->indirect_info
->ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
1572 if (ignore_stores_p (cur
->decl
, e
->indirect_info
->ecf_flags
))
1575 fprintf (dump_file
, " Indirect call: "
1576 "collapsing loads\n");
1577 changed
|= collapse_loads (cur_summary
);
1582 fprintf (dump_file
, " Indirect call: giving up\n");
1583 summaries
->remove (node
);
1593 for (cgraph_edge
*callee_edge
= cur
->callees
; callee_edge
;
1594 callee_edge
= callee_edge
->next_callee
)
1596 int flags
= flags_from_decl_or_type (callee_edge
->callee
->decl
);
1597 modref_summary
*callee_summary
;
1598 struct cgraph_node
*callee
;
1600 if (flags
& (ECF_CONST
| ECF_NOVOPS
)
1601 || !callee_edge
->inline_failed
)
1604 /* Get the callee and its summary. */
1605 enum availability avail
;
1606 callee
= callee_edge
->callee
->function_or_virtual_thunk_symbol
1609 /* It is not necessary to re-process calls outside of the
1613 || ((struct ipa_dfs_info
*)cur
->aux
)->scc_no
1614 != ((struct ipa_dfs_info
*)callee
->aux
)->scc_no
))
1618 fprintf (dump_file
, " Call to %s\n",
1619 callee_edge
->callee
->dump_name ());
1621 bool ignore_stores
= ignore_stores_p (cur
->decl
, flags
);
1623 /* We don't know anything about CALLEE, hence we cannot tell
1624 anything about the entire component. */
1626 if (avail
<= AVAIL_INTERPOSABLE
1627 || !(callee_summary
= summaries
->get (callee
)))
1631 if (dump_file
&& avail
<= AVAIL_INTERPOSABLE
)
1632 fprintf (dump_file
, " Call target interposable"
1633 " or not available\n");
1635 fprintf (dump_file
, " No call target summary\n");
1637 summaries
->remove (node
);
1643 if (dump_file
&& avail
<= AVAIL_INTERPOSABLE
)
1644 fprintf (dump_file
, " Call target interposable"
1645 " or not available; collapsing loads\n");
1647 fprintf (dump_file
, " No call target summary;"
1648 " collapsing loads\n");
1650 changed
|= collapse_loads (cur_summary
);
1655 /* We can not safely optimize based on summary of callee if it
1656 does not always bind to current def: it is possible that
1657 memory load was optimized out earlier which may not happen in
1658 the interposed variant. */
1659 if (!callee_edge
->binds_to_current_def_p ())
1661 changed
|= collapse_loads (cur_summary
);
1663 fprintf (dump_file
, " May not bind local;"
1664 " collapsing loads\n");
1668 auto_vec
<modref_parm_map
, 32> parm_map
;
1670 compute_parm_map (callee_edge
, &parm_map
);
1672 /* Merge in callee's information. */
1673 if (callee_summary
->loads
)
1674 changed
|= cur_summary
->loads
->merge
1675 (callee_summary
->loads
, &parm_map
);
1676 if (callee_summary
->stores
)
1677 changed
|= cur_summary
->stores
->merge
1678 (callee_summary
->stores
, &parm_map
);
1679 if (callee_summary
->loads_lto
)
1680 changed
|= cur_summary
->loads_lto
->merge
1681 (callee_summary
->loads_lto
, &parm_map
);
1682 if (callee_summary
->stores_lto
)
1683 changed
|= cur_summary
->stores_lto
->merge
1684 (callee_summary
->stores_lto
, &parm_map
);
1685 if (dump_file
&& changed
)
1686 cur_summary
->dump (dump_file
);
1691 for (struct cgraph_node
*cur
= component_node
; cur
;
1692 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
1694 modref_summary
*cur_summary
= summaries
->get (cur
);
1696 cur_summary
->finished
= true;
1701 "Propagation finished in %i iterations\n", iteration
);
1702 for (struct cgraph_node
*cur
= component_node
; cur
;
1703 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
1704 if (!cur
->inlined_to
)
1706 modref_summary
*cur_summary
= summaries
->get (cur
);
1708 fprintf (dump_file
, "Propagated modref for %s%s%s\n",
1710 TREE_READONLY (cur
->decl
) ? " (const)" : "",
1711 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
1713 cur_summary
->dump (dump_file
);
1715 fprintf (dump_file
, " Not tracked\n");
1720 /* Run the IPA pass. This will take a function's summaries and calls and
1721 construct new summaries which represent a transitive closure. So that
1722 summary of an analyzed function contains information about the loads and
1723 stores that the function or any function that it calls does. */
1726 pass_ipa_modref::execute (function
*)
1731 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*,
1732 symtab
->cgraph_count
);
1734 order_pos
= ipa_reduced_postorder (order
, true, ignore_edge
);
1737 /* Iterate over all strongly connected components in post-order. */
1738 for (i
= 0; i
< order_pos
; i
++)
1740 /* Get the component's representative. That's just any node in the
1741 component from which we can traverse the entire component. */
1742 struct cgraph_node
*component_node
= order
[i
];
1745 fprintf (dump_file
, "\n\nStart of SCC component\n");
1747 modref_propagate_in_scc (component_node
);
1749 ((modref_summaries
*)summaries
)->ipa
= false;
1750 ipa_free_postorder_info ();
1755 /* Summaries must stay alive until end of compilation. */
1758 ipa_modref_c_finalize ()
1761 ggc_delete (summaries
);
1765 #include "gt-ipa-modref.h"