1 /* Search for references that a functions loads or stores.
2 Copyright (C) 2020 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls. The summary has a form of decision tree
24 described in ipa-modref-tree.h.
26 This file contains a tree pass and an IPA pass. Both performs the same
27 analysis; however, the tree pass is executed during early and late optimization
28 passes to propagate info downwards in the compilation order. IPA pass
29 propagates across the callgraph and is able to handle recursion and works on
30 whole program during link-time analysis.
32 LTO mode differs from the local mode by not recording alias sets but types
33 that are translated to alias sets later. This is necessary in order to stream
34 the information because the alias sets are rebuilt at stream-in time and may
35 not correspond to ones seen during analysis. For this reason part of analysis
40 #include "coretypes.h"
44 #include "alloc-pool.h"
45 #include "tree-pass.h"
46 #include "gimple-iterator.h"
49 #include "ipa-utils.h"
50 #include "symbol-summary.h"
51 #include "gimple-pretty-print.h"
52 #include "gimple-walk.h"
53 #include "print-tree.h"
54 #include "tree-streamer.h"
57 #include "ipa-modref-tree.h"
58 #include "ipa-modref.h"
59 #include "value-range.h"
61 #include "ipa-fnsummary.h"
63 /* Class (from which there is one global instance) that holds modref summaries
64 for all analyzed functions. */
65 class GTY((user
)) modref_summaries
66 : public fast_function_summary
<modref_summary
*, va_gc
>
69 modref_summaries (symbol_table
*symtab
)
70 : fast_function_summary
<modref_summary
*, va_gc
> (symtab
) {}
71 virtual void insert (cgraph_node
*, modref_summary
*state
);
72 virtual void duplicate (cgraph_node
*src_node
,
73 cgraph_node
*dst_node
,
74 modref_summary
*src_data
,
75 modref_summary
*dst_data
);
76 /* This flag controls whether newly inserted functions should be analyzed
77 in IPA or normal mode. Functions inserted between IPA analysis and
78 ipa-modref pass execution needs to be analyzed in IPA mode while all
79 other insertions leads to normal analysis. */
81 static modref_summaries
*create_ggc (symbol_table
*symtab
)
83 return new (ggc_alloc_no_dtor
<modref_summaries
> ())
84 modref_summaries (symtab
);
88 /* Global variable holding all modref summaries. */
89 static GTY(()) fast_function_summary
<modref_summary
*, va_gc
> *summaries
;
91 /* Summary for a single function which this pass produces. */
93 modref_summary::modref_summary ()
94 : loads (NULL
), stores (NULL
), loads_lto (NULL
),
95 stores_lto (NULL
), finished (0)
99 modref_summary::~modref_summary ()
106 ggc_delete (loads_lto
);
108 ggc_delete (stores_lto
);
111 /* Return true if lto summary is potentially useful for optimization. */
114 modref_summary::lto_useful_p (int ecf_flags
)
116 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
118 if (loads_lto
&& !loads_lto
->every_base
)
120 if (ecf_flags
& ECF_PURE
)
122 return stores_lto
&& !stores_lto
->every_base
;
125 /* Return true if summary is potentially useful for optimization. */
128 modref_summary::useful_p (int ecf_flags
)
130 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
132 if (lto_useful_p (ecf_flags
))
134 if (loads
&& !loads
->every_base
)
136 if (ecf_flags
& ECF_PURE
)
138 return stores
&& !stores
->every_base
;
144 dump_access (modref_access_node
*a
, FILE *out
)
146 fprintf (out
, " access:");
147 if (a
->parm_index
!= -1)
149 fprintf (out
, " Parm %i", a
->parm_index
);
150 if (a
->parm_offset_known
)
152 fprintf (out
, " param offset:");
153 print_dec ((poly_int64_pod
)a
->parm_offset
, out
, SIGNED
);
156 if (a
->range_info_useful_p ())
158 fprintf (out
, " offset:");
159 print_dec ((poly_int64_pod
)a
->offset
, out
, SIGNED
);
160 fprintf (out
, " size:");
161 print_dec ((poly_int64_pod
)a
->size
, out
, SIGNED
);
162 fprintf (out
, " max_size:");
163 print_dec ((poly_int64_pod
)a
->max_size
, out
, SIGNED
);
168 /* Dump records TT to OUT. */
171 dump_records (modref_records
*tt
, FILE *out
)
173 fprintf (out
, " Limits: %i bases, %i refs\n",
174 (int)tt
->max_bases
, (int)tt
->max_refs
);
177 fprintf (out
, " Every base\n");
181 modref_base_node
<alias_set_type
> *n
;
182 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
184 fprintf (out
, " Base %i: alias set %i\n", (int)i
, n
->base
);
187 fprintf (out
, " Every ref\n");
191 modref_ref_node
<alias_set_type
> *r
;
192 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
194 fprintf (out
, " Ref %i: alias set %i\n", (int)j
, r
->ref
);
197 fprintf (out
, " Every access\n");
201 modref_access_node
*a
;
202 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
203 dump_access (a
, out
);
208 /* Dump records TT to OUT. */
211 dump_lto_records (modref_records_lto
*tt
, FILE *out
)
213 fprintf (out
, " Limits: %i bases, %i refs\n",
214 (int)tt
->max_bases
, (int)tt
->max_refs
);
217 fprintf (out
, " Every base\n");
221 modref_base_node
<tree
> *n
;
222 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
224 fprintf (out
, " Base %i:", (int)i
);
225 print_generic_expr (dump_file
, n
->base
);
226 fprintf (out
, " (alias set %i)\n",
227 n
->base
? get_alias_set (n
->base
) : 0);
230 fprintf (out
, " Every ref\n");
234 modref_ref_node
<tree
> *r
;
235 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
237 fprintf (out
, " Ref %i:", (int)j
);
238 print_generic_expr (dump_file
, r
->ref
);
239 fprintf (out
, " (alias set %i)\n",
240 r
->ref
? get_alias_set (r
->ref
) : 0);
243 fprintf (out
, " Every access\n");
247 modref_access_node
*a
;
248 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
249 dump_access (a
, out
);
257 modref_summary::dump (FILE *out
)
261 fprintf (out
, " loads:\n");
262 dump_records (loads
, out
);
266 fprintf (out
, " stores:\n");
267 dump_records (stores
, out
);
271 fprintf (out
, " LTO loads:\n");
272 dump_lto_records (loads_lto
, out
);
276 fprintf (out
, " LTO stores:\n");
277 dump_lto_records (stores_lto
, out
);
282 /* Get function summary for FUNC if it exists, return NULL otherwise. */
285 get_modref_function_summary (cgraph_node
*func
)
287 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
291 /* A single function body may be represented by multiple symbols with
292 different visibility. For example, if FUNC is an interposable alias,
293 we don't want to return anything, even if we have summary for the target
295 enum availability avail
;
296 func
= func
->function_or_virtual_thunk_symbol
297 (&avail
, cgraph_node::get (current_function_decl
));
298 if (avail
<= AVAIL_INTERPOSABLE
)
301 /* Attempt to get summary for FUNC. If analysis of FUNC hasn't finished yet,
302 don't return anything. */
303 modref_summary
*r
= summaries
->get (func
);
304 if (r
&& r
->finished
)
310 /* Construct modref_access_node from REF. */
311 static modref_access_node
312 get_access (ao_ref
*ref
)
316 base
= ao_ref_base (ref
);
317 modref_access_node a
= {ref
->offset
, ref
->size
, ref
->max_size
,
319 if (TREE_CODE (base
) == MEM_REF
|| TREE_CODE (base
) == TARGET_MEM_REF
)
322 base
= TREE_OPERAND (base
, 0);
323 if (TREE_CODE (base
) == SSA_NAME
324 && SSA_NAME_IS_DEFAULT_DEF (base
)
325 && TREE_CODE (SSA_NAME_VAR (base
)) == PARM_DECL
)
328 for (tree t
= DECL_ARGUMENTS (current_function_decl
);
329 t
!= SSA_NAME_VAR (base
); t
= DECL_CHAIN (t
))
338 if (TREE_CODE (memref
) == MEM_REF
)
341 = wi::to_poly_wide (TREE_OPERAND
342 (memref
, 1)).to_shwi (&a
.parm_offset
);
345 a
.parm_offset_known
= false;
355 /* Record access into the modref_records data structure. */
358 record_access (modref_records
*tt
, ao_ref
*ref
)
360 alias_set_type base_set
= !flag_strict_aliasing
? 0
361 : ao_ref_base_alias_set (ref
);
362 alias_set_type ref_set
= !flag_strict_aliasing
? 0
363 : (ao_ref_alias_set (ref
));
364 modref_access_node a
= get_access (ref
);
367 fprintf (dump_file
, " - Recording base_set=%i ref_set=%i parm=%i\n",
368 base_set
, ref_set
, a
.parm_index
);
370 tt
->insert (base_set
, ref_set
, a
);
373 /* IPA version of record_access_tree. */
376 record_access_lto (modref_records_lto
*tt
, ao_ref
*ref
)
378 /* get_alias_set sometimes use different type to compute the alias set
379 than TREE_TYPE (base). Do same adjustments. */
380 tree base_type
= NULL_TREE
, ref_type
= NULL_TREE
;
381 if (flag_strict_aliasing
)
386 while (handled_component_p (base
))
387 base
= TREE_OPERAND (base
, 0);
389 base_type
= reference_alias_ptr_type_1 (&base
);
392 base_type
= TREE_TYPE (base
);
394 base_type
= TYPE_REF_CAN_ALIAS_ALL (base_type
)
395 ? NULL_TREE
: TREE_TYPE (base_type
);
397 tree ref_expr
= ref
->ref
;
398 ref_type
= reference_alias_ptr_type_1 (&ref_expr
);
401 ref_type
= TREE_TYPE (ref_expr
);
403 ref_type
= TYPE_REF_CAN_ALIAS_ALL (ref_type
)
404 ? NULL_TREE
: TREE_TYPE (ref_type
);
406 /* Sanity check that we are in sync with what get_alias_set does. */
407 gcc_checking_assert ((!base_type
&& !ao_ref_base_alias_set (ref
))
408 || get_alias_set (base_type
)
409 == ao_ref_base_alias_set (ref
));
410 gcc_checking_assert ((!ref_type
&& !ao_ref_alias_set (ref
))
411 || get_alias_set (ref_type
)
412 == ao_ref_alias_set (ref
));
414 /* Do not bother to record types that have no meaningful alias set.
415 Also skip variably modified types since these go to local streams. */
416 if (base_type
&& (!get_alias_set (base_type
)
417 || variably_modified_type_p (base_type
, NULL_TREE
)))
418 base_type
= NULL_TREE
;
419 if (ref_type
&& (!get_alias_set (ref_type
)
420 || variably_modified_type_p (ref_type
, NULL_TREE
)))
421 ref_type
= NULL_TREE
;
423 modref_access_node a
= get_access (ref
);
426 fprintf (dump_file
, " - Recording base type:");
427 print_generic_expr (dump_file
, base_type
);
428 fprintf (dump_file
, " (alias set %i) ref type:",
429 base_type
? get_alias_set (base_type
) : 0);
430 print_generic_expr (dump_file
, ref_type
);
431 fprintf (dump_file
, " (alias set %i) parm:%i\n",
432 ref_type
? get_alias_set (ref_type
) : 0,
436 tt
->insert (base_type
, ref_type
, a
);
439 /* Returns true if and only if we should store the access to EXPR.
440 Some accesses, e.g. loads from automatic variables, are not interesting. */
443 record_access_p (tree expr
)
/* Memory that is provably local to this function or read-only can never be
   observed across a call boundary, so modref need not track it.  */
445 if (refs_local_or_readonly_memory_p (expr
))
/* Only reached with dumping enabled; explain why the access is dropped.  */
448 fprintf (dump_file
, " - Read-only or local, ignoring.\n");
454 /* Return true if ECF flags says that stores can be ignored. */
457 ignore_stores_p (tree caller
, int flags
)
/* Pure functions promise not to write global memory at all.  */
459 if (flags
& ECF_PURE
)
/* A noreturn call whose frame cannot be exited by an exception (either the
   callee is nothrow, or CALLER is built without exception support) never
   returns control, so stores it performs cannot be observed by CALLER.
   NOTE(review): assumes no observation via longjmp — confirm.  */
461 if ((flags
& (ECF_NORETURN
| ECF_NOTHROW
)) == (ECF_NORETURN
| ECF_NOTHROW
)
462 || (!opt_for_fn (caller
, flag_exceptions
) && (flags
& ECF_NORETURN
)))
467 /* Merge side effects of call STMT to function with CALLEE_SUMMARY
468 into CUR_SUMMARY.  Return true if something changed.
469 If IGNORE_STORES is true, do not merge stores. */
472 merge_call_side_effects (modref_summary
*cur_summary
,
473 gimple
*stmt
, modref_summary
*callee_summary
,
476 auto_vec
<modref_parm_map
, 32> parm_map
;
477 bool changed
= false;
479 parm_map
.safe_grow (gimple_call_num_args (stmt
));
480 for (unsigned i
= 0; i
< gimple_call_num_args (stmt
); i
++)
482 tree op
= gimple_call_arg (stmt
, i
);
484 if (TREE_CODE (op
) == SSA_NAME
485 && SSA_NAME_IS_DEFAULT_DEF (op
)
486 && TREE_CODE (SSA_NAME_VAR (op
)) == PARM_DECL
)
489 for (tree t
= DECL_ARGUMENTS (current_function_decl
);
490 t
!= SSA_NAME_VAR (op
); t
= DECL_CHAIN (t
))
499 parm_map
[i
].parm_index
= index
;
500 parm_map
[i
].parm_offset_known
= true;
501 parm_map
[i
].parm_offset
= 0;
503 else if (points_to_local_or_readonly_memory_p (op
))
504 parm_map
[i
].parm_index
= -2;
506 parm_map
[i
].parm_index
= -1;
509 /* Merge with callee's summary. */
510 if (cur_summary
->loads
)
511 changed
|= cur_summary
->loads
->merge (callee_summary
->loads
, &parm_map
);
512 if (cur_summary
->loads_lto
)
513 changed
|= cur_summary
->loads_lto
->merge (callee_summary
->loads_lto
,
517 if (cur_summary
->stores
)
518 changed
|= cur_summary
->stores
->merge (callee_summary
->stores
,
520 if (cur_summary
->stores_lto
)
521 changed
|= cur_summary
->stores_lto
->merge (callee_summary
->stores_lto
,
527 /* Analyze function call STMT in function F.
528 Remember recursive calls in RECURSIVE_CALLS. */
531 analyze_call (modref_summary
*cur_summary
,
532 gimple
*stmt
, vec
<gimple
*> *recursive_calls
)
534 /* Check flags on the function call. In certain cases, analysis can be
536 int flags
= gimple_call_flags (stmt
);
537 if (flags
& (ECF_CONST
| ECF_NOVOPS
))
541 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
542 "except for args.\n");
546 /* Pure functions do not affect global memory. Stores by functions which are
547 noreturn and do not throw can safely be ignored. */
548 bool ignore_stores
= ignore_stores_p (current_function_decl
, flags
);
550 /* Next, we try to get the callee's function declaration. The goal is to
551 merge their summary with ours. */
552 tree callee
= gimple_call_fndecl (stmt
);
554 /* Check if this is an indirect call. */
557 /* If the indirect call does not write memory, our store summary is
558 unaffected, but we have to discard our loads summary (we don't know
559 anything about the loads that the called function performs). */
563 fprintf (dump_file
, " - Indirect call which does not write memory, "
564 "discarding loads.\n");
565 if (cur_summary
->loads
)
566 cur_summary
->loads
->collapse ();
567 if (cur_summary
->loads_lto
)
568 cur_summary
->loads_lto
->collapse ();
572 fprintf (dump_file
, " - Indirect call.\n");
576 struct cgraph_node
*callee_node
= cgraph_node::get_create (callee
);
578 /* We can not safely optimize based on summary of callee if it does
579 not always bind to current def: it is possible that memory load
580 was optimized out earlier which may not happen in the interposed
582 if (!callee_node
->binds_to_current_def_p ())
585 fprintf (dump_file
, " - May be interposed: collapsing loads.\n");
586 if (cur_summary
->loads
)
587 cur_summary
->loads
->collapse ();
588 if (cur_summary
->loads_lto
)
589 cur_summary
->loads_lto
->collapse ();
592 /* If this is a recursive call, the target summary is the same as ours, so
593 there's nothing to do. */
594 if (recursive_call_p (current_function_decl
, callee
))
596 recursive_calls
->safe_push (stmt
);
598 fprintf (dump_file
, " - Skipping recursive call.\n");
602 gcc_assert (callee_node
!= NULL
);
604 /* Get the function symbol and its availability. */
605 enum availability avail
;
606 callee_node
= callee_node
->function_symbol (&avail
);
607 if (avail
<= AVAIL_INTERPOSABLE
)
609 /* Keep stores summary, but discard all loads for interposable function
613 if (cur_summary
->loads
)
614 cur_summary
->loads
->collapse ();
615 if (cur_summary
->loads_lto
)
616 cur_summary
->loads_lto
->collapse ();
620 fprintf (dump_file
, " - Function availability <= AVAIL_INTERPOSABLE.\n");
624 /* Get callee's modref summary. As above, if there's no summary, we either
625 have to give up or, if stores are ignored, we can just purge loads. */
626 modref_summary
*callee_summary
= summaries
->get (callee_node
);
631 if (cur_summary
->loads
)
632 cur_summary
->loads
->collapse ();
633 if (cur_summary
->loads_lto
)
634 cur_summary
->loads_lto
->collapse ();
638 fprintf (dump_file
, " - No modref summary available for callee.\n");
642 merge_call_side_effects (cur_summary
, stmt
, callee_summary
, ignore_stores
);
647 /* Helper for analyze_stmt. */
650 analyze_load (gimple
*, tree
, tree op
, void *data
)
652 modref_summary
*summary
= (modref_summary
*)data
;
656 fprintf (dump_file
, " - Analyzing load: ");
657 print_generic_expr (dump_file
, op
);
658 fprintf (dump_file
, "\n");
661 if (!record_access_p (op
))
665 ao_ref_init (&r
, op
);
668 record_access (summary
->loads
, &r
);
669 if (summary
->loads_lto
)
670 record_access_lto (summary
->loads_lto
, &r
);
674 /* Helper for analyze_stmt. */
677 analyze_store (gimple
*, tree
, tree op
, void *data
)
679 modref_summary
*summary
= (modref_summary
*)data
;
683 fprintf (dump_file
, " - Analyzing store: ");
684 print_generic_expr (dump_file
, op
);
685 fprintf (dump_file
, "\n");
688 if (!record_access_p (op
))
692 ao_ref_init (&r
, op
);
695 record_access (((modref_summary
*)data
)->stores
, &r
);
696 if (summary
->stores_lto
)
697 record_access_lto (((modref_summary
*)data
)->stores_lto
, &r
);
701 /* Analyze statement STMT of function F.
702 If IPA is true do not merge in side effects of calls. */
705 analyze_stmt (modref_summary
*summary
, gimple
*stmt
, bool ipa
,
706 vec
<gimple
*> *recursive_calls
)
708 /* In general we can not ignore clobbers because they are barriers for code
709 motion, however after inlining it is safe to do because local optimization
710 passes do not consider clobbers from other functions.
711 Similar logic is in ipa-pure-consts. */
712 if ((ipa
|| cfun
->after_inlining
) && gimple_clobber_p (stmt
))
715 /* Analyze all loads and stores in STMT. */
716 walk_stmt_load_store_ops (stmt
, summary
,
717 analyze_load
, analyze_store
);
719 switch (gimple_code (stmt
))
722 /* If the ASM statement does not read nor write memory, there's nothing
723 to do. Otherwise just give up. */
724 if (!gimple_asm_clobbers_memory_p (as_a
<gasm
*> (stmt
)))
727 fprintf (dump_file
, " - Function contains GIMPLE_ASM statement "
728 "which clobbers memory.\n");
732 return analyze_call (summary
, stmt
, recursive_calls
);
735 /* Nothing to do for other types of statements. */
740 /* Analyze function F. IPA indicates whether we're running in local mode (false)
741 or the IPA mode (true). */
744 analyze_function (function
*f
, bool ipa
)
747 fprintf (dump_file
, "modref analyzing '%s' (ipa=%i)%s%s\n",
748 function_name (f
), ipa
,
749 TREE_READONLY (current_function_decl
) ? " (const)" : "",
750 DECL_PURE_P (current_function_decl
) ? " (pure)" : "");
752 /* Skip the analysis when the modref pass is disabled (-fno-ipa-modref). */
753 if (!flag_ipa_modref
)
756 /* Initialize the summary. */
758 summaries
= modref_summaries::create_ggc (symtab
);
759 else /* Remove existing summary if we are re-running the pass. */
761 if (dump_file
&& summaries
->get (cgraph_node::get (f
->decl
)))
763 fprintf (dump_file
, "Past summary:\n");
764 summaries
->get (cgraph_node::get (f
->decl
))->dump (dump_file
);
766 summaries
->remove (cgraph_node::get (f
->decl
));
769 ((modref_summaries
*)summaries
)->ipa
= ipa
;
771 modref_summary
*summary
= summaries
->get_create (cgraph_node::get (f
->decl
));
773 /* Compute no-LTO summaries when local optimization is going to happen. */
774 bool nolto
= (!ipa
|| ((!flag_lto
|| flag_fat_lto_objects
) && !in_lto_p
)
775 || (in_lto_p
&& !flag_wpa
776 && flag_incremental_link
!= INCREMENTAL_LINK_LTO
));
778 /* Compute LTO when LTO streaming is going to happen. */
779 bool lto
= ipa
&& ((flag_lto
&& !in_lto_p
)
781 || flag_incremental_link
== INCREMENTAL_LINK_LTO
);
783 /* Create and initialize summary for F.
784 Note that summaries may be already allocated from previous
788 gcc_assert (!summary
->loads
);
789 summary
->loads
= modref_records::create_ggc (param_modref_max_bases
,
790 param_modref_max_refs
,
791 param_modref_max_accesses
);
792 gcc_assert (!summary
->stores
);
793 summary
->stores
= modref_records::create_ggc (param_modref_max_bases
,
794 param_modref_max_refs
,
795 param_modref_max_accesses
);
799 gcc_assert (!summary
->loads_lto
);
800 summary
->loads_lto
= modref_records_lto::create_ggc
801 (param_modref_max_bases
,
802 param_modref_max_refs
,
803 param_modref_max_accesses
);
804 gcc_assert (!summary
->stores_lto
);
805 summary
->stores_lto
= modref_records_lto::create_ggc
806 (param_modref_max_bases
,
807 param_modref_max_refs
,
808 param_modref_max_accesses
);
810 summary
->finished
= false;
811 int ecf_flags
= flags_from_decl_or_type (current_function_decl
);
812 auto_vec
<gimple
*, 32> recursive_calls
;
814 /* Analyze each statement in each basic block of the function. If the
815 statement cannot be analyzed (for any reason), the entire function cannot
816 be analyzed by modref. */
818 FOR_EACH_BB_FN (bb
, f
)
820 gimple_stmt_iterator si
;
821 for (si
= gsi_after_labels (bb
); !gsi_end_p (si
); gsi_next (&si
))
823 if (!analyze_stmt (summary
, gsi_stmt (si
), ipa
, &recursive_calls
)
824 || !summary
->useful_p (ecf_flags
))
826 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
827 summaries
->remove (fnode
);
830 " - modref done with result: not tracked.\n");
836 /* In non-IPA mode we need to perform iterative datafow on recursive calls.
837 This needs to be done after all other side effects are computed. */
844 for (unsigned i
= 0; i
< recursive_calls
.length (); i
++)
846 changed
|= merge_call_side_effects
847 (summary
, recursive_calls
[i
], summary
,
848 ignore_stores_p (current_function_decl
,
850 (recursive_calls
[i
])));
851 if (!summary
->useful_p (ecf_flags
))
853 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
854 summaries
->remove (fnode
);
857 " - modref done with result: not tracked.\n");
865 summary
->finished
= true;
869 fprintf (dump_file
, " - modref done with result: tracked.\n");
870 summary
->dump (dump_file
);
874 /* Callback for generate_summary. */
877 modref_generate (void)
879 struct cgraph_node
*node
;
880 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node
)
882 function
*f
= DECL_STRUCT_FUNCTION (node
->decl
);
886 analyze_function (f
, true);
891 /* Called when a new function is inserted to callgraph late. */
894 modref_summaries::insert (struct cgraph_node
*node
, modref_summary
*)
896 if (!DECL_STRUCT_FUNCTION (node
->decl
))
898 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
899 analyze_function (DECL_STRUCT_FUNCTION (node
->decl
), ipa
);
903 /* Called when new clone is inserted to callgraph late. */
906 modref_summaries::duplicate (cgraph_node
*, cgraph_node
*,
907 modref_summary
*src_data
,
908 modref_summary
*dst_data
)
910 dst_data
->finished
= src_data
->finished
;
911 if (src_data
->stores
)
913 dst_data
->stores
= modref_records::create_ggc
914 (src_data
->stores
->max_bases
,
915 src_data
->stores
->max_refs
,
916 src_data
->stores
->max_accesses
);
917 dst_data
->stores
->copy_from (src_data
->stores
);
921 dst_data
->loads
= modref_records::create_ggc
922 (src_data
->loads
->max_bases
,
923 src_data
->loads
->max_refs
,
924 src_data
->loads
->max_accesses
);
925 dst_data
->loads
->copy_from (src_data
->loads
);
927 if (src_data
->stores_lto
)
929 dst_data
->stores_lto
= modref_records_lto::create_ggc
930 (src_data
->stores_lto
->max_bases
,
931 src_data
->stores_lto
->max_refs
,
932 src_data
->stores_lto
->max_accesses
);
933 dst_data
->stores_lto
->copy_from (src_data
->stores_lto
);
935 if (src_data
->loads_lto
)
937 dst_data
->loads_lto
= modref_records_lto::create_ggc
938 (src_data
->loads_lto
->max_bases
,
939 src_data
->loads_lto
->max_refs
,
940 src_data
->stores_lto
->max_accesses
);
941 dst_data
->loads_lto
->copy_from (src_data
->loads_lto
);
947 /* Definition of the modref pass on GIMPLE. */
948 const pass_data pass_data_modref
= {
953 (PROP_cfg
| PROP_ssa
),
960 class pass_modref
: public gimple_opt_pass
963 pass_modref (gcc::context
*ctxt
)
964 : gimple_opt_pass (pass_data_modref
, ctxt
) {}
966 /* opt_pass methods: */
969 return new pass_modref (m_ctxt
);
971 virtual bool gate (function
*)
973 return flag_ipa_modref
;
975 virtual unsigned int execute (function
*);
978 /* Encode TT to the output block OB using the summary streaming API. */
981 write_modref_records (modref_records_lto
*tt
, struct output_block
*ob
)
983 streamer_write_uhwi (ob
, tt
->max_bases
);
984 streamer_write_uhwi (ob
, tt
->max_refs
);
985 streamer_write_uhwi (ob
, tt
->max_accesses
);
987 streamer_write_uhwi (ob
, tt
->every_base
);
988 streamer_write_uhwi (ob
, vec_safe_length (tt
->bases
));
990 modref_base_node
<tree
> *base_node
;
991 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, base_node
)
993 stream_write_tree (ob
, base_node
->base
, true);
995 streamer_write_uhwi (ob
, base_node
->every_ref
);
996 streamer_write_uhwi (ob
, vec_safe_length (base_node
->refs
));
999 modref_ref_node
<tree
> *ref_node
;
1000 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
1002 stream_write_tree (ob
, ref_node
->ref
, true);
1003 streamer_write_uhwi (ob
, ref_node
->every_access
);
1004 streamer_write_uhwi (ob
, vec_safe_length (ref_node
->accesses
));
1007 modref_access_node
*access_node
;
1008 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
1010 streamer_write_uhwi (ob
, access_node
->parm_index
);
1011 if (access_node
->parm_index
!= -1)
1013 streamer_write_uhwi (ob
, access_node
->parm_offset_known
);
1014 if (access_node
->parm_offset_known
)
1016 streamer_write_poly_int64 (ob
, access_node
->parm_offset
);
1017 streamer_write_poly_int64 (ob
, access_node
->offset
);
1018 streamer_write_poly_int64 (ob
, access_node
->size
);
1019 streamer_write_poly_int64 (ob
, access_node
->max_size
);
1027 /* Read a modref_tree from the input block IB using the data from DATA_IN.
1028 This assumes that the tree was encoded using write_modref_tree.
1029 Either nolto_ret or lto_ret is initialized by the tree depending whether
1030 LTO streaming is expected or not. */
1033 read_modref_records (lto_input_block
*ib
, struct data_in
*data_in
,
1034 modref_records
**nolto_ret
,
1035 modref_records_lto
**lto_ret
)
1037 size_t max_bases
= streamer_read_uhwi (ib
);
1038 size_t max_refs
= streamer_read_uhwi (ib
);
1039 size_t max_accesses
= streamer_read_uhwi (ib
);
1041 /* Decide whether we want to turn LTO data types to non-LTO (i.e. when
1042 LTO re-streaming is not going to happen). */
1043 if (flag_wpa
|| flag_incremental_link
== INCREMENTAL_LINK_LTO
)
1044 *lto_ret
= modref_records_lto::create_ggc (max_bases
, max_refs
,
1047 *nolto_ret
= modref_records::create_ggc (max_bases
, max_refs
,
1050 size_t every_base
= streamer_read_uhwi (ib
);
1051 size_t nbase
= streamer_read_uhwi (ib
);
1053 gcc_assert (!every_base
|| nbase
== 0);
1057 (*nolto_ret
)->collapse ();
1059 (*lto_ret
)->collapse ();
1061 for (size_t i
= 0; i
< nbase
; i
++)
1063 tree base_tree
= stream_read_tree (ib
, data_in
);
1064 modref_base_node
<alias_set_type
> *nolto_base_node
= NULL
;
1065 modref_base_node
<tree
> *lto_base_node
= NULL
;
1067 /* At stream in time we have LTO alias info. Check if we streamed in
1068 something obviously unnecessary. Do not glob types by alias sets;
1069 it is not 100% clear that ltrans types will get merged same way.
1070 Types may get refined based on ODR type conflicts. */
1071 if (base_tree
&& !get_alias_set (base_tree
))
1075 fprintf (dump_file
, "Streamed in alias set 0 type ");
1076 print_generic_expr (dump_file
, base_tree
);
1077 fprintf (dump_file
, "\n");
1083 nolto_base_node
= (*nolto_ret
)->insert_base (base_tree
1084 ? get_alias_set (base_tree
)
1087 lto_base_node
= (*lto_ret
)->insert_base (base_tree
);
1088 size_t every_ref
= streamer_read_uhwi (ib
);
1089 size_t nref
= streamer_read_uhwi (ib
);
1091 gcc_assert (!every_ref
|| nref
== 0);
1094 if (nolto_base_node
)
1095 nolto_base_node
->collapse ();
1097 lto_base_node
->collapse ();
1099 for (size_t j
= 0; j
< nref
; j
++)
1101 tree ref_tree
= stream_read_tree (ib
, data_in
);
1103 if (ref_tree
&& !get_alias_set (ref_tree
))
1107 fprintf (dump_file
, "Streamed in alias set 0 type ");
1108 print_generic_expr (dump_file
, ref_tree
);
1109 fprintf (dump_file
, "\n");
1114 modref_ref_node
<alias_set_type
> *nolto_ref_node
= NULL
;
1115 modref_ref_node
<tree
> *lto_ref_node
= NULL
;
1117 if (nolto_base_node
)
1119 = nolto_base_node
->insert_ref (ref_tree
1120 ? get_alias_set (ref_tree
) : 0,
1123 lto_ref_node
= lto_base_node
->insert_ref (ref_tree
, max_refs
);
1125 size_t every_access
= streamer_read_uhwi (ib
);
1126 size_t naccesses
= streamer_read_uhwi (ib
);
1129 nolto_ref_node
->every_access
= every_access
;
1131 lto_ref_node
->every_access
= every_access
;
1133 for (size_t k
= 0; k
< naccesses
; k
++)
1135 int parm_index
= streamer_read_uhwi (ib
);
1136 bool parm_offset_known
= false;
1137 poly_int64 parm_offset
= 0;
1138 poly_int64 offset
= 0;
1139 poly_int64 size
= -1;
1140 poly_int64 max_size
= -1;
1142 if (parm_index
!= -1)
1144 parm_offset_known
= streamer_read_uhwi (ib
);
1145 if (parm_offset_known
)
1147 parm_offset
= streamer_read_poly_int64 (ib
);
1148 offset
= streamer_read_poly_int64 (ib
);
1149 size
= streamer_read_poly_int64 (ib
);
1150 max_size
= streamer_read_poly_int64 (ib
);
1153 modref_access_node a
= {offset
, size
, max_size
, parm_offset
,
1154 parm_index
, parm_offset_known
};
1156 nolto_ref_node
->insert_access (a
, max_accesses
);
1158 lto_ref_node
->insert_access (a
, max_accesses
);
1163 (*lto_ret
)->cleanup ();
1165 (*nolto_ret
)->cleanup ();
1168 /* Callback for write_summary. */
1173 struct output_block
*ob
= create_output_block (LTO_section_ipa_modref
);
1174 lto_symtab_encoder_t encoder
= ob
->decl_state
->symtab_node_encoder
;
1175 unsigned int count
= 0;
1180 streamer_write_uhwi (ob
, 0);
1181 streamer_write_char_stream (ob
->main_stream
, 0);
1182 produce_asm (ob
, NULL
);
1183 destroy_output_block (ob
);
1187 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
1189 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
1190 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
1193 if (cnode
&& cnode
->definition
&& !cnode
->alias
1194 && (r
= summaries
->get (cnode
))
1195 && r
->lto_useful_p (flags_from_decl_or_type (cnode
->decl
)))
1198 streamer_write_uhwi (ob
, count
);
1200 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
1202 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
1203 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
1205 if (cnode
&& cnode
->definition
&& !cnode
->alias
)
1208 modref_summary
*r
= summaries
->get (cnode
);
1210 if (!r
|| !r
->lto_useful_p (flags_from_decl_or_type (cnode
->decl
)))
1213 streamer_write_uhwi (ob
, lto_symtab_encoder_encode (encoder
, cnode
));
1215 streamer_write_uhwi (ob
, r
->loads_lto
? 1 : 0);
1216 streamer_write_uhwi (ob
, r
->stores_lto
? 1 : 0);
1218 write_modref_records (r
->loads_lto
, ob
);
1220 write_modref_records (r
->stores_lto
, ob
);
1223 streamer_write_char_stream (ob
->main_stream
, 0);
1224 produce_asm (ob
, NULL
);
1225 destroy_output_block (ob
);
1229 read_section (struct lto_file_decl_data
*file_data
, const char *data
,
1232 const struct lto_function_header
*header
1233 = (const struct lto_function_header
*) data
;
1234 const int cfg_offset
= sizeof (struct lto_function_header
);
1235 const int main_offset
= cfg_offset
+ header
->cfg_size
;
1236 const int string_offset
= main_offset
+ header
->main_size
;
1237 struct data_in
*data_in
;
1239 unsigned int f_count
;
1241 lto_input_block
ib ((const char *) data
+ main_offset
, header
->main_size
,
1242 file_data
->mode_table
);
1245 = lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
1246 header
->string_size
, vNULL
);
1247 f_count
= streamer_read_uhwi (&ib
);
1248 for (i
= 0; i
< f_count
; i
++)
1250 struct cgraph_node
*node
;
1251 lto_symtab_encoder_t encoder
;
1253 unsigned int index
= streamer_read_uhwi (&ib
);
1254 encoder
= file_data
->symtab_node_encoder
;
1255 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
1258 modref_summary
*modref_sum
= summaries
->get_create (node
);
1259 modref_sum
->finished
= false;
1260 int have_loads
= streamer_read_uhwi (&ib
);
1261 int have_stores
= streamer_read_uhwi (&ib
);
1262 gcc_assert (!modref_sum
->loads_lto
1263 && !modref_sum
->stores_lto
1264 && !modref_sum
->loads
1265 && !modref_sum
->stores
);
1267 read_modref_records (&ib
, data_in
,
1269 &modref_sum
->loads_lto
);
1271 read_modref_records (&ib
, data_in
,
1272 &modref_sum
->stores
,
1273 &modref_sum
->stores_lto
);
1276 fprintf (dump_file
, "Read modref for %s\n",
1277 node
->dump_name ());
1278 modref_sum
->dump (dump_file
);
1281 modref_sum
->finished
= true;
1284 lto_free_section_data (file_data
, LTO_section_ipa_modref
, NULL
, data
,
1286 lto_data_in_delete (data_in
);
1289 /* Callback for read_summary. */
1294 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
1295 struct lto_file_decl_data
*file_data
;
1299 summaries
= modref_summaries::create_ggc (symtab
);
1300 ((modref_summaries
*)summaries
)->ipa
= !flag_ltrans
;
1302 while ((file_data
= file_data_vec
[j
++]))
1305 const char *data
= lto_get_summary_section_data (file_data
,
1306 LTO_section_ipa_modref
,
1309 read_section (file_data
, data
, len
);
1311 /* Fatal error here. We do not want to support compiling ltrans units
1312 with different version of compiler or different flags than the WPA
1313 unit, so this should never happen. */
1314 fatal_error (input_location
,
1315 "IPA modref summary is missing in input file");
1319 /* Update parameter indexes in TT according to MAP. */
1322 remap_arguments (vec
<int> *map
, modref_records
*tt
)
1325 modref_base_node
<alias_set_type
> *base_node
;
1326 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, base_node
)
1329 modref_ref_node
<alias_set_type
> *ref_node
;
1330 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
1333 modref_access_node
*access_node
;
1334 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
1335 if (access_node
->parm_index
> 0)
1336 access_node
->parm_index
= (*map
)[access_node
->parm_index
];
1341 /* If signature changed, update the summary. */
1344 modref_transform (struct cgraph_node
*node
)
1346 if (!node
->clone
.param_adjustments
)
1348 modref_summary
*r
= summaries
->get (node
);
1353 fprintf (dump_file
, "Updating summary for %s from:\n",
1354 node
->dump_name ());
1355 r
->dump (dump_file
);
1359 ipa_adjusted_param
*p
;
1361 FOR_EACH_VEC_SAFE_ELT (node
->clone
.param_adjustments
->m_adj_params
, i
, p
)
1363 int idx
= node
->clone
.param_adjustments
->get_original_index (i
);
1368 auto_vec
<int, 32> map
;
1370 map
.safe_grow (max
+ 1);
1371 for (i
= 0; i
<= max
; i
++)
1372 map
.quick_push (-1);
1373 FOR_EACH_VEC_SAFE_ELT (node
->clone
.param_adjustments
->m_adj_params
, i
, p
)
1375 int idx
= node
->clone
.param_adjustments
->get_original_index (i
);
1379 remap_arguments (&map
, r
->loads
);
1380 remap_arguments (&map
, r
->stores
);
1383 fprintf (dump_file
, "to:\n");
1384 r
->dump (dump_file
);
1389 /* Definition of the modref IPA pass. */
1390 const pass_data pass_data_ipa_modref
=
1392 IPA_PASS
, /* type */
1393 "modref", /* name */
1394 OPTGROUP_IPA
, /* optinfo_flags */
1395 TV_IPA_MODREF
, /* tv_id */
1396 0, /* properties_required */
1397 0, /* properties_provided */
1398 0, /* properties_destroyed */
1399 0, /* todo_flags_start */
1400 ( TODO_dump_symtab
), /* todo_flags_finish */
1403 class pass_ipa_modref
: public ipa_opt_pass_d
1406 pass_ipa_modref (gcc::context
*ctxt
)
1407 : ipa_opt_pass_d (pass_data_ipa_modref
, ctxt
,
1408 modref_generate
, /* generate_summary */
1409 modref_write
, /* write_summary */
1410 modref_read
, /* read_summary */
1411 modref_write
, /* write_optimization_summary */
1412 modref_read
, /* read_optimization_summary */
1413 NULL
, /* stmt_fixup */
1414 0, /* function_transform_todo_flags_start */
1415 modref_transform
,/* function_transform */
1416 NULL
) /* variable_transform */
1419 /* opt_pass methods: */
1420 opt_pass
*clone () { return new pass_ipa_modref (m_ctxt
); }
1421 virtual bool gate (function
*)
1425 virtual unsigned int execute (function
*);
1431 unsigned int pass_modref::execute (function
*f
)
1433 /* If new function is being added during IPA, we can skip analysis. */
1434 if (summaries
&& ((modref_summaries
*)summaries
)->ipa
)
1436 analyze_function (f
, false);
1441 make_pass_modref (gcc::context
*ctxt
)
1443 return new pass_modref (ctxt
);
1447 make_pass_ipa_modref (gcc::context
*ctxt
)
1449 return new pass_ipa_modref (ctxt
);
1452 /* Skip edges from and to nodes without ipa_pure_const enabled.
1453 Ignore not available symbols. */
1456 ignore_edge (struct cgraph_edge
*e
)
1458 enum availability avail
;
1459 cgraph_node
*callee
= e
->callee
->function_or_virtual_thunk_symbol
1460 (&avail
, e
->caller
);
1462 return (avail
<= AVAIL_INTERPOSABLE
1463 || !summaries
->get (callee
)
1464 || flags_from_decl_or_type (e
->callee
->decl
)
1465 & (ECF_CONST
| ECF_NOVOPS
));
1468 /* Compute parm_map for CALLE_EDGE. */
1471 compute_parm_map (cgraph_edge
*callee_edge
, vec
<modref_parm_map
> *parm_map
)
1473 class ipa_edge_args
*args
;
1474 if (ipa_node_params_sum
1475 && !callee_edge
->call_stmt_cannot_inline_p
1476 && (args
= IPA_EDGE_REF (callee_edge
)) != NULL
)
1478 int i
, count
= ipa_get_cs_argument_count (args
);
1479 class ipa_node_params
*caller_parms_info
, *callee_pi
;
1480 class ipa_call_summary
*es
1481 = ipa_call_summaries
->get (callee_edge
);
1483 = callee_edge
->callee
->function_or_virtual_thunk_symbol
1484 (NULL
, callee_edge
->caller
);
1486 caller_parms_info
= IPA_NODE_REF (callee_edge
->caller
->inlined_to
1487 ? callee_edge
->caller
->inlined_to
1488 : callee_edge
->caller
);
1489 callee_pi
= IPA_NODE_REF (callee
);
1491 (*parm_map
).safe_grow (count
);
1493 for (i
= 0; i
< count
; i
++)
1495 if (es
&& es
->param
[i
].points_to_local_or_readonly_memory
)
1497 (*parm_map
)[i
].parm_index
= -2;
1501 struct ipa_jump_func
*jf
1502 = ipa_get_ith_jump_func (args
, i
);
1503 if (jf
&& callee_pi
)
1505 tree cst
= ipa_value_from_jfunc (caller_parms_info
,
1509 if (cst
&& points_to_local_or_readonly_memory_p (cst
))
1511 (*parm_map
)[i
].parm_index
= -2;
1515 if (jf
&& jf
->type
== IPA_JF_PASS_THROUGH
)
1517 (*parm_map
)[i
].parm_index
1518 = ipa_get_jf_pass_through_formal_id (jf
);
1519 (*parm_map
)[i
].parm_offset_known
1520 = ipa_get_jf_pass_through_operation (jf
) == NOP_EXPR
;
1521 (*parm_map
)[i
].parm_offset
= 0;
1524 if (jf
&& jf
->type
== IPA_JF_ANCESTOR
)
1526 (*parm_map
)[i
].parm_index
= ipa_get_jf_ancestor_formal_id (jf
);
1527 (*parm_map
)[i
].parm_offset_known
= true;
1529 (!(ipa_get_jf_ancestor_offset (jf
) & (BITS_PER_UNIT
- 1)));
1530 (*parm_map
)[i
].parm_offset
1531 = ipa_get_jf_ancestor_offset (jf
) >> LOG2_BITS_PER_UNIT
;
1534 (*parm_map
)[i
].parm_index
= -1;
1538 fprintf (dump_file
, " Parm map: ");
1539 for (i
= 0; i
< count
; i
++)
1540 fprintf (dump_file
, " %i", (*parm_map
)[i
].parm_index
);
1541 fprintf (dump_file
, "\n");
1546 /* Call EDGE was inlined; merge summary from callee to the caller. */
1549 ipa_merge_modref_summary_after_inlining (cgraph_edge
*edge
)
1554 struct cgraph_node
*to
= (edge
->caller
->inlined_to
1555 ? edge
->caller
->inlined_to
: edge
->caller
);
1556 class modref_summary
*to_info
= summaries
->get (to
);
1561 class modref_summary
*callee_info
= summaries
->get (edge
->callee
);
1562 int flags
= flags_from_decl_or_type (edge
->callee
->decl
);
1566 if (ignore_stores_p (edge
->callee
->decl
, flags
))
1569 to_info
->loads
->collapse ();
1570 if (to_info
->loads_lto
)
1571 to_info
->loads_lto
->collapse ();
1575 summaries
->remove (to
);
1576 summaries
->remove (edge
->callee
);
1582 auto_vec
<modref_parm_map
, 32> parm_map
;
1584 compute_parm_map (edge
, &parm_map
);
1586 if (!ignore_stores_p (edge
->callee
->decl
, flags
))
1589 to_info
->loads
->merge (callee_info
->loads
, &parm_map
);
1590 if (to_info
->stores
)
1591 to_info
->stores
->merge (callee_info
->stores
, &parm_map
);
1593 if (to_info
->loads_lto
)
1594 to_info
->loads_lto
->merge (callee_info
->loads_lto
, &parm_map
);
1595 if (to_info
->stores_lto
)
1596 to_info
->stores_lto
->merge (callee_info
->stores_lto
, &parm_map
);
1598 if (!to_info
->useful_p (flags
))
1599 summaries
->remove (to
);
1600 summaries
->remove (edge
->callee
);
1604 /* Collapse loads and return true if something changed. */
1607 collapse_loads (modref_summary
*cur_summary
)
1609 bool changed
= false;
1611 if (cur_summary
->loads
&& !cur_summary
->loads
->every_base
)
1613 cur_summary
->loads
->collapse ();
1616 if (cur_summary
->loads_lto
1617 && !cur_summary
->loads_lto
->every_base
)
1619 cur_summary
->loads_lto
->collapse ();
1625 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE. */
1628 modref_propagate_in_scc (cgraph_node
*component_node
)
1630 bool changed
= true;
1636 for (struct cgraph_node
*cur
= component_node
; cur
;
1637 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
1639 cgraph_node
*node
= cur
->inlined_to
? cur
->inlined_to
: cur
;
1640 modref_summary
*cur_summary
= summaries
->get (node
);
1646 fprintf (dump_file
, " Processing %s%s%s\n",
1648 TREE_READONLY (cur
->decl
) ? " (const)" : "",
1649 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
1651 for (cgraph_edge
*e
= cur
->indirect_calls
; e
; e
= e
->next_callee
)
1653 if (e
->indirect_info
->ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
1655 if (ignore_stores_p (cur
->decl
, e
->indirect_info
->ecf_flags
))
1658 fprintf (dump_file
, " Indirect call: "
1659 "collapsing loads\n");
1660 changed
|= collapse_loads (cur_summary
);
1665 fprintf (dump_file
, " Indirect call: giving up\n");
1666 summaries
->remove (node
);
1676 for (cgraph_edge
*callee_edge
= cur
->callees
; callee_edge
;
1677 callee_edge
= callee_edge
->next_callee
)
1679 int flags
= flags_from_decl_or_type (callee_edge
->callee
->decl
);
1680 modref_summary
*callee_summary
;
1681 struct cgraph_node
*callee
;
1683 if (flags
& (ECF_CONST
| ECF_NOVOPS
)
1684 || !callee_edge
->inline_failed
)
1687 /* Get the callee and its summary. */
1688 enum availability avail
;
1689 callee
= callee_edge
->callee
->function_or_virtual_thunk_symbol
1692 /* It is not necessary to re-process calls outside of the
1696 || ((struct ipa_dfs_info
*)cur
->aux
)->scc_no
1697 != ((struct ipa_dfs_info
*)callee
->aux
)->scc_no
))
1701 fprintf (dump_file
, " Call to %s\n",
1702 callee_edge
->callee
->dump_name ());
1704 bool ignore_stores
= ignore_stores_p (cur
->decl
, flags
);
1706 /* We don't know anything about CALLEE, hence we cannot tell
1707 anything about the entire component. */
1709 if (avail
<= AVAIL_INTERPOSABLE
1710 || !(callee_summary
= summaries
->get (callee
)))
1714 if (dump_file
&& avail
<= AVAIL_INTERPOSABLE
)
1715 fprintf (dump_file
, " Call target interposable"
1716 " or not available\n");
1718 fprintf (dump_file
, " No call target summary\n");
1720 summaries
->remove (node
);
1726 if (dump_file
&& avail
<= AVAIL_INTERPOSABLE
)
1727 fprintf (dump_file
, " Call target interposable"
1728 " or not available; collapsing loads\n");
1730 fprintf (dump_file
, " No call target summary;"
1731 " collapsing loads\n");
1733 changed
|= collapse_loads (cur_summary
);
1738 /* We can not safely optimize based on summary of callee if it
1739 does not always bind to current def: it is possible that
1740 memory load was optimized out earlier which may not happen in
1741 the interposed variant. */
1742 if (!callee_edge
->binds_to_current_def_p ())
1744 changed
|= collapse_loads (cur_summary
);
1746 fprintf (dump_file
, " May not bind local;"
1747 " collapsing loads\n");
1751 auto_vec
<modref_parm_map
, 32> parm_map
;
1753 compute_parm_map (callee_edge
, &parm_map
);
1755 /* Merge in callee's information. */
1756 if (callee_summary
->loads
)
1757 changed
|= cur_summary
->loads
->merge
1758 (callee_summary
->loads
, &parm_map
);
1759 if (callee_summary
->stores
)
1760 changed
|= cur_summary
->stores
->merge
1761 (callee_summary
->stores
, &parm_map
);
1762 if (callee_summary
->loads_lto
)
1763 changed
|= cur_summary
->loads_lto
->merge
1764 (callee_summary
->loads_lto
, &parm_map
);
1765 if (callee_summary
->stores_lto
)
1766 changed
|= cur_summary
->stores_lto
->merge
1767 (callee_summary
->stores_lto
, &parm_map
);
1768 if (dump_file
&& changed
)
1769 cur_summary
->dump (dump_file
);
1774 for (struct cgraph_node
*cur
= component_node
; cur
;
1775 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
1777 modref_summary
*cur_summary
= summaries
->get (cur
);
1779 cur_summary
->finished
= true;
1784 "Propagation finished in %i iterations\n", iteration
);
1785 for (struct cgraph_node
*cur
= component_node
; cur
;
1786 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
1787 if (!cur
->inlined_to
)
1789 modref_summary
*cur_summary
= summaries
->get (cur
);
1791 fprintf (dump_file
, "Propagated modref for %s%s%s\n",
1793 TREE_READONLY (cur
->decl
) ? " (const)" : "",
1794 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
1796 cur_summary
->dump (dump_file
);
1798 fprintf (dump_file
, " Not tracked\n");
1803 /* Run the IPA pass. This will take a function's summaries and calls and
1804 construct new summaries which represent a transitive closure. So that
1805 summary of an analyzed function contains information about the loads and
1806 stores that the function or any function that it calls does. */
1809 pass_ipa_modref::execute (function
*)
1814 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*,
1815 symtab
->cgraph_count
);
1817 order_pos
= ipa_reduced_postorder (order
, true, ignore_edge
);
1820 /* Iterate over all strongly connected components in post-order. */
1821 for (i
= 0; i
< order_pos
; i
++)
1823 /* Get the component's representative. That's just any node in the
1824 component from which we can traverse the entire component. */
1825 struct cgraph_node
*component_node
= order
[i
];
1828 fprintf (dump_file
, "\n\nStart of SCC component\n");
1830 modref_propagate_in_scc (component_node
);
1832 ((modref_summaries
*)summaries
)->ipa
= false;
1833 ipa_free_postorder_info ();
1838 /* Summaries must stay alive until end of compilation. */
1841 ipa_modref_c_finalize ()
1844 ggc_delete (summaries
);
1848 #include "gt-ipa-modref.h"