1 /* Search for references that a function loads or stores.
2 Copyright (C) 2020 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls. The summary has a form of decision tree
24 described in ipa-modref-tree.h.
26 This file contains a tree pass and an IPA pass. Both perform the same
27 analysis; however the tree pass is executed during early and late optimization
28 passes to propagate info downwards in the compilation order. IPA pass
29 propagates across the callgraph and is able to handle recursion and works on
30 whole program during link-time analysis.
32 LTO mode differs from the local mode by not recording alias sets but types
33 that are translated to alias sets later. This is necessary in order to stream
34 the information because the alias sets are rebuilt at stream-in time and may
35 not correspond to ones seen during analysis. For this reason part of analysis
40 #include "coretypes.h"
44 #include "alloc-pool.h"
45 #include "tree-pass.h"
46 #include "gimple-iterator.h"
49 #include "ipa-utils.h"
50 #include "symbol-summary.h"
51 #include "gimple-pretty-print.h"
52 #include "gimple-walk.h"
53 #include "print-tree.h"
54 #include "tree-streamer.h"
57 #include "ipa-modref-tree.h"
58 #include "ipa-modref.h"
59 #include "value-range.h"
61 #include "ipa-fnsummary.h"
62 #include "attr-fnspec.h"
63 #include "symtab-clones.h"
65 /* We record fnspec specifiers for call edges since they depends on actual
84 /* Summary holding fnspec string for a given call. */
86 class fnspec_summaries_t
: public call_summary
<fnspec_summary
*>
89 fnspec_summaries_t (symbol_table
*symtab
)
90 : call_summary
<fnspec_summary
*> (symtab
) {}
91 /* Hook that is called by summary when an edge is duplicated. */
92 virtual void duplicate (cgraph_edge
*,
97 dst
->fnspec
= xstrdup (src
->fnspec
);
101 static fnspec_summaries_t
*fnspec_summaries
= NULL
;
103 /* Class (from which there is one global instance) that holds modref summaries
104 for all analyzed functions. */
106 class GTY((user
)) modref_summaries
107 : public fast_function_summary
<modref_summary
*, va_gc
>
110 modref_summaries (symbol_table
*symtab
)
111 : fast_function_summary
<modref_summary
*, va_gc
> (symtab
) {}
112 virtual void insert (cgraph_node
*, modref_summary
*state
);
113 virtual void duplicate (cgraph_node
*src_node
,
114 cgraph_node
*dst_node
,
115 modref_summary
*src_data
,
116 modref_summary
*dst_data
);
117 static modref_summaries
*create_ggc (symbol_table
*symtab
)
119 return new (ggc_alloc_no_dtor
<modref_summaries
> ())
120 modref_summaries (symtab
);
124 class modref_summary_lto
;
126 /* Class (from which there is one global instance) that holds modref summaries
127 for all analyzed functions. */
129 class GTY((user
)) modref_summaries_lto
130 : public fast_function_summary
<modref_summary_lto
*, va_gc
>
133 modref_summaries_lto (symbol_table
*symtab
)
134 : fast_function_summary
<modref_summary_lto
*, va_gc
> (symtab
),
135 propagated (false) {}
136 virtual void insert (cgraph_node
*, modref_summary_lto
*state
);
137 virtual void duplicate (cgraph_node
*src_node
,
138 cgraph_node
*dst_node
,
139 modref_summary_lto
*src_data
,
140 modref_summary_lto
*dst_data
);
141 static modref_summaries_lto
*create_ggc (symbol_table
*symtab
)
143 return new (ggc_alloc_no_dtor
<modref_summaries_lto
> ())
144 modref_summaries_lto (symtab
);
149 /* Global variable holding all modref summaries
150 (from analysis to IPA propagation time). */
152 static GTY(()) fast_function_summary
<modref_summary
*, va_gc
>
155 /* Global variable holding all modref optimization summaries
156 (from IPA propagation time or used by local optimization pass). */
158 static GTY(()) fast_function_summary
<modref_summary
*, va_gc
>
159 *optimization_summaries
;
161 /* LTO summaries hold info from analysis to LTO streaming or from LTO
162 stream-in through propagation to LTO stream-out. */
164 static GTY(()) fast_function_summary
<modref_summary_lto
*, va_gc
>
167 /* Summary for a single function which this pass produces. */
169 modref_summary::modref_summary ()
170 : loads (NULL
), stores (NULL
), writes_errno (NULL
)
174 modref_summary::~modref_summary ()
182 /* Return true if summary is potentially useful for optimization. */
185 modref_summary::useful_p (int ecf_flags
)
187 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
189 if (loads
&& !loads
->every_base
)
191 if (ecf_flags
& ECF_PURE
)
193 return stores
&& !stores
->every_base
;
196 /* Single function summary used for LTO. */
198 typedef modref_tree
<tree
> modref_records_lto
;
199 struct GTY(()) modref_summary_lto
201 /* Loads and stores in functions using types rather than alias sets.
203 This is necessary to make the information streamable for LTO but is also
204 more verbose and thus more likely to hit the limits. */
205 modref_records_lto
*loads
;
206 modref_records_lto
*stores
;
209 modref_summary_lto ();
210 ~modref_summary_lto ();
212 bool useful_p (int ecf_flags
);
215 /* Summary for a single function which this pass produces. */
217 modref_summary_lto::modref_summary_lto ()
218 : loads (NULL
), stores (NULL
), writes_errno (NULL
)
222 modref_summary_lto::~modref_summary_lto ()
231 /* Return true if lto summary is potentially useful for optimization. */
234 modref_summary_lto::useful_p (int ecf_flags
)
236 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
238 if (loads
&& !loads
->every_base
)
240 if (ecf_flags
& ECF_PURE
)
242 return stores
&& !stores
->every_base
;
248 dump_access (modref_access_node
*a
, FILE *out
)
250 fprintf (out
, " access:");
251 if (a
->parm_index
!= -1)
253 fprintf (out
, " Parm %i", a
->parm_index
);
254 if (a
->parm_offset_known
)
256 fprintf (out
, " param offset:");
257 print_dec ((poly_int64_pod
)a
->parm_offset
, out
, SIGNED
);
260 if (a
->range_info_useful_p ())
262 fprintf (out
, " offset:");
263 print_dec ((poly_int64_pod
)a
->offset
, out
, SIGNED
);
264 fprintf (out
, " size:");
265 print_dec ((poly_int64_pod
)a
->size
, out
, SIGNED
);
266 fprintf (out
, " max_size:");
267 print_dec ((poly_int64_pod
)a
->max_size
, out
, SIGNED
);
272 /* Dump records TT to OUT. */
275 dump_records (modref_records
*tt
, FILE *out
)
277 fprintf (out
, " Limits: %i bases, %i refs\n",
278 (int)tt
->max_bases
, (int)tt
->max_refs
);
281 fprintf (out
, " Every base\n");
285 modref_base_node
<alias_set_type
> *n
;
286 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
288 fprintf (out
, " Base %i: alias set %i\n", (int)i
, n
->base
);
291 fprintf (out
, " Every ref\n");
295 modref_ref_node
<alias_set_type
> *r
;
296 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
298 fprintf (out
, " Ref %i: alias set %i\n", (int)j
, r
->ref
);
301 fprintf (out
, " Every access\n");
305 modref_access_node
*a
;
306 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
307 dump_access (a
, out
);
312 /* Dump records TT to OUT. */
315 dump_lto_records (modref_records_lto
*tt
, FILE *out
)
317 fprintf (out
, " Limits: %i bases, %i refs\n",
318 (int)tt
->max_bases
, (int)tt
->max_refs
);
321 fprintf (out
, " Every base\n");
325 modref_base_node
<tree
> *n
;
326 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
328 fprintf (out
, " Base %i:", (int)i
);
329 print_generic_expr (dump_file
, n
->base
);
330 fprintf (out
, " (alias set %i)\n",
331 n
->base
? get_alias_set (n
->base
) : 0);
334 fprintf (out
, " Every ref\n");
338 modref_ref_node
<tree
> *r
;
339 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
341 fprintf (out
, " Ref %i:", (int)j
);
342 print_generic_expr (dump_file
, r
->ref
);
343 fprintf (out
, " (alias set %i)\n",
344 r
->ref
? get_alias_set (r
->ref
) : 0);
347 fprintf (out
, " Every access\n");
351 modref_access_node
*a
;
352 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
353 dump_access (a
, out
);
361 modref_summary::dump (FILE *out
)
365 fprintf (out
, " loads:\n");
366 dump_records (loads
, out
);
370 fprintf (out
, " stores:\n");
371 dump_records (stores
, out
);
374 fprintf (out
, " Writes errno\n");
380 modref_summary_lto::dump (FILE *out
)
382 fprintf (out
, " loads:\n");
383 dump_lto_records (loads
, out
);
384 fprintf (out
, " stores:\n");
385 dump_lto_records (stores
, out
);
387 fprintf (out
, " Writes errno\n");
390 /* Get function summary for FUNC if it exists, return NULL otherwise. */
393 get_modref_function_summary (cgraph_node
*func
)
395 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
396 if (!optimization_summaries
)
399 /* A single function body may be represented by multiple symbols with
400 different visibility. For example, if FUNC is an interposable alias,
401 we don't want to return anything, even if we have summary for the target
403 enum availability avail
;
404 func
= func
->function_or_virtual_thunk_symbol
405 (&avail
, cgraph_node::get (current_function_decl
));
406 if (avail
<= AVAIL_INTERPOSABLE
)
409 modref_summary
*r
= optimization_summaries
->get (func
);
413 /* Construct modref_access_node from REF. */
414 static modref_access_node
415 get_access (ao_ref
*ref
)
419 base
= ao_ref_base (ref
);
420 modref_access_node a
= {ref
->offset
, ref
->size
, ref
->max_size
,
422 if (TREE_CODE (base
) == MEM_REF
|| TREE_CODE (base
) == TARGET_MEM_REF
)
425 base
= TREE_OPERAND (base
, 0);
426 if (TREE_CODE (base
) == SSA_NAME
427 && SSA_NAME_IS_DEFAULT_DEF (base
)
428 && TREE_CODE (SSA_NAME_VAR (base
)) == PARM_DECL
)
431 for (tree t
= DECL_ARGUMENTS (current_function_decl
);
432 t
!= SSA_NAME_VAR (base
); t
= DECL_CHAIN (t
))
441 if (TREE_CODE (memref
) == MEM_REF
)
444 = wi::to_poly_wide (TREE_OPERAND
445 (memref
, 1)).to_shwi (&a
.parm_offset
);
448 a
.parm_offset_known
= false;
458 /* Record access into the modref_records data structure. */
461 record_access (modref_records
*tt
, ao_ref
*ref
)
463 alias_set_type base_set
= !flag_strict_aliasing
? 0
464 : ao_ref_base_alias_set (ref
);
465 alias_set_type ref_set
= !flag_strict_aliasing
? 0
466 : (ao_ref_alias_set (ref
));
467 modref_access_node a
= get_access (ref
);
470 fprintf (dump_file
, " - Recording base_set=%i ref_set=%i parm=%i\n",
471 base_set
, ref_set
, a
.parm_index
);
473 tt
->insert (base_set
, ref_set
, a
);
476 /* IPA version of record_access_tree. */
479 record_access_lto (modref_records_lto
*tt
, ao_ref
*ref
)
481 /* get_alias_set sometimes use different type to compute the alias set
482 than TREE_TYPE (base). Do same adjustments. */
483 tree base_type
= NULL_TREE
, ref_type
= NULL_TREE
;
484 if (flag_strict_aliasing
)
489 while (handled_component_p (base
))
490 base
= TREE_OPERAND (base
, 0);
492 base_type
= reference_alias_ptr_type_1 (&base
);
495 base_type
= TREE_TYPE (base
);
497 base_type
= TYPE_REF_CAN_ALIAS_ALL (base_type
)
498 ? NULL_TREE
: TREE_TYPE (base_type
);
500 tree ref_expr
= ref
->ref
;
501 ref_type
= reference_alias_ptr_type_1 (&ref_expr
);
504 ref_type
= TREE_TYPE (ref_expr
);
506 ref_type
= TYPE_REF_CAN_ALIAS_ALL (ref_type
)
507 ? NULL_TREE
: TREE_TYPE (ref_type
);
509 /* Sanity check that we are in sync with what get_alias_set does. */
510 gcc_checking_assert ((!base_type
&& !ao_ref_base_alias_set (ref
))
511 || get_alias_set (base_type
)
512 == ao_ref_base_alias_set (ref
));
513 gcc_checking_assert ((!ref_type
&& !ao_ref_alias_set (ref
))
514 || get_alias_set (ref_type
)
515 == ao_ref_alias_set (ref
));
517 /* Do not bother to record types that have no meaningful alias set.
518 Also skip variably modified types since these go to local streams. */
519 if (base_type
&& (!get_alias_set (base_type
)
520 || variably_modified_type_p (base_type
, NULL_TREE
)))
521 base_type
= NULL_TREE
;
522 if (ref_type
&& (!get_alias_set (ref_type
)
523 || variably_modified_type_p (ref_type
, NULL_TREE
)))
524 ref_type
= NULL_TREE
;
526 modref_access_node a
= get_access (ref
);
529 fprintf (dump_file
, " - Recording base type:");
530 print_generic_expr (dump_file
, base_type
);
531 fprintf (dump_file
, " (alias set %i) ref type:",
532 base_type
? get_alias_set (base_type
) : 0);
533 print_generic_expr (dump_file
, ref_type
);
534 fprintf (dump_file
, " (alias set %i) parm:%i\n",
535 ref_type
? get_alias_set (ref_type
) : 0,
539 tt
->insert (base_type
, ref_type
, a
);
542 /* Returns true if and only if we should store the access to EXPR.
543 Some accesses, e.g. loads from automatic variables, are not interesting. */
546 record_access_p (tree expr
)
548 if (refs_local_or_readonly_memory_p (expr
))
551 fprintf (dump_file
, " - Read-only or local, ignoring.\n");
557 /* Return true if ECF flags says that stores can be ignored. */
560 ignore_stores_p (tree caller
, int flags
)
562 if (flags
& ECF_PURE
)
564 if ((flags
& (ECF_NORETURN
| ECF_NOTHROW
)) == (ECF_NORETURN
| ECF_NOTHROW
)
565 || (!opt_for_fn (caller
, flag_exceptions
) && (flags
& ECF_NORETURN
)))
570 /* Determine parm_map for argument I of STMT. */
573 parm_map_for_arg (gimple
*stmt
, int i
)
575 tree op
= gimple_call_arg (stmt
, i
);
578 struct modref_parm_map parm_map
;
580 parm_map
.parm_offset_known
= false;
581 parm_map
.parm_offset
= 0;
583 offset_known
= unadjusted_ptr_and_unit_offset (op
, &op
, &offset
);
584 if (TREE_CODE (op
) == SSA_NAME
585 && SSA_NAME_IS_DEFAULT_DEF (op
)
586 && TREE_CODE (SSA_NAME_VAR (op
)) == PARM_DECL
)
589 for (tree t
= DECL_ARGUMENTS (current_function_decl
);
590 t
!= SSA_NAME_VAR (op
); t
= DECL_CHAIN (t
))
599 parm_map
.parm_index
= index
;
600 parm_map
.parm_offset_known
= offset_known
;
601 parm_map
.parm_offset
= offset
;
603 else if (points_to_local_or_readonly_memory_p (op
))
604 parm_map
.parm_index
= -2;
606 parm_map
.parm_index
= -1;
610 /* Merge side effects of call STMT to function with CALLEE_SUMMARY
611 int CUR_SUMMARY. Return true if something changed.
612 If IGNORE_STORES is true, do not merge stores. */
615 merge_call_side_effects (modref_summary
*cur_summary
,
616 gimple
*stmt
, modref_summary
*callee_summary
,
617 bool ignore_stores
, cgraph_node
*callee_node
)
619 auto_vec
<modref_parm_map
, 32> parm_map
;
620 bool changed
= false;
623 fprintf (dump_file
, " - Merging side effects of %s with parm map:",
624 callee_node
->dump_name ());
626 /* We can not safely optimize based on summary of callee if it does
627 not always bind to current def: it is possible that memory load
628 was optimized out earlier which may not happen in the interposed
630 if (!callee_node
->binds_to_current_def_p ())
633 fprintf (dump_file
, " - May be interposed: collapsing loads.\n");
634 cur_summary
->loads
->collapse ();
637 parm_map
.safe_grow_cleared (gimple_call_num_args (stmt
));
638 for (unsigned i
= 0; i
< gimple_call_num_args (stmt
); i
++)
640 parm_map
[i
] = parm_map_for_arg (stmt
, i
);
643 fprintf (dump_file
, " %i", parm_map
[i
].parm_index
);
644 if (parm_map
[i
].parm_offset_known
)
646 fprintf (dump_file
, " offset:");
647 print_dec ((poly_int64_pod
)parm_map
[i
].parm_offset
,
653 fprintf (dump_file
, "\n");
655 /* Merge with callee's summary. */
656 changed
|= cur_summary
->loads
->merge (callee_summary
->loads
, &parm_map
);
659 changed
|= cur_summary
->stores
->merge (callee_summary
->stores
,
661 if (!cur_summary
->writes_errno
662 && callee_summary
->writes_errno
)
664 cur_summary
->writes_errno
= true;
671 /* Return access mode for argument I of call STMT with FNSPEC. */
673 static modref_access_node
674 get_access_for_fnspec (gcall
*call
, attr_fnspec
&fnspec
,
675 unsigned int i
, modref_parm_map
&map
)
677 tree size
= NULL_TREE
;
678 unsigned int size_arg
;
680 if (!fnspec
.arg_specified_p (i
))
682 else if (fnspec
.arg_max_access_size_given_by_arg_p (i
, &size_arg
))
683 size
= gimple_call_arg (call
, size_arg
);
684 else if (fnspec
.arg_access_size_given_by_type_p (i
))
686 tree callee
= gimple_call_fndecl (call
);
687 tree t
= TYPE_ARG_TYPES (TREE_TYPE (callee
));
689 for (unsigned int p
= 0; p
< i
; p
++)
691 size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t
)));
693 modref_access_node a
= {0, -1, -1,
694 map
.parm_offset
, map
.parm_index
,
695 map
.parm_offset_known
};
698 && poly_int_tree_p (size
, &size_hwi
)
699 && coeffs_in_range_p (size_hwi
, 0,
700 HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
))
703 a
.max_size
= size_hwi
<< LOG2_BITS_PER_UNIT
;
708 /* Collapse loads and return true if something changed. */
711 collapse_loads (modref_summary
*cur_summary
,
712 modref_summary_lto
*cur_summary_lto
)
714 bool changed
= false;
716 if (cur_summary
&& !cur_summary
->loads
->every_base
)
718 cur_summary
->loads
->collapse ();
722 && !cur_summary_lto
->loads
->every_base
)
724 cur_summary_lto
->loads
->collapse ();
730 /* Collapse loads and return true if something changed. */
733 collapse_stores (modref_summary
*cur_summary
,
734 modref_summary_lto
*cur_summary_lto
)
736 bool changed
= false;
738 if (cur_summary
&& !cur_summary
->stores
->every_base
)
740 cur_summary
->stores
->collapse ();
744 && !cur_summary_lto
->stores
->every_base
)
746 cur_summary_lto
->stores
->collapse ();
753 /* Apply side effects of call STMT to CUR_SUMMARY using FNSPEC.
754 If IGNORE_STORES is true ignore them.
755 Return false if no useful summary can be produced. */
758 process_fnspec (modref_summary
*cur_summary
,
759 modref_summary_lto
*cur_summary_lto
,
760 gcall
*call
, bool ignore_stores
)
762 attr_fnspec fnspec
= gimple_call_fnspec (call
);
763 if (!fnspec
.known_p ())
765 if (dump_file
&& gimple_call_builtin_p (call
, BUILT_IN_NORMAL
))
766 fprintf (dump_file
, " Builtin with no fnspec: %s\n",
767 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call
))));
770 collapse_loads (cur_summary
, cur_summary_lto
);
775 if (fnspec
.global_memory_read_p ())
776 collapse_loads (cur_summary
, cur_summary_lto
);
779 for (unsigned int i
= 0; i
< gimple_call_num_args (call
); i
++)
780 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, i
))))
782 else if (!fnspec
.arg_specified_p (i
)
783 || fnspec
.arg_maybe_read_p (i
))
785 modref_parm_map map
= parm_map_for_arg (call
, i
);
787 if (map
.parm_index
== -2)
789 if (map
.parm_index
== -1)
791 collapse_loads (cur_summary
, cur_summary_lto
);
795 cur_summary
->loads
->insert (0, 0,
796 get_access_for_fnspec (call
,
800 cur_summary_lto
->loads
->insert (0, 0,
801 get_access_for_fnspec (call
,
808 if (fnspec
.global_memory_written_p ())
809 collapse_stores (cur_summary
, cur_summary_lto
);
812 for (unsigned int i
= 0; i
< gimple_call_num_args (call
); i
++)
813 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, i
))))
815 else if (!fnspec
.arg_specified_p (i
)
816 || fnspec
.arg_maybe_written_p (i
))
818 modref_parm_map map
= parm_map_for_arg (call
, i
);
820 if (map
.parm_index
== -2)
822 if (map
.parm_index
== -1)
824 collapse_stores (cur_summary
, cur_summary_lto
);
828 cur_summary
->stores
->insert (0, 0,
829 get_access_for_fnspec (call
,
833 cur_summary_lto
->stores
->insert (0, 0,
834 get_access_for_fnspec (call
,
838 if (fnspec
.errno_maybe_written_p () && flag_errno_math
)
841 cur_summary
->writes_errno
= true;
843 cur_summary_lto
->writes_errno
= true;
849 /* Analyze function call STMT in function F.
850 Remember recursive calls in RECURSIVE_CALLS. */
853 analyze_call (modref_summary
*cur_summary
, modref_summary_lto
*cur_summary_lto
,
854 gcall
*stmt
, vec
<gimple
*> *recursive_calls
)
856 /* Check flags on the function call. In certain cases, analysis can be
858 int flags
= gimple_call_flags (stmt
);
859 if (flags
& (ECF_CONST
| ECF_NOVOPS
))
863 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
864 "except for args.\n");
868 /* Pure functions do not affect global memory. Stores by functions which are
869 noreturn and do not throw can safely be ignored. */
870 bool ignore_stores
= ignore_stores_p (current_function_decl
, flags
);
872 /* Next, we try to get the callee's function declaration. The goal is to
873 merge their summary with ours. */
874 tree callee
= gimple_call_fndecl (stmt
);
876 /* Check if this is an indirect call. */
880 fprintf (dump_file
, gimple_call_internal_p (stmt
)
881 ? " - Internal call" : " - Indirect call.\n");
882 return process_fnspec (cur_summary
, cur_summary_lto
, stmt
, ignore_stores
);
884 /* We only need to handle internal calls in IPA mode. */
885 gcc_checking_assert (!cur_summary_lto
);
887 struct cgraph_node
*callee_node
= cgraph_node::get_create (callee
);
889 /* If this is a recursive call, the target summary is the same as ours, so
890 there's nothing to do. */
891 if (recursive_call_p (current_function_decl
, callee
))
893 recursive_calls
->safe_push (stmt
);
895 fprintf (dump_file
, " - Skipping recursive call.\n");
899 gcc_assert (callee_node
!= NULL
);
901 /* Get the function symbol and its availability. */
902 enum availability avail
;
903 callee_node
= callee_node
->function_symbol (&avail
);
904 if (avail
<= AVAIL_INTERPOSABLE
)
907 fprintf (dump_file
, " - Function availability <= AVAIL_INTERPOSABLE.\n");
908 return process_fnspec (cur_summary
, cur_summary_lto
, stmt
, ignore_stores
);
911 /* Get callee's modref summary. As above, if there's no summary, we either
912 have to give up or, if stores are ignored, we can just purge loads. */
913 modref_summary
*callee_summary
= optimization_summaries
->get (callee_node
);
917 fprintf (dump_file
, " - No modref summary available for callee.\n");
918 return process_fnspec (cur_summary
, cur_summary_lto
, stmt
, ignore_stores
);
921 merge_call_side_effects (cur_summary
, stmt
, callee_summary
, ignore_stores
,
927 /* Support analysis in non-lto and lto mode in parallel. */
931 struct modref_summary
*nolto
;
932 struct modref_summary_lto
*lto
;
935 /* Helper for analyze_stmt. */
938 analyze_load (gimple
*, tree
, tree op
, void *data
)
940 modref_summary
*summary
= ((summary_ptrs
*)data
)->nolto
;
941 modref_summary_lto
*summary_lto
= ((summary_ptrs
*)data
)->lto
;
945 fprintf (dump_file
, " - Analyzing load: ");
946 print_generic_expr (dump_file
, op
);
947 fprintf (dump_file
, "\n");
950 if (!record_access_p (op
))
954 ao_ref_init (&r
, op
);
957 record_access (summary
->loads
, &r
);
959 record_access_lto (summary_lto
->loads
, &r
);
963 /* Helper for analyze_stmt. */
966 analyze_store (gimple
*, tree
, tree op
, void *data
)
968 modref_summary
*summary
= ((summary_ptrs
*)data
)->nolto
;
969 modref_summary_lto
*summary_lto
= ((summary_ptrs
*)data
)->lto
;
973 fprintf (dump_file
, " - Analyzing store: ");
974 print_generic_expr (dump_file
, op
);
975 fprintf (dump_file
, "\n");
978 if (!record_access_p (op
))
982 ao_ref_init (&r
, op
);
985 record_access (summary
->stores
, &r
);
987 record_access_lto (summary_lto
->stores
, &r
);
991 /* Analyze statement STMT of function F.
992 If IPA is true do not merge in side effects of calls. */
995 analyze_stmt (modref_summary
*summary
, modref_summary_lto
*summary_lto
,
996 gimple
*stmt
, bool ipa
, vec
<gimple
*> *recursive_calls
)
998 /* In general we can not ignore clobbers because they are barriers for code
999 motion, however after inlining it is safe to do because local optimization
1000 passes do not consider clobbers from other functions.
1001 Similar logic is in ipa-pure-consts. */
1002 if ((ipa
|| cfun
->after_inlining
) && gimple_clobber_p (stmt
))
1005 struct summary_ptrs sums
= {summary
, summary_lto
};
1007 /* Analyze all loads and stores in STMT. */
1008 walk_stmt_load_store_ops (stmt
, &sums
,
1009 analyze_load
, analyze_store
);
1011 switch (gimple_code (stmt
))
1014 /* If the ASM statement does not read nor write memory, there's nothing
1015 to do. Otherwise just give up. */
1016 if (!gimple_asm_clobbers_memory_p (as_a
<gasm
*> (stmt
)))
1019 fprintf (dump_file
, " - Function contains GIMPLE_ASM statement "
1020 "which clobbers memory.\n");
1023 if (!ipa
|| gimple_call_internal_p (stmt
))
1024 return analyze_call (summary
, summary_lto
,
1025 as_a
<gcall
*> (stmt
), recursive_calls
);
1028 attr_fnspec fnspec
= gimple_call_fnspec (as_a
<gcall
*>(stmt
));
1030 if (fnspec
.known_p ()
1031 && (!fnspec
.global_memory_read_p ()
1032 || !fnspec
.global_memory_written_p ()))
1034 fnspec_summaries
->get_create
1035 (cgraph_node::get (current_function_decl
)->get_edge (stmt
))
1036 ->fnspec
= xstrdup (fnspec
.get_str ());
1038 fprintf (dump_file
, " Recorded fnspec %s\n", fnspec
.get_str ());
1043 /* Nothing to do for other types of statements. */
1048 /* Remove summary of current function because during the function body
1049 scan we determined it is not useful. LTO, NOLTO and IPA determines the
1053 remove_summary (bool lto
, bool nolto
, bool ipa
)
1055 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
1057 optimization_summaries
->remove (fnode
);
1061 summaries
->remove (fnode
);
1063 summaries_lto
->remove (fnode
);
1067 " - modref done with result: not tracked.\n");
1070 /* Analyze function F. IPA indicates whether we're running in local mode
1071 (false) or the IPA mode (true). */
1074 analyze_function (function
*f
, bool ipa
)
1077 fprintf (dump_file
, "modref analyzing '%s' (ipa=%i)%s%s\n",
1078 function_name (f
), ipa
,
1079 TREE_READONLY (current_function_decl
) ? " (const)" : "",
1080 DECL_PURE_P (current_function_decl
) ? " (pure)" : "");
1082 /* Don't analyze this function if it's compiled with -fno-strict-aliasing. */
1083 if (!flag_ipa_modref
)
1086 /* Compute no-LTO summaries when local optimization is going to happen. */
1087 bool nolto
= (!ipa
|| ((!flag_lto
|| flag_fat_lto_objects
) && !in_lto_p
)
1088 || (in_lto_p
&& !flag_wpa
1089 && flag_incremental_link
!= INCREMENTAL_LINK_LTO
));
1090 /* Compute LTO when LTO streaming is going to happen. */
1091 bool lto
= ipa
&& ((flag_lto
&& !in_lto_p
)
1093 || flag_incremental_link
== INCREMENTAL_LINK_LTO
);
1094 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
1096 modref_summary
*summary
= NULL
;
1097 modref_summary_lto
*summary_lto
= NULL
;
1099 /* Initialize the summary.
1100 If we run in local mode there is possibly pre-existing summary from
1101 IPA pass. Dump it so it is easy to compare if mod-ref info has
1105 if (!optimization_summaries
)
1106 optimization_summaries
= modref_summaries::create_ggc (symtab
);
1107 else /* Remove existing summary if we are re-running the pass. */
1111 = optimization_summaries
->get (cgraph_node::get (f
->decl
)))
1115 fprintf (dump_file
, "Past summary:\n");
1116 optimization_summaries
->get
1117 (cgraph_node::get (f
->decl
))->dump (dump_file
);
1119 optimization_summaries
->remove (cgraph_node::get (f
->decl
));
1121 summary
= optimization_summaries
->get_create (cgraph_node::get (f
->decl
));
1122 gcc_checking_assert (nolto
&& !lto
);
1124 /* In IPA mode we analyze every function precisely once. Assert that. */
1130 summaries
= modref_summaries::create_ggc (symtab
);
1132 summaries
->remove (cgraph_node::get (f
->decl
));
1133 summary
= summaries
->get_create (cgraph_node::get (f
->decl
));
1138 summaries_lto
= modref_summaries_lto::create_ggc (symtab
);
1140 summaries_lto
->remove (cgraph_node::get (f
->decl
));
1141 summary_lto
= summaries_lto
->get_create (cgraph_node::get (f
->decl
));
1143 if (!fnspec_summaries
)
1144 fnspec_summaries
= new fnspec_summaries_t (symtab
);
1148 /* Create and initialize summary for F.
1149 Note that summaries may be already allocated from previous
1153 gcc_assert (!summary
->loads
);
1154 summary
->loads
= modref_records::create_ggc (param_modref_max_bases
,
1155 param_modref_max_refs
,
1156 param_modref_max_accesses
);
1157 gcc_assert (!summary
->stores
);
1158 summary
->stores
= modref_records::create_ggc (param_modref_max_bases
,
1159 param_modref_max_refs
,
1160 param_modref_max_accesses
);
1161 summary
->writes_errno
= false;
1165 gcc_assert (!summary_lto
->loads
);
1166 summary_lto
->loads
= modref_records_lto::create_ggc
1167 (param_modref_max_bases
,
1168 param_modref_max_refs
,
1169 param_modref_max_accesses
);
1170 gcc_assert (!summary_lto
->stores
);
1171 summary_lto
->stores
= modref_records_lto::create_ggc
1172 (param_modref_max_bases
,
1173 param_modref_max_refs
,
1174 param_modref_max_accesses
);
1175 summary_lto
->writes_errno
= false;
1177 int ecf_flags
= flags_from_decl_or_type (current_function_decl
);
1178 auto_vec
<gimple
*, 32> recursive_calls
;
1180 /* Analyze each statement in each basic block of the function. If the
1181 statement cannot be analyzed (for any reason), the entire function cannot
1182 be analyzed by modref. */
1184 FOR_EACH_BB_FN (bb
, f
)
1186 gimple_stmt_iterator si
;
1187 for (si
= gsi_after_labels (bb
); !gsi_end_p (si
); gsi_next (&si
))
1189 if (!analyze_stmt (summary
, summary_lto
,
1190 gsi_stmt (si
), ipa
, &recursive_calls
)
1191 || ((!summary
|| !summary
->useful_p (ecf_flags
))
1192 && (!summary_lto
|| !summary_lto
->useful_p (ecf_flags
))))
1194 remove_summary (lto
, nolto
, ipa
);
1200 /* In non-IPA mode we need to perform iterative datafow on recursive calls.
1201 This needs to be done after all other side effects are computed. */
1204 bool changed
= true;
1208 for (unsigned i
= 0; i
< recursive_calls
.length (); i
++)
1210 changed
|= merge_call_side_effects
1211 (summary
, recursive_calls
[i
], summary
,
1212 ignore_stores_p (current_function_decl
,
1214 (recursive_calls
[i
])),
1216 if (!summary
->useful_p (ecf_flags
))
1218 remove_summary (lto
, nolto
, ipa
);
1224 if (summary
&& !summary
->useful_p (ecf_flags
))
1227 optimization_summaries
->remove (fnode
);
1229 summaries
->remove (fnode
);
1232 if (summary_lto
&& !summary_lto
->useful_p (ecf_flags
))
1234 summaries_lto
->remove (fnode
);
1240 fprintf (dump_file
, " - modref done with result: tracked.\n");
1242 summary
->dump (dump_file
);
1244 summary_lto
->dump (dump_file
);
1248 /* Callback for generate_summary. */
1251 modref_generate (void)
1253 struct cgraph_node
*node
;
1254 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node
)
1256 function
*f
= DECL_STRUCT_FUNCTION (node
->decl
);
1260 analyze_function (f
, true);
1265 /* Called when a new function is inserted to callgraph late. */
1268 modref_summaries::insert (struct cgraph_node
*node
, modref_summary
*)
1270 /* Local passes ought to be executed by the pass manager. */
1271 if (this == optimization_summaries
)
1273 optimization_summaries
->remove (node
);
1276 if (!DECL_STRUCT_FUNCTION (node
->decl
))
1278 summaries
->remove (node
);
1281 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
1282 analyze_function (DECL_STRUCT_FUNCTION (node
->decl
), true);
1286 /* Called when a new function is inserted to callgraph late. */
1289 modref_summaries_lto::insert (struct cgraph_node
*node
, modref_summary_lto
*)
1291 /* We do not support adding new function when IPA information is already
1292 propagated. This is done only by SIMD cloning that is not very
1294 if (!DECL_STRUCT_FUNCTION (node
->decl
)
1297 summaries_lto
->remove (node
);
1300 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
1301 analyze_function (DECL_STRUCT_FUNCTION (node
->decl
), true);
1305 /* Called when new clone is inserted to callgraph late. */
1308 modref_summaries::duplicate (cgraph_node
*, cgraph_node
*dst
,
1309 modref_summary
*src_data
,
1310 modref_summary
*dst_data
)
1312 /* Do not duplicate optimization summaries; we do not handle parameter
1313 transforms on them. */
1314 if (this == optimization_summaries
)
1316 optimization_summaries
->remove (dst
);
1319 dst_data
->stores
= modref_records::create_ggc
1320 (src_data
->stores
->max_bases
,
1321 src_data
->stores
->max_refs
,
1322 src_data
->stores
->max_accesses
);
1323 dst_data
->stores
->copy_from (src_data
->stores
);
1324 dst_data
->loads
= modref_records::create_ggc
1325 (src_data
->loads
->max_bases
,
1326 src_data
->loads
->max_refs
,
1327 src_data
->loads
->max_accesses
);
1328 dst_data
->loads
->copy_from (src_data
->loads
);
1329 dst_data
->writes_errno
= src_data
->writes_errno
;
1332 /* Called when new clone is inserted to callgraph late. */
1335 modref_summaries_lto::duplicate (cgraph_node
*, cgraph_node
*,
1336 modref_summary_lto
*src_data
,
1337 modref_summary_lto
*dst_data
)
1339 /* Be sure that no furhter cloning happens after ipa-modref. If it does
1340 we will need to update signatures for possible param changes. */
1341 gcc_checking_assert (!((modref_summaries_lto
*)summaries_lto
)->propagated
);
1342 dst_data
->stores
= modref_records_lto::create_ggc
1343 (src_data
->stores
->max_bases
,
1344 src_data
->stores
->max_refs
,
1345 src_data
->stores
->max_accesses
);
1346 dst_data
->stores
->copy_from (src_data
->stores
);
1347 dst_data
->loads
= modref_records_lto::create_ggc
1348 (src_data
->loads
->max_bases
,
1349 src_data
->loads
->max_refs
,
1350 src_data
->loads
->max_accesses
);
1351 dst_data
->loads
->copy_from (src_data
->loads
);
1352 dst_data
->writes_errno
= src_data
->writes_errno
;
1357 /* Definition of the modref pass on GIMPLE. */
1358 const pass_data pass_data_modref
= {
1363 (PROP_cfg
| PROP_ssa
),
1370 class pass_modref
: public gimple_opt_pass
1373 pass_modref (gcc::context
*ctxt
)
1374 : gimple_opt_pass (pass_data_modref
, ctxt
) {}
1376 /* opt_pass methods: */
1379 return new pass_modref (m_ctxt
);
1381 virtual bool gate (function
*)
1383 return flag_ipa_modref
;
1385 virtual unsigned int execute (function
*);
1388 /* Encode TT to the output block OB using the summary streaming API. */
1391 write_modref_records (modref_records_lto
*tt
, struct output_block
*ob
)
1393 streamer_write_uhwi (ob
, tt
->max_bases
);
1394 streamer_write_uhwi (ob
, tt
->max_refs
);
1395 streamer_write_uhwi (ob
, tt
->max_accesses
);
1397 streamer_write_uhwi (ob
, tt
->every_base
);
1398 streamer_write_uhwi (ob
, vec_safe_length (tt
->bases
));
1400 modref_base_node
<tree
> *base_node
;
1401 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, base_node
)
1403 stream_write_tree (ob
, base_node
->base
, true);
1405 streamer_write_uhwi (ob
, base_node
->every_ref
);
1406 streamer_write_uhwi (ob
, vec_safe_length (base_node
->refs
));
1409 modref_ref_node
<tree
> *ref_node
;
1410 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
1412 stream_write_tree (ob
, ref_node
->ref
, true);
1413 streamer_write_uhwi (ob
, ref_node
->every_access
);
1414 streamer_write_uhwi (ob
, vec_safe_length (ref_node
->accesses
));
1417 modref_access_node
*access_node
;
1418 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
1420 streamer_write_hwi (ob
, access_node
->parm_index
);
1421 if (access_node
->parm_index
!= -1)
1423 streamer_write_uhwi (ob
, access_node
->parm_offset_known
);
1424 if (access_node
->parm_offset_known
)
1426 streamer_write_poly_int64 (ob
, access_node
->parm_offset
);
1427 streamer_write_poly_int64 (ob
, access_node
->offset
);
1428 streamer_write_poly_int64 (ob
, access_node
->size
);
1429 streamer_write_poly_int64 (ob
, access_node
->max_size
);
1437 /* Read a modref_tree from the input block IB using the data from DATA_IN.
1438 This assumes that the tree was encoded using write_modref_tree.
1439 Either nolto_ret or lto_ret is initialized by the tree depending whether
1440 LTO streaming is expected or not. */
1443 read_modref_records (lto_input_block
*ib
, struct data_in
*data_in
,
1444 modref_records
**nolto_ret
,
1445 modref_records_lto
**lto_ret
)
1447 size_t max_bases
= streamer_read_uhwi (ib
);
1448 size_t max_refs
= streamer_read_uhwi (ib
);
1449 size_t max_accesses
= streamer_read_uhwi (ib
);
1452 *lto_ret
= modref_records_lto::create_ggc (max_bases
, max_refs
,
1455 *nolto_ret
= modref_records::create_ggc (max_bases
, max_refs
,
1457 gcc_checking_assert (lto_ret
|| nolto_ret
);
1459 size_t every_base
= streamer_read_uhwi (ib
);
1460 size_t nbase
= streamer_read_uhwi (ib
);
1462 gcc_assert (!every_base
|| nbase
== 0);
1466 (*nolto_ret
)->collapse ();
1468 (*lto_ret
)->collapse ();
1470 for (size_t i
= 0; i
< nbase
; i
++)
1472 tree base_tree
= stream_read_tree (ib
, data_in
);
1473 modref_base_node
<alias_set_type
> *nolto_base_node
= NULL
;
1474 modref_base_node
<tree
> *lto_base_node
= NULL
;
1476 /* At stream in time we have LTO alias info. Check if we streamed in
1477 something obviously unnecessary. Do not glob types by alias sets;
1478 it is not 100% clear that ltrans types will get merged same way.
1479 Types may get refined based on ODR type conflicts. */
1480 if (base_tree
&& !get_alias_set (base_tree
))
1484 fprintf (dump_file
, "Streamed in alias set 0 type ");
1485 print_generic_expr (dump_file
, base_tree
);
1486 fprintf (dump_file
, "\n");
1492 nolto_base_node
= (*nolto_ret
)->insert_base (base_tree
1493 ? get_alias_set (base_tree
)
1496 lto_base_node
= (*lto_ret
)->insert_base (base_tree
);
1497 size_t every_ref
= streamer_read_uhwi (ib
);
1498 size_t nref
= streamer_read_uhwi (ib
);
1500 gcc_assert (!every_ref
|| nref
== 0);
1503 if (nolto_base_node
)
1504 nolto_base_node
->collapse ();
1506 lto_base_node
->collapse ();
1508 for (size_t j
= 0; j
< nref
; j
++)
1510 tree ref_tree
= stream_read_tree (ib
, data_in
);
1512 if (ref_tree
&& !get_alias_set (ref_tree
))
1516 fprintf (dump_file
, "Streamed in alias set 0 type ");
1517 print_generic_expr (dump_file
, ref_tree
);
1518 fprintf (dump_file
, "\n");
1523 modref_ref_node
<alias_set_type
> *nolto_ref_node
= NULL
;
1524 modref_ref_node
<tree
> *lto_ref_node
= NULL
;
1526 if (nolto_base_node
)
1528 = nolto_base_node
->insert_ref (ref_tree
1529 ? get_alias_set (ref_tree
) : 0,
1532 lto_ref_node
= lto_base_node
->insert_ref (ref_tree
, max_refs
);
1534 size_t every_access
= streamer_read_uhwi (ib
);
1535 size_t naccesses
= streamer_read_uhwi (ib
);
1538 nolto_ref_node
->every_access
= every_access
;
1540 lto_ref_node
->every_access
= every_access
;
1542 for (size_t k
= 0; k
< naccesses
; k
++)
1544 int parm_index
= streamer_read_hwi (ib
);
1545 bool parm_offset_known
= false;
1546 poly_int64 parm_offset
= 0;
1547 poly_int64 offset
= 0;
1548 poly_int64 size
= -1;
1549 poly_int64 max_size
= -1;
1551 if (parm_index
!= -1)
1553 parm_offset_known
= streamer_read_uhwi (ib
);
1554 if (parm_offset_known
)
1556 parm_offset
= streamer_read_poly_int64 (ib
);
1557 offset
= streamer_read_poly_int64 (ib
);
1558 size
= streamer_read_poly_int64 (ib
);
1559 max_size
= streamer_read_poly_int64 (ib
);
1562 modref_access_node a
= {offset
, size
, max_size
, parm_offset
,
1563 parm_index
, parm_offset_known
};
1565 nolto_ref_node
->insert_access (a
, max_accesses
);
1567 lto_ref_node
->insert_access (a
, max_accesses
);
1572 (*lto_ret
)->cleanup ();
1574 (*nolto_ret
)->cleanup ();
1577 /* Callback for write_summary. */
1582 struct output_block
*ob
= create_output_block (LTO_section_ipa_modref
);
1583 lto_symtab_encoder_t encoder
= ob
->decl_state
->symtab_node_encoder
;
1584 unsigned int count
= 0;
1589 streamer_write_uhwi (ob
, 0);
1590 streamer_write_char_stream (ob
->main_stream
, 0);
1591 produce_asm (ob
, NULL
);
1592 destroy_output_block (ob
);
1596 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
1598 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
1599 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
1600 modref_summary_lto
*r
;
1602 if (cnode
&& cnode
->definition
&& !cnode
->alias
1603 && (r
= summaries_lto
->get (cnode
))
1604 && r
->useful_p (flags_from_decl_or_type (cnode
->decl
)))
1607 streamer_write_uhwi (ob
, count
);
1609 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
1611 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
1612 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
1614 if (cnode
&& cnode
->definition
&& !cnode
->alias
)
1616 modref_summary_lto
*r
= summaries_lto
->get (cnode
);
1618 if (!r
|| !r
->useful_p (flags_from_decl_or_type (cnode
->decl
)))
1621 streamer_write_uhwi (ob
, lto_symtab_encoder_encode (encoder
, cnode
));
1623 write_modref_records (r
->loads
, ob
);
1624 write_modref_records (r
->stores
, ob
);
1626 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
1627 bp_pack_value (&bp
, r
->writes_errno
, 1);
1630 for (cgraph_edge
*e
= cnode
->indirect_calls
;
1631 e
; e
= e
->next_callee
)
1633 class fnspec_summary
*sum
= fnspec_summaries
->get (e
);
1634 bp_pack_value (&bp
, sum
!= NULL
, 1);
1636 bp_pack_string (ob
, &bp
, sum
->fnspec
, true);
1638 for (cgraph_edge
*e
= cnode
->callees
; e
; e
= e
->next_callee
)
1640 class fnspec_summary
*sum
= fnspec_summaries
->get (e
);
1641 bp_pack_value (&bp
, sum
!= NULL
, 1);
1643 bp_pack_string (ob
, &bp
, sum
->fnspec
, true);
1646 streamer_write_bitpack (&bp
);
1649 streamer_write_char_stream (ob
->main_stream
, 0);
1650 produce_asm (ob
, NULL
);
1651 destroy_output_block (ob
);
1655 read_section (struct lto_file_decl_data
*file_data
, const char *data
,
1658 const struct lto_function_header
*header
1659 = (const struct lto_function_header
*) data
;
1660 const int cfg_offset
= sizeof (struct lto_function_header
);
1661 const int main_offset
= cfg_offset
+ header
->cfg_size
;
1662 const int string_offset
= main_offset
+ header
->main_size
;
1663 struct data_in
*data_in
;
1665 unsigned int f_count
;
1667 lto_input_block
ib ((const char *) data
+ main_offset
, header
->main_size
,
1668 file_data
->mode_table
);
1671 = lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
1672 header
->string_size
, vNULL
);
1673 f_count
= streamer_read_uhwi (&ib
);
1674 for (i
= 0; i
< f_count
; i
++)
1676 struct cgraph_node
*node
;
1677 lto_symtab_encoder_t encoder
;
1679 unsigned int index
= streamer_read_uhwi (&ib
);
1680 encoder
= file_data
->symtab_node_encoder
;
1681 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
1684 modref_summary
*modref_sum
= summaries
1685 ? summaries
->get_create (node
) : NULL
;
1686 modref_summary_lto
*modref_sum_lto
= summaries_lto
1687 ? summaries_lto
->get_create (node
)
1689 if (optimization_summaries
)
1690 modref_sum
= optimization_summaries
->get_create (node
);
1693 modref_sum
->writes_errno
= false;
1695 modref_sum_lto
->writes_errno
= false;
1697 gcc_assert (!modref_sum
|| (!modref_sum
->loads
1698 && !modref_sum
->stores
));
1699 gcc_assert (!modref_sum_lto
|| (!modref_sum_lto
->loads
1700 && !modref_sum_lto
->stores
));
1701 read_modref_records (&ib
, data_in
,
1702 modref_sum
? &modref_sum
->loads
: NULL
,
1703 modref_sum_lto
? &modref_sum_lto
->loads
: NULL
);
1704 read_modref_records (&ib
, data_in
,
1705 modref_sum
? &modref_sum
->stores
: NULL
,
1706 modref_sum_lto
? &modref_sum_lto
->stores
: NULL
);
1707 struct bitpack_d bp
= streamer_read_bitpack (&ib
);
1708 if (bp_unpack_value (&bp
, 1))
1711 modref_sum
->writes_errno
= true;
1713 modref_sum_lto
->writes_errno
= true;
1717 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
1719 if (bp_unpack_value (&bp
, 1))
1721 class fnspec_summary
*sum
= fnspec_summaries
->get_create (e
);
1722 sum
->fnspec
= xstrdup (bp_unpack_string (data_in
, &bp
));
1725 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
1727 if (bp_unpack_value (&bp
, 1))
1729 class fnspec_summary
*sum
= fnspec_summaries
->get_create (e
);
1730 sum
->fnspec
= xstrdup (bp_unpack_string (data_in
, &bp
));
1736 fprintf (dump_file
, "Read modref for %s\n",
1737 node
->dump_name ());
1739 modref_sum
->dump (dump_file
);
1741 modref_sum_lto
->dump (dump_file
);
1745 lto_free_section_data (file_data
, LTO_section_ipa_modref
, NULL
, data
,
1747 lto_data_in_delete (data_in
);
1750 /* Callback for read_summary. */
1755 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
1756 struct lto_file_decl_data
*file_data
;
1759 gcc_checking_assert (!optimization_summaries
&& !summaries
&& !summaries_lto
);
1761 optimization_summaries
= modref_summaries::create_ggc (symtab
);
1764 if (flag_wpa
|| flag_incremental_link
== INCREMENTAL_LINK_LTO
)
1765 summaries_lto
= modref_summaries_lto::create_ggc (symtab
);
1767 || (flag_incremental_link
== INCREMENTAL_LINK_LTO
1768 && flag_fat_lto_objects
))
1769 summaries
= modref_summaries::create_ggc (symtab
);
1770 if (!fnspec_summaries
)
1771 fnspec_summaries
= new fnspec_summaries_t (symtab
);
1774 while ((file_data
= file_data_vec
[j
++]))
1777 const char *data
= lto_get_summary_section_data (file_data
,
1778 LTO_section_ipa_modref
,
1781 read_section (file_data
, data
, len
);
1783 /* Fatal error here. We do not want to support compiling ltrans units
1784 with different version of compiler or different flags than the WPA
1785 unit, so this should never happen. */
1786 fatal_error (input_location
,
1787 "IPA modref summary is missing in input file");
1791 /* If signature changed, update the summary. */
1794 update_signature (struct cgraph_node
*node
)
1796 clone_info
*info
= clone_info::get (node
);
1797 if (!info
|| !info
->param_adjustments
)
1800 modref_summary
*r
= optimization_summaries
1801 ? optimization_summaries
->get (node
) : NULL
;
1802 modref_summary_lto
*r_lto
= summaries_lto
1803 ? summaries_lto
->get (node
) : NULL
;
1808 fprintf (dump_file
, "Updating summary for %s from:\n",
1809 node
->dump_name ());
1810 r
->dump (dump_file
);
1814 ipa_adjusted_param
*p
;
1816 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
1818 int idx
= info
->param_adjustments
->get_original_index (i
);
1823 auto_vec
<int, 32> map
;
1825 map
.reserve (max
+ 1);
1826 for (i
= 0; i
<= max
; i
++)
1827 map
.quick_push (-1);
1828 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
1830 int idx
= info
->param_adjustments
->get_original_index (i
);
1836 r
->loads
->remap_params (&map
);
1837 r
->stores
->remap_params (&map
);
1841 r_lto
->loads
->remap_params (&map
);
1842 r_lto
->stores
->remap_params (&map
);
1846 fprintf (dump_file
, "to:\n");
1848 r
->dump (dump_file
);
1850 r_lto
->dump (dump_file
);
1855 /* Definition of the modref IPA pass. */
1856 const pass_data pass_data_ipa_modref
=
1858 IPA_PASS
, /* type */
1859 "modref", /* name */
1860 OPTGROUP_IPA
, /* optinfo_flags */
1861 TV_IPA_MODREF
, /* tv_id */
1862 0, /* properties_required */
1863 0, /* properties_provided */
1864 0, /* properties_destroyed */
1865 0, /* todo_flags_start */
1866 ( TODO_dump_symtab
), /* todo_flags_finish */
1869 class pass_ipa_modref
: public ipa_opt_pass_d
1872 pass_ipa_modref (gcc::context
*ctxt
)
1873 : ipa_opt_pass_d (pass_data_ipa_modref
, ctxt
,
1874 modref_generate
, /* generate_summary */
1875 modref_write
, /* write_summary */
1876 modref_read
, /* read_summary */
1877 modref_write
, /* write_optimization_summary */
1878 modref_read
, /* read_optimization_summary */
1879 NULL
, /* stmt_fixup */
1880 0, /* function_transform_todo_flags_start */
1881 NULL
, /* function_transform */
1882 NULL
) /* variable_transform */
1885 /* opt_pass methods: */
1886 opt_pass
*clone () { return new pass_ipa_modref (m_ctxt
); }
1887 virtual bool gate (function
*)
1891 virtual unsigned int execute (function
*);
1897 unsigned int pass_modref::execute (function
*f
)
1899 analyze_function (f
, false);
1904 make_pass_modref (gcc::context
*ctxt
)
1906 return new pass_modref (ctxt
);
1910 make_pass_ipa_modref (gcc::context
*ctxt
)
1912 return new pass_ipa_modref (ctxt
);
1915 /* Skip edges from and to nodes without ipa_pure_const enabled.
1916 Ignore not available symbols. */
1919 ignore_edge (struct cgraph_edge
*e
)
1921 /* We merge summaries of inline clones into summaries of functions they
1922 are inlined to. For that reason the complete function bodies must
1924 if (!e
->inline_failed
)
1926 enum availability avail
;
1927 cgraph_node
*callee
= e
->callee
->function_or_virtual_thunk_symbol
1928 (&avail
, e
->caller
);
1930 return (avail
<= AVAIL_INTERPOSABLE
1931 || ((!optimization_summaries
|| !optimization_summaries
->get (callee
))
1932 && (!summaries_lto
|| !summaries_lto
->get (callee
)))
1933 || flags_from_decl_or_type (e
->callee
->decl
)
1934 & (ECF_CONST
| ECF_NOVOPS
));
1937 /* Compute parm_map for CALLE_EDGE. */
1940 compute_parm_map (cgraph_edge
*callee_edge
, vec
<modref_parm_map
> *parm_map
)
1942 class ipa_edge_args
*args
;
1943 if (ipa_node_params_sum
1944 && !callee_edge
->call_stmt_cannot_inline_p
1945 && (args
= IPA_EDGE_REF (callee_edge
)) != NULL
)
1947 int i
, count
= ipa_get_cs_argument_count (args
);
1948 class ipa_node_params
*caller_parms_info
, *callee_pi
;
1949 class ipa_call_summary
*es
1950 = ipa_call_summaries
->get (callee_edge
);
1952 = callee_edge
->callee
->function_or_virtual_thunk_symbol
1953 (NULL
, callee_edge
->caller
);
1955 caller_parms_info
= IPA_NODE_REF (callee_edge
->caller
->inlined_to
1956 ? callee_edge
->caller
->inlined_to
1957 : callee_edge
->caller
);
1958 callee_pi
= IPA_NODE_REF (callee
);
1960 (*parm_map
).safe_grow_cleared (count
);
1962 for (i
= 0; i
< count
; i
++)
1964 if (es
&& es
->param
[i
].points_to_local_or_readonly_memory
)
1966 (*parm_map
)[i
].parm_index
= -2;
1970 struct ipa_jump_func
*jf
1971 = ipa_get_ith_jump_func (args
, i
);
1972 if (jf
&& callee_pi
)
1974 tree cst
= ipa_value_from_jfunc (caller_parms_info
,
1978 if (cst
&& points_to_local_or_readonly_memory_p (cst
))
1980 (*parm_map
)[i
].parm_index
= -2;
1984 if (jf
&& jf
->type
== IPA_JF_PASS_THROUGH
)
1986 (*parm_map
)[i
].parm_index
1987 = ipa_get_jf_pass_through_formal_id (jf
);
1988 if (ipa_get_jf_pass_through_operation (jf
) == NOP_EXPR
)
1990 (*parm_map
)[i
].parm_offset_known
= true;
1991 (*parm_map
)[i
].parm_offset
= 0;
1993 else if (ipa_get_jf_pass_through_operation (jf
)
1994 == POINTER_PLUS_EXPR
1995 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf
),
1996 &(*parm_map
)[i
].parm_offset
))
1997 (*parm_map
)[i
].parm_offset_known
= true;
1999 (*parm_map
)[i
].parm_offset_known
= false;
2002 if (jf
&& jf
->type
== IPA_JF_ANCESTOR
)
2004 (*parm_map
)[i
].parm_index
= ipa_get_jf_ancestor_formal_id (jf
);
2005 (*parm_map
)[i
].parm_offset_known
= true;
2007 (!(ipa_get_jf_ancestor_offset (jf
) & (BITS_PER_UNIT
- 1)));
2008 (*parm_map
)[i
].parm_offset
2009 = ipa_get_jf_ancestor_offset (jf
) >> LOG2_BITS_PER_UNIT
;
2012 (*parm_map
)[i
].parm_index
= -1;
2016 fprintf (dump_file
, " Parm map: ");
2017 for (i
= 0; i
< count
; i
++)
2018 fprintf (dump_file
, " %i", (*parm_map
)[i
].parm_index
);
2019 fprintf (dump_file
, "\n");
2026 /* Call EDGE was inlined; merge summary from callee to the caller. */
2029 ipa_merge_modref_summary_after_inlining (cgraph_edge
*edge
)
2031 if (!summaries
&& !summaries_lto
)
2034 struct cgraph_node
*to
= (edge
->caller
->inlined_to
2035 ? edge
->caller
->inlined_to
: edge
->caller
);
2036 class modref_summary
*to_info
= summaries
? summaries
->get (to
) : NULL
;
2037 class modref_summary_lto
*to_info_lto
= summaries_lto
2038 ? summaries_lto
->get (to
) : NULL
;
2040 if (!to_info
&& !to_info_lto
)
2043 summaries
->remove (edge
->callee
);
2045 summaries_lto
->remove (edge
->callee
);
2049 class modref_summary
*callee_info
= summaries
? summaries
->get (edge
->callee
)
2051 class modref_summary_lto
*callee_info_lto
2052 = summaries_lto
? summaries_lto
->get (edge
->callee
) : NULL
;
2053 int flags
= flags_from_decl_or_type (edge
->callee
->decl
);
2055 if (!callee_info
&& to_info
)
2057 if (ignore_stores_p (edge
->caller
->decl
, flags
))
2058 to_info
->loads
->collapse ();
2061 summaries
->remove (to
);
2065 if (!callee_info_lto
&& to_info_lto
)
2067 if (ignore_stores_p (edge
->caller
->decl
, flags
))
2068 to_info_lto
->loads
->collapse ();
2071 summaries_lto
->remove (to
);
2075 if (callee_info
|| callee_info_lto
)
2077 auto_vec
<modref_parm_map
, 32> parm_map
;
2079 compute_parm_map (edge
, &parm_map
);
2081 if (!ignore_stores_p (edge
->caller
->decl
, flags
))
2083 if (to_info
&& callee_info
)
2084 to_info
->stores
->merge (callee_info
->stores
, &parm_map
);
2085 if (to_info_lto
&& callee_info_lto
)
2086 to_info_lto
->stores
->merge (callee_info_lto
->stores
, &parm_map
);
2088 if (to_info
&& callee_info
)
2089 to_info
->loads
->merge (callee_info
->loads
, &parm_map
);
2090 if (to_info_lto
&& callee_info_lto
)
2091 to_info_lto
->loads
->merge (callee_info_lto
->loads
, &parm_map
);
2095 if (to_info
&& !to_info
->useful_p (flags
))
2098 fprintf (dump_file
, "Removed mod-ref summary for %s\n",
2100 summaries
->remove (to
);
2102 else if (to_info
&& dump_file
)
2105 fprintf (dump_file
, "Updated mod-ref summary for %s\n",
2107 to_info
->dump (dump_file
);
2110 summaries
->remove (edge
->callee
);
2114 if (to_info_lto
&& !to_info_lto
->useful_p (flags
))
2117 fprintf (dump_file
, "Removed mod-ref summary for %s\n",
2119 summaries_lto
->remove (to
);
2121 else if (to_info_lto
&& dump_file
)
2124 fprintf (dump_file
, "Updated mod-ref summary for %s\n",
2126 to_info_lto
->dump (dump_file
);
2128 if (callee_info_lto
)
2129 summaries_lto
->remove (edge
->callee
);
2134 /* Get parameter type from DECL. This is only safe for special cases
2135 like builtins we create fnspec for because the type match is checked
2136 at fnspec creation time. */
2139 get_parm_type (tree decl
, unsigned int i
)
2141 tree t
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
2143 for (unsigned int p
= 0; p
< i
; p
++)
2145 return TREE_VALUE (t
);
2148 /* Return access mode for argument I of call E with FNSPEC. */
2150 static modref_access_node
2151 get_access_for_fnspec (cgraph_edge
*e
, attr_fnspec
&fnspec
,
2152 unsigned int i
, modref_parm_map
&map
)
2154 tree size
= NULL_TREE
;
2155 unsigned int size_arg
;
2157 if (!fnspec
.arg_specified_p (i
))
2159 else if (fnspec
.arg_max_access_size_given_by_arg_p (i
, &size_arg
))
2161 cgraph_node
*node
= e
->caller
->inlined_to
2162 ? e
->caller
->inlined_to
: e
->caller
;
2163 class ipa_node_params
*caller_parms_info
= IPA_NODE_REF (node
);
2164 class ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2165 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, size_arg
);
2168 size
= ipa_value_from_jfunc (caller_parms_info
, jf
,
2169 get_parm_type (e
->callee
->decl
, size_arg
));
2171 else if (fnspec
.arg_access_size_given_by_type_p (i
))
2172 size
= TYPE_SIZE_UNIT (get_parm_type (e
->callee
->decl
, i
));
2173 modref_access_node a
= {0, -1, -1,
2174 map
.parm_offset
, map
.parm_index
,
2175 map
.parm_offset_known
};
2176 poly_int64 size_hwi
;
2178 && poly_int_tree_p (size
, &size_hwi
)
2179 && coeffs_in_range_p (size_hwi
, 0,
2180 HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
))
2183 a
.max_size
= size_hwi
<< LOG2_BITS_PER_UNIT
;
2188 /* Call E in NODE with ECF_FLAGS has no summary; update MODREF_SUMMARY and
2189 CUR_SUMMARY_LTO accordingly. Return true if something changed. */
2192 propagate_unknown_call (cgraph_node
*node
,
2193 cgraph_edge
*e
, int ecf_flags
,
2194 modref_summary
**cur_summary_ptr
,
2195 modref_summary_lto
**cur_summary_lto_ptr
)
2197 bool changed
= false;
2198 modref_summary
*cur_summary
= cur_summary_ptr
? *cur_summary_ptr
: NULL
;
2199 modref_summary_lto
*cur_summary_lto
= cur_summary_lto_ptr
2200 ? *cur_summary_lto_ptr
: NULL
;
2201 class fnspec_summary
*fnspec_sum
= fnspec_summaries
->get (e
);
2202 auto_vec
<modref_parm_map
, 32> parm_map
;
2204 && compute_parm_map (e
, &parm_map
))
2206 attr_fnspec
fnspec (fnspec_sum
->fnspec
);
2208 gcc_checking_assert (fnspec
.known_p ());
2209 if (fnspec
.global_memory_read_p ())
2210 collapse_loads (cur_summary
, cur_summary_lto
);
2213 tree t
= TYPE_ARG_TYPES (TREE_TYPE (e
->callee
->decl
));
2214 for (unsigned i
= 0; i
< parm_map
.length () && t
;
2215 i
++, t
= TREE_CHAIN (t
))
2216 if (!POINTER_TYPE_P (TREE_VALUE (t
)))
2218 else if (!fnspec
.arg_specified_p (i
)
2219 || fnspec
.arg_maybe_read_p (i
))
2221 modref_parm_map map
= parm_map
[i
];
2222 if (map
.parm_index
== -2)
2224 if (map
.parm_index
== -1)
2226 collapse_loads (cur_summary
, cur_summary_lto
);
2230 changed
|= cur_summary
->loads
->insert
2231 (0, 0, get_access_for_fnspec (e
, fnspec
, i
, map
));
2232 if (cur_summary_lto
)
2233 changed
|= cur_summary_lto
->loads
->insert
2234 (0, 0, get_access_for_fnspec (e
, fnspec
, i
, map
));
2237 if (ignore_stores_p (node
->decl
, ecf_flags
))
2239 else if (fnspec
.global_memory_written_p ())
2240 collapse_stores (cur_summary
, cur_summary_lto
);
2243 tree t
= TYPE_ARG_TYPES (TREE_TYPE (e
->callee
->decl
));
2244 for (unsigned i
= 0; i
< parm_map
.length () && t
;
2245 i
++, t
= TREE_CHAIN (t
))
2246 if (!POINTER_TYPE_P (TREE_VALUE (t
)))
2248 else if (!fnspec
.arg_specified_p (i
)
2249 || fnspec
.arg_maybe_written_p (i
))
2251 modref_parm_map map
= parm_map
[i
];
2252 if (map
.parm_index
== -2)
2254 if (map
.parm_index
== -1)
2256 collapse_stores (cur_summary
, cur_summary_lto
);
2260 changed
|= cur_summary
->stores
->insert
2261 (0, 0, get_access_for_fnspec (e
, fnspec
, i
, map
));
2262 if (cur_summary_lto
)
2263 changed
|= cur_summary_lto
->stores
->insert
2264 (0, 0, get_access_for_fnspec (e
, fnspec
, i
, map
));
2267 if (fnspec
.errno_maybe_written_p () && flag_errno_math
)
2269 if (cur_summary
&& !cur_summary
->writes_errno
)
2271 cur_summary
->writes_errno
= true;
2274 if (cur_summary_lto
&& !cur_summary_lto
->writes_errno
)
2276 cur_summary_lto
->writes_errno
= true;
2282 if (ignore_stores_p (node
->decl
, ecf_flags
))
2285 fprintf (dump_file
, " collapsing loads\n");
2286 return collapse_loads (cur_summary
, cur_summary_lto
);
2288 if (optimization_summaries
)
2289 optimization_summaries
->remove (node
);
2291 summaries_lto
->remove (node
);
2292 if (cur_summary_ptr
)
2293 *cur_summary_ptr
= NULL
;
2294 if (cur_summary_lto_ptr
)
2295 *cur_summary_lto_ptr
= NULL
;
2297 fprintf (dump_file
, " Giving up\n");
2301 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE. */
2304 modref_propagate_in_scc (cgraph_node
*component_node
)
2306 bool changed
= true;
2312 for (struct cgraph_node
*cur
= component_node
; cur
;
2313 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
2315 cgraph_node
*node
= cur
->inlined_to
? cur
->inlined_to
: cur
;
2316 modref_summary
*cur_summary
= optimization_summaries
2317 ? optimization_summaries
->get (node
)
2319 modref_summary_lto
*cur_summary_lto
= summaries_lto
2320 ? summaries_lto
->get (node
)
2323 if (!cur_summary
&& !cur_summary_lto
)
2327 fprintf (dump_file
, " Processing %s%s%s\n",
2329 TREE_READONLY (cur
->decl
) ? " (const)" : "",
2330 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
2332 for (cgraph_edge
*e
= cur
->indirect_calls
; e
; e
= e
->next_callee
)
2334 if (e
->indirect_info
->ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
2337 fprintf (dump_file
, " Indirect call"
2338 "collapsing loads\n");
2339 changed
|= propagate_unknown_call
2340 (node
, e
, e
->indirect_info
->ecf_flags
,
2341 &cur_summary
, &cur_summary_lto
);
2342 if (!cur_summary
&& !cur_summary_lto
)
2346 if (!cur_summary
&& !cur_summary_lto
)
2349 for (cgraph_edge
*callee_edge
= cur
->callees
; callee_edge
;
2350 callee_edge
= callee_edge
->next_callee
)
2352 int flags
= flags_from_decl_or_type (callee_edge
->callee
->decl
);
2353 modref_summary
*callee_summary
= NULL
;
2354 modref_summary_lto
*callee_summary_lto
= NULL
;
2355 struct cgraph_node
*callee
;
2357 if (flags
& (ECF_CONST
| ECF_NOVOPS
)
2358 || !callee_edge
->inline_failed
)
2361 /* Get the callee and its summary. */
2362 enum availability avail
;
2363 callee
= callee_edge
->callee
->function_or_virtual_thunk_symbol
2366 /* It is not necessary to re-process calls outside of the
2370 || ((struct ipa_dfs_info
*)cur
->aux
)->scc_no
2371 != ((struct ipa_dfs_info
*)callee
->aux
)->scc_no
))
2375 fprintf (dump_file
, " Call to %s\n",
2376 callee_edge
->callee
->dump_name ());
2378 bool ignore_stores
= ignore_stores_p (cur
->decl
, flags
);
2380 if (avail
<= AVAIL_INTERPOSABLE
)
2383 fprintf (dump_file
, " Call target interposable"
2384 " or not available\n");
2385 changed
|= propagate_unknown_call
2386 (node
, callee_edge
, flags
,
2387 &cur_summary
, &cur_summary_lto
);
2388 if (!cur_summary
&& !cur_summary_lto
)
2393 /* We don't know anything about CALLEE, hence we cannot tell
2394 anything about the entire component. */
2397 && !(callee_summary
= optimization_summaries
->get (callee
)))
2400 fprintf (dump_file
, " No call target summary\n");
2401 changed
|= propagate_unknown_call
2402 (node
, callee_edge
, flags
,
2403 &cur_summary
, NULL
);
2404 if (!cur_summary
&& !cur_summary_lto
)
2408 && !(callee_summary_lto
= summaries_lto
->get (callee
)))
2411 fprintf (dump_file
, " No call target summary\n");
2412 changed
|= propagate_unknown_call
2413 (node
, callee_edge
, flags
,
2414 NULL
, &cur_summary_lto
);
2415 if (!cur_summary
&& !cur_summary_lto
)
2419 /* We can not safely optimize based on summary of callee if it
2420 does not always bind to current def: it is possible that
2421 memory load was optimized out earlier which may not happen in
2422 the interposed variant. */
2423 if (!callee_edge
->binds_to_current_def_p ())
2425 changed
|= collapse_loads (cur_summary
, cur_summary_lto
);
2427 fprintf (dump_file
, " May not bind local;"
2428 " collapsing loads\n");
2432 auto_vec
<modref_parm_map
, 32> parm_map
;
2434 compute_parm_map (callee_edge
, &parm_map
);
2436 /* Merge in callee's information. */
2439 changed
|= cur_summary
->loads
->merge
2440 (callee_summary
->loads
, &parm_map
);
2443 changed
|= cur_summary
->stores
->merge
2444 (callee_summary
->stores
, &parm_map
);
2445 if (!cur_summary
->writes_errno
2446 && callee_summary
->writes_errno
)
2448 cur_summary
->writes_errno
= true;
2453 if (callee_summary_lto
)
2455 changed
|= cur_summary_lto
->loads
->merge
2456 (callee_summary_lto
->loads
, &parm_map
);
2459 changed
|= cur_summary_lto
->stores
->merge
2460 (callee_summary_lto
->stores
, &parm_map
);
2461 if (!cur_summary_lto
->writes_errno
2462 && callee_summary_lto
->writes_errno
)
2464 cur_summary_lto
->writes_errno
= true;
2469 if (dump_file
&& changed
)
2472 cur_summary
->dump (dump_file
);
2473 if (cur_summary_lto
)
2474 cur_summary_lto
->dump (dump_file
);
2483 "Propagation finished in %i iterations\n", iteration
);
2484 for (struct cgraph_node
*cur
= component_node
; cur
;
2485 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
2486 if (!cur
->inlined_to
)
2488 modref_summary
*cur_summary
= optimization_summaries
2489 ? optimization_summaries
->get (cur
)
2491 modref_summary_lto
*cur_summary_lto
= summaries_lto
2492 ? summaries_lto
->get (cur
)
2495 fprintf (dump_file
, "Propagated modref for %s%s%s\n",
2497 TREE_READONLY (cur
->decl
) ? " (const)" : "",
2498 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
2499 if (optimization_summaries
)
2502 cur_summary
->dump (dump_file
);
2504 fprintf (dump_file
, " Not tracked\n");
2508 if (cur_summary_lto
)
2509 cur_summary_lto
->dump (dump_file
);
2511 fprintf (dump_file
, " Not tracked (lto)\n");
2517 /* Run the IPA pass. This will take a function's summaries and calls and
2518 construct new summaries which represent a transitive closure. So that
2519 summary of an analyzed function contains information about the loads and
2520 stores that the function or any function that it calls does. */
2523 pass_ipa_modref::execute (function
*)
2525 if (!summaries
&& !summaries_lto
)
2528 if (optimization_summaries
)
2529 ggc_delete (optimization_summaries
);
2530 optimization_summaries
= summaries
;
2533 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*,
2534 symtab
->cgraph_count
);
2536 order_pos
= ipa_reduced_postorder (order
, true, ignore_edge
);
2539 /* Iterate over all strongly connected components in post-order. */
2540 for (i
= 0; i
< order_pos
; i
++)
2542 /* Get the component's representative. That's just any node in the
2543 component from which we can traverse the entire component. */
2544 struct cgraph_node
*component_node
= order
[i
];
2547 fprintf (dump_file
, "\n\nStart of SCC component\n");
2549 modref_propagate_in_scc (component_node
);
2552 FOR_EACH_FUNCTION (node
)
2553 update_signature (node
);
2555 ((modref_summaries_lto
*)summaries_lto
)->propagated
= true;
2556 ipa_free_postorder_info ();
2558 delete fnspec_summaries
;
2559 fnspec_summaries
= NULL
;
2563 /* Summaries must stay alive until end of compilation. */
2566 ipa_modref_c_finalize ()
2568 if (optimization_summaries
)
2569 ggc_delete (optimization_summaries
);
2570 optimization_summaries
= NULL
;
2571 gcc_checking_assert (!summaries
);
2574 ggc_delete (summaries_lto
);
2575 summaries_lto
= NULL
;
2577 if (fnspec_summaries
)
2578 delete fnspec_summaries
;
2579 fnspec_summaries
= NULL
;
2582 #include "gt-ipa-modref.h"