1 /* Search for references that a function loads or stores.
2 Copyright (C) 2020-2021 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls.
25 This file contains a tree pass and an IPA pass. Both perform the same
26 analysis; however, the tree pass is executed during early and late optimization
27 passes to propagate info downwards in the compilation order. IPA pass
28 propagates across the callgraph and is able to handle recursion and works on
29 whole program during link-time analysis.
31 LTO mode differs from the local mode by not recording alias sets but types
32 that are translated to alias sets later. This is necessary in order to stream
33 the information because the alias sets are rebuilt at stream-in time and may
34 not correspond to ones seen during analysis. For this reason part of
35 analysis is duplicated.
37 The following information is computed
38 1) load/store access tree described in ipa-modref-tree.h
39 This is used by tree-ssa-alias to disambiguate load/stores
40 2) EAF flags used by points-to analysis (in tree-ssa-structalias).
41 and defined in tree-core.h.
42 and stored to optimization_summaries.
44 There are multiple summaries computed and used during the propagation:
45 - summaries holds summaries from analysis to IPA propagation
47 - summaries_lto is same as summaries but holds them in a format
48 that can be streamed (as described above).
49 - fnspec_summary holds fnspec strings for call. This is
50 necessary because gimple_call_fnspec performs additional
51 analysis in addition to looking at the callee fndecl.
52 - escape_summary holds escape points for given call edge.
53 That is a vector recording what function parameters
54 may escape to a function call (and with what parameter index). */
58 #include "coretypes.h"
62 #include "alloc-pool.h"
63 #include "tree-pass.h"
64 #include "gimple-iterator.h"
67 #include "ipa-utils.h"
68 #include "symbol-summary.h"
69 #include "gimple-pretty-print.h"
70 #include "gimple-walk.h"
71 #include "print-tree.h"
72 #include "tree-streamer.h"
75 #include "ipa-modref-tree.h"
76 #include "ipa-modref.h"
77 #include "value-range.h"
79 #include "ipa-fnsummary.h"
80 #include "attr-fnspec.h"
81 #include "symtab-clones.h"
82 #include "gimple-ssa.h"
83 #include "tree-phinodes.h"
84 #include "tree-ssa-operands.h"
85 #include "ssa-iterators.h"
86 #include "stringpool.h"
87 #include "tree-ssanames.h"
92 /* We record fnspec specifiers for call edges since they depends on actual
111 /* Summary holding fnspec string for a given call. */
113 class fnspec_summaries_t
: public call_summary
<fnspec_summary
*>
116 fnspec_summaries_t (symbol_table
*symtab
)
117 : call_summary
<fnspec_summary
*> (symtab
) {}
118 /* Hook that is called by summary when an edge is duplicated. */
119 virtual void duplicate (cgraph_edge
*,
124 dst
->fnspec
= xstrdup (src
->fnspec
);
128 static fnspec_summaries_t
*fnspec_summaries
= NULL
;
130 /* Escape summary holds a vector of param indexes that escape to
134 /* Parameter that escapes at a given call. */
135 unsigned int parm_index
;
136 /* Argument it escapes to. */
138 /* Minimal flags known about the argument. */
139 eaf_flags_t min_flags
;
140 /* Does it escape directly or indirectly? */
144 /* Dump EAF flags. */
147 dump_eaf_flags (FILE *out
, int flags
, bool newline
= true)
149 if (flags
& EAF_DIRECT
)
150 fprintf (out
, " direct");
151 if (flags
& EAF_NOCLOBBER
)
152 fprintf (out
, " noclobber");
153 if (flags
& EAF_NOESCAPE
)
154 fprintf (out
, " noescape");
155 if (flags
& EAF_NODIRECTESCAPE
)
156 fprintf (out
, " nodirectescape");
157 if (flags
& EAF_UNUSED
)
158 fprintf (out
, " unused");
159 if (flags
& EAF_NOT_RETURNED
)
160 fprintf (out
, " not_returned");
161 if (flags
& EAF_NOREAD
)
162 fprintf (out
, " noread");
167 struct escape_summary
169 auto_vec
<escape_entry
> esc
;
170 void dump (FILE *out
)
172 for (unsigned int i
= 0; i
< esc
.length (); i
++)
174 fprintf (out
, " parm %i arg %i %s min:",
177 esc
[i
].direct
? "(direct)" : "(indirect)");
178 dump_eaf_flags (out
, esc
[i
].min_flags
, false);
184 class escape_summaries_t
: public call_summary
<escape_summary
*>
187 escape_summaries_t (symbol_table
*symtab
)
188 : call_summary
<escape_summary
*> (symtab
) {}
189 /* Hook that is called by summary when an edge is duplicated. */
190 virtual void duplicate (cgraph_edge
*,
195 dst
->esc
= src
->esc
.copy ();
199 static escape_summaries_t
*escape_summaries
= NULL
;
201 } /* ANON namespace: GTY annotated summaries can not be anonymous. */
204 /* Class (from which there is one global instance) that holds modref summaries
205 for all analyzed functions. */
207 class GTY((user
)) modref_summaries
208 : public fast_function_summary
<modref_summary
*, va_gc
>
211 modref_summaries (symbol_table
*symtab
)
212 : fast_function_summary
<modref_summary
*, va_gc
> (symtab
) {}
213 virtual void insert (cgraph_node
*, modref_summary
*state
);
214 virtual void duplicate (cgraph_node
*src_node
,
215 cgraph_node
*dst_node
,
216 modref_summary
*src_data
,
217 modref_summary
*dst_data
);
218 static modref_summaries
*create_ggc (symbol_table
*symtab
)
220 return new (ggc_alloc_no_dtor
<modref_summaries
> ())
221 modref_summaries (symtab
);
225 class modref_summary_lto
;
227 /* Class (from which there is one global instance) that holds modref summaries
228 for all analyzed functions. */
230 class GTY((user
)) modref_summaries_lto
231 : public fast_function_summary
<modref_summary_lto
*, va_gc
>
234 modref_summaries_lto (symbol_table
*symtab
)
235 : fast_function_summary
<modref_summary_lto
*, va_gc
> (symtab
),
236 propagated (false) {}
237 virtual void insert (cgraph_node
*, modref_summary_lto
*state
);
238 virtual void duplicate (cgraph_node
*src_node
,
239 cgraph_node
*dst_node
,
240 modref_summary_lto
*src_data
,
241 modref_summary_lto
*dst_data
);
242 static modref_summaries_lto
*create_ggc (symbol_table
*symtab
)
244 return new (ggc_alloc_no_dtor
<modref_summaries_lto
> ())
245 modref_summaries_lto (symtab
);
250 /* Global variable holding all modref summaries
251 (from analysis to IPA propagation time). */
253 static GTY(()) fast_function_summary
<modref_summary
*, va_gc
>
256 /* Global variable holding all modref optimization summaries
257 (from IPA propagation time or used by local optimization pass). */
259 static GTY(()) fast_function_summary
<modref_summary
*, va_gc
>
260 *optimization_summaries
;
262 /* LTO summaries hold info from analysis to LTO streaming or from LTO
263 stream-in through propagation to LTO stream-out. */
265 static GTY(()) fast_function_summary
<modref_summary_lto
*, va_gc
>
268 /* Summary for a single function which this pass produces. */
270 modref_summary::modref_summary ()
271 : loads (NULL
), stores (NULL
), writes_errno (NULL
)
275 modref_summary::~modref_summary ()
283 /* All flags that are implied by the ECF_CONST functions. */
284 const int implicit_const_eaf_flags
= EAF_DIRECT
| EAF_NOCLOBBER
| EAF_NOESCAPE
285 | EAF_NODIRECTESCAPE
| EAF_NOREAD
;
286 /* All flags that are implied by the ECF_PURE function. */
287 const int implicit_pure_eaf_flags
= EAF_NOCLOBBER
| EAF_NOESCAPE
288 | EAF_NODIRECTESCAPE
;
289 /* All flags implied when we know we can ignore stores (i.e. when handling
290 call to noreturn). */
291 const int ignore_stores_eaf_flags
= EAF_DIRECT
| EAF_NOCLOBBER
| EAF_NOESCAPE
292 | EAF_NODIRECTESCAPE
;
294 /* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
295 useful to track. If returns_void is true moreover clear
298 remove_useless_eaf_flags (int eaf_flags
, int ecf_flags
, bool returns_void
)
300 if (ecf_flags
& ECF_NOVOPS
)
302 if (ecf_flags
& ECF_CONST
)
303 eaf_flags
&= ~implicit_const_eaf_flags
;
304 else if (ecf_flags
& ECF_PURE
)
305 eaf_flags
&= ~implicit_pure_eaf_flags
;
306 else if ((ecf_flags
& ECF_NORETURN
) || returns_void
)
307 eaf_flags
&= ~EAF_NOT_RETURNED
;
308 /* Only NOCLOBBER or DIRECT flags alone are not useful (see comments
309 in tree-ssa-alias.c). Give up earlier. */
310 if ((eaf_flags
& ~(EAF_DIRECT
| EAF_NOCLOBBER
)) == 0)
315 /* Return true if FLAGS holds some useful information. */
318 eaf_flags_useful_p (vec
<eaf_flags_t
> &flags
, int ecf_flags
)
320 for (unsigned i
= 0; i
< flags
.length (); i
++)
321 if (remove_useless_eaf_flags (flags
[i
], ecf_flags
, false))
326 /* Return true if summary is potentially useful for optimization.
327 If CHECK_FLAGS is false assume that arg_flags are useful. */
330 modref_summary::useful_p (int ecf_flags
, bool check_flags
)
332 if (ecf_flags
& ECF_NOVOPS
)
334 if (arg_flags
.length () && !check_flags
)
336 if (check_flags
&& eaf_flags_useful_p (arg_flags
, ecf_flags
))
338 arg_flags
.release ();
339 if (ecf_flags
& ECF_CONST
)
341 if (loads
&& !loads
->every_base
)
343 if (ecf_flags
& ECF_PURE
)
345 return stores
&& !stores
->every_base
;
348 /* Single function summary used for LTO. */
350 typedef modref_tree
<tree
> modref_records_lto
;
351 struct GTY(()) modref_summary_lto
353 /* Load and stores in functions using types rather then alias sets.
355 This is necessary to make the information streamable for LTO but is also
356 more verbose and thus more likely to hit the limits. */
357 modref_records_lto
*loads
;
358 modref_records_lto
*stores
;
359 auto_vec
<eaf_flags_t
> GTY((skip
)) arg_flags
;
362 modref_summary_lto ();
363 ~modref_summary_lto ();
365 bool useful_p (int ecf_flags
, bool check_flags
= true);
368 /* Summary for a single function which this pass produces. */
370 modref_summary_lto::modref_summary_lto ()
371 : loads (NULL
), stores (NULL
), writes_errno (NULL
)
375 modref_summary_lto::~modref_summary_lto ()
384 /* Return true if lto summary is potentially useful for optimization.
385 If CHECK_FLAGS is false assume that arg_flags are useful. */
388 modref_summary_lto::useful_p (int ecf_flags
, bool check_flags
)
390 if (ecf_flags
& ECF_NOVOPS
)
392 if (arg_flags
.length () && !check_flags
)
394 if (check_flags
&& eaf_flags_useful_p (arg_flags
, ecf_flags
))
396 arg_flags
.release ();
397 if (ecf_flags
& ECF_CONST
)
399 if (loads
&& !loads
->every_base
)
401 if (ecf_flags
& ECF_PURE
)
403 return stores
&& !stores
->every_base
;
409 dump_access (modref_access_node
*a
, FILE *out
)
411 fprintf (out
, " access:");
412 if (a
->parm_index
!= -1)
414 fprintf (out
, " Parm %i", a
->parm_index
);
415 if (a
->parm_offset_known
)
417 fprintf (out
, " param offset:");
418 print_dec ((poly_int64_pod
)a
->parm_offset
, out
, SIGNED
);
421 if (a
->range_info_useful_p ())
423 fprintf (out
, " offset:");
424 print_dec ((poly_int64_pod
)a
->offset
, out
, SIGNED
);
425 fprintf (out
, " size:");
426 print_dec ((poly_int64_pod
)a
->size
, out
, SIGNED
);
427 fprintf (out
, " max_size:");
428 print_dec ((poly_int64_pod
)a
->max_size
, out
, SIGNED
);
433 /* Dump records TT to OUT. */
436 dump_records (modref_records
*tt
, FILE *out
)
438 fprintf (out
, " Limits: %i bases, %i refs\n",
439 (int)tt
->max_bases
, (int)tt
->max_refs
);
442 fprintf (out
, " Every base\n");
446 modref_base_node
<alias_set_type
> *n
;
447 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
449 fprintf (out
, " Base %i: alias set %i\n", (int)i
, n
->base
);
452 fprintf (out
, " Every ref\n");
456 modref_ref_node
<alias_set_type
> *r
;
457 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
459 fprintf (out
, " Ref %i: alias set %i\n", (int)j
, r
->ref
);
462 fprintf (out
, " Every access\n");
466 modref_access_node
*a
;
467 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
468 dump_access (a
, out
);
473 /* Dump records TT to OUT. */
476 dump_lto_records (modref_records_lto
*tt
, FILE *out
)
478 fprintf (out
, " Limits: %i bases, %i refs\n",
479 (int)tt
->max_bases
, (int)tt
->max_refs
);
482 fprintf (out
, " Every base\n");
486 modref_base_node
<tree
> *n
;
487 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
489 fprintf (out
, " Base %i:", (int)i
);
490 print_generic_expr (dump_file
, n
->base
);
491 fprintf (out
, " (alias set %i)\n",
492 n
->base
? get_alias_set (n
->base
) : 0);
495 fprintf (out
, " Every ref\n");
499 modref_ref_node
<tree
> *r
;
500 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
502 fprintf (out
, " Ref %i:", (int)j
);
503 print_generic_expr (dump_file
, r
->ref
);
504 fprintf (out
, " (alias set %i)\n",
505 r
->ref
? get_alias_set (r
->ref
) : 0);
508 fprintf (out
, " Every access\n");
512 modref_access_node
*a
;
513 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
514 dump_access (a
, out
);
519 /* Dump all escape points of NODE to OUT. */
522 dump_modref_edge_summaries (FILE *out
, cgraph_node
*node
, int depth
)
525 if (!escape_summaries
)
527 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
529 class escape_summary
*sum
= escape_summaries
->get (e
);
532 fprintf (out
, "%*sIndirect call %i in %s escapes:",
533 depth
, "", i
, node
->dump_name ());
538 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
540 if (!e
->inline_failed
)
541 dump_modref_edge_summaries (out
, e
->callee
, depth
+ 1);
542 class escape_summary
*sum
= escape_summaries
->get (e
);
545 fprintf (out
, "%*sCall %s->%s escapes:", depth
, "",
546 node
->dump_name (), e
->callee
->dump_name ());
549 class fnspec_summary
*fsum
= fnspec_summaries
->get (e
);
552 fprintf (out
, "%*sCall %s->%s fnspec: %s\n", depth
, "",
553 node
->dump_name (), e
->callee
->dump_name (),
559 /* Remove all call edge summaries associated with NODE. */
562 remove_modref_edge_summaries (cgraph_node
*node
)
564 if (!escape_summaries
)
566 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
567 escape_summaries
->remove (e
);
568 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
570 if (!e
->inline_failed
)
571 remove_modref_edge_summaries (e
->callee
);
572 escape_summaries
->remove (e
);
573 fnspec_summaries
->remove (e
);
580 modref_summary::dump (FILE *out
)
584 fprintf (out
, " loads:\n");
585 dump_records (loads
, out
);
589 fprintf (out
, " stores:\n");
590 dump_records (stores
, out
);
593 fprintf (out
, " Writes errno\n");
594 if (arg_flags
.length ())
596 for (unsigned int i
= 0; i
< arg_flags
.length (); i
++)
599 fprintf (out
, " parm %i flags:", i
);
600 dump_eaf_flags (out
, arg_flags
[i
]);
608 modref_summary_lto::dump (FILE *out
)
610 fprintf (out
, " loads:\n");
611 dump_lto_records (loads
, out
);
612 fprintf (out
, " stores:\n");
613 dump_lto_records (stores
, out
);
615 fprintf (out
, " Writes errno\n");
616 if (arg_flags
.length ())
618 for (unsigned int i
= 0; i
< arg_flags
.length (); i
++)
621 fprintf (out
, " parm %i flags:", i
);
622 dump_eaf_flags (out
, arg_flags
[i
]);
627 /* Get function summary for FUNC if it exists, return NULL otherwise. */
630 get_modref_function_summary (cgraph_node
*func
)
632 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
633 if (!optimization_summaries
)
636 /* A single function body may be represented by multiple symbols with
637 different visibility. For example, if FUNC is an interposable alias,
638 we don't want to return anything, even if we have summary for the target
640 enum availability avail
;
641 func
= func
->function_or_virtual_thunk_symbol
642 (&avail
, current_function_decl
?
643 cgraph_node::get (current_function_decl
) : NULL
);
644 if (avail
<= AVAIL_INTERPOSABLE
)
647 modref_summary
*r
= optimization_summaries
->get (func
);
651 /* Construct modref_access_node from REF. */
652 static modref_access_node
653 get_access (ao_ref
*ref
)
657 base
= ao_ref_base (ref
);
658 modref_access_node a
= {ref
->offset
, ref
->size
, ref
->max_size
,
660 if (TREE_CODE (base
) == MEM_REF
|| TREE_CODE (base
) == TARGET_MEM_REF
)
663 base
= TREE_OPERAND (base
, 0);
664 if (TREE_CODE (base
) == SSA_NAME
665 && SSA_NAME_IS_DEFAULT_DEF (base
)
666 && TREE_CODE (SSA_NAME_VAR (base
)) == PARM_DECL
)
669 for (tree t
= DECL_ARGUMENTS (current_function_decl
);
670 t
!= SSA_NAME_VAR (base
); t
= DECL_CHAIN (t
))
679 if (TREE_CODE (memref
) == MEM_REF
)
682 = wi::to_poly_wide (TREE_OPERAND
683 (memref
, 1)).to_shwi (&a
.parm_offset
);
686 a
.parm_offset_known
= false;
696 /* Record access into the modref_records data structure. */
699 record_access (modref_records
*tt
, ao_ref
*ref
)
701 alias_set_type base_set
= !flag_strict_aliasing
? 0
702 : ao_ref_base_alias_set (ref
);
703 alias_set_type ref_set
= !flag_strict_aliasing
? 0
704 : (ao_ref_alias_set (ref
));
705 modref_access_node a
= get_access (ref
);
708 fprintf (dump_file
, " - Recording base_set=%i ref_set=%i parm=%i\n",
709 base_set
, ref_set
, a
.parm_index
);
711 tt
->insert (base_set
, ref_set
, a
);
714 /* IPA version of record_access_tree. */
717 record_access_lto (modref_records_lto
*tt
, ao_ref
*ref
)
719 /* get_alias_set sometimes use different type to compute the alias set
720 than TREE_TYPE (base). Do same adjustments. */
721 tree base_type
= NULL_TREE
, ref_type
= NULL_TREE
;
722 if (flag_strict_aliasing
)
727 while (handled_component_p (base
))
728 base
= TREE_OPERAND (base
, 0);
730 base_type
= reference_alias_ptr_type_1 (&base
);
733 base_type
= TREE_TYPE (base
);
735 base_type
= TYPE_REF_CAN_ALIAS_ALL (base_type
)
736 ? NULL_TREE
: TREE_TYPE (base_type
);
738 tree ref_expr
= ref
->ref
;
739 ref_type
= reference_alias_ptr_type_1 (&ref_expr
);
742 ref_type
= TREE_TYPE (ref_expr
);
744 ref_type
= TYPE_REF_CAN_ALIAS_ALL (ref_type
)
745 ? NULL_TREE
: TREE_TYPE (ref_type
);
747 /* Sanity check that we are in sync with what get_alias_set does. */
748 gcc_checking_assert ((!base_type
&& !ao_ref_base_alias_set (ref
))
749 || get_alias_set (base_type
)
750 == ao_ref_base_alias_set (ref
));
751 gcc_checking_assert ((!ref_type
&& !ao_ref_alias_set (ref
))
752 || get_alias_set (ref_type
)
753 == ao_ref_alias_set (ref
));
755 /* Do not bother to record types that have no meaningful alias set.
756 Also skip variably modified types since these go to local streams. */
757 if (base_type
&& (!get_alias_set (base_type
)
758 || variably_modified_type_p (base_type
, NULL_TREE
)))
759 base_type
= NULL_TREE
;
760 if (ref_type
&& (!get_alias_set (ref_type
)
761 || variably_modified_type_p (ref_type
, NULL_TREE
)))
762 ref_type
= NULL_TREE
;
764 modref_access_node a
= get_access (ref
);
767 fprintf (dump_file
, " - Recording base type:");
768 print_generic_expr (dump_file
, base_type
);
769 fprintf (dump_file
, " (alias set %i) ref type:",
770 base_type
? get_alias_set (base_type
) : 0);
771 print_generic_expr (dump_file
, ref_type
);
772 fprintf (dump_file
, " (alias set %i) parm:%i\n",
773 ref_type
? get_alias_set (ref_type
) : 0,
777 tt
->insert (base_type
, ref_type
, a
);
780 /* Returns true if and only if we should store the access to EXPR.
781 Some accesses, e.g. loads from automatic variables, are not interesting. */
784 record_access_p (tree expr
)
786 if (refs_local_or_readonly_memory_p (expr
))
789 fprintf (dump_file
, " - Read-only or local, ignoring.\n");
795 /* Return true if ECF flags says that return value can be ignored. */
798 ignore_retval_p (tree caller
, int flags
)
800 if ((flags
& (ECF_NORETURN
| ECF_NOTHROW
)) == (ECF_NORETURN
| ECF_NOTHROW
)
801 || (!opt_for_fn (caller
, flag_exceptions
) && (flags
& ECF_NORETURN
)))
806 /* Return true if ECF flags says that stores can be ignored. */
809 ignore_stores_p (tree caller
, int flags
)
811 if (flags
& (ECF_PURE
| ECF_CONST
| ECF_NOVOPS
))
813 if ((flags
& (ECF_NORETURN
| ECF_NOTHROW
)) == (ECF_NORETURN
| ECF_NOTHROW
)
814 || (!opt_for_fn (caller
, flag_exceptions
) && (flags
& ECF_NORETURN
)))
819 /* Determine parm_map for argument I of STMT. */
822 parm_map_for_arg (gimple
*stmt
, int i
)
824 tree op
= gimple_call_arg (stmt
, i
);
827 struct modref_parm_map parm_map
;
829 parm_map
.parm_offset_known
= false;
830 parm_map
.parm_offset
= 0;
832 offset_known
= unadjusted_ptr_and_unit_offset (op
, &op
, &offset
);
833 if (TREE_CODE (op
) == SSA_NAME
834 && SSA_NAME_IS_DEFAULT_DEF (op
)
835 && TREE_CODE (SSA_NAME_VAR (op
)) == PARM_DECL
)
838 for (tree t
= DECL_ARGUMENTS (current_function_decl
);
839 t
!= SSA_NAME_VAR (op
); t
= DECL_CHAIN (t
))
848 parm_map
.parm_index
= index
;
849 parm_map
.parm_offset_known
= offset_known
;
850 parm_map
.parm_offset
= offset
;
852 else if (points_to_local_or_readonly_memory_p (op
))
853 parm_map
.parm_index
= -2;
855 parm_map
.parm_index
= -1;
859 /* Merge side effects of call STMT to function with CALLEE_SUMMARY
860 int CUR_SUMMARY. Return true if something changed.
861 If IGNORE_STORES is true, do not merge stores. */
864 merge_call_side_effects (modref_summary
*cur_summary
,
865 gimple
*stmt
, modref_summary
*callee_summary
,
866 bool ignore_stores
, cgraph_node
*callee_node
)
868 auto_vec
<modref_parm_map
, 32> parm_map
;
869 bool changed
= false;
871 /* We can not safely optimize based on summary of callee if it does
872 not always bind to current def: it is possible that memory load
873 was optimized out earlier which may not happen in the interposed
875 if (!callee_node
->binds_to_current_def_p ())
878 fprintf (dump_file
, " - May be interposed: collapsing loads.\n");
879 cur_summary
->loads
->collapse ();
883 fprintf (dump_file
, " - Merging side effects of %s with parm map:",
884 callee_node
->dump_name ());
886 parm_map
.safe_grow_cleared (gimple_call_num_args (stmt
), true);
887 for (unsigned i
= 0; i
< gimple_call_num_args (stmt
); i
++)
889 parm_map
[i
] = parm_map_for_arg (stmt
, i
);
892 fprintf (dump_file
, " %i", parm_map
[i
].parm_index
);
893 if (parm_map
[i
].parm_offset_known
)
895 fprintf (dump_file
, " offset:");
896 print_dec ((poly_int64_pod
)parm_map
[i
].parm_offset
,
902 fprintf (dump_file
, "\n");
904 /* Merge with callee's summary. */
905 changed
|= cur_summary
->loads
->merge (callee_summary
->loads
, &parm_map
);
908 changed
|= cur_summary
->stores
->merge (callee_summary
->stores
,
910 if (!cur_summary
->writes_errno
911 && callee_summary
->writes_errno
)
913 cur_summary
->writes_errno
= true;
920 /* Return access mode for argument I of call STMT with FNSPEC. */
922 static modref_access_node
923 get_access_for_fnspec (gcall
*call
, attr_fnspec
&fnspec
,
924 unsigned int i
, modref_parm_map
&map
)
926 tree size
= NULL_TREE
;
927 unsigned int size_arg
;
929 if (!fnspec
.arg_specified_p (i
))
931 else if (fnspec
.arg_max_access_size_given_by_arg_p (i
, &size_arg
))
932 size
= gimple_call_arg (call
, size_arg
);
933 else if (fnspec
.arg_access_size_given_by_type_p (i
))
935 tree callee
= gimple_call_fndecl (call
);
936 tree t
= TYPE_ARG_TYPES (TREE_TYPE (callee
));
938 for (unsigned int p
= 0; p
< i
; p
++)
940 size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t
)));
942 modref_access_node a
= {0, -1, -1,
943 map
.parm_offset
, map
.parm_index
,
944 map
.parm_offset_known
};
947 && poly_int_tree_p (size
, &size_hwi
)
948 && coeffs_in_range_p (size_hwi
, 0,
949 HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
))
952 a
.max_size
= size_hwi
<< LOG2_BITS_PER_UNIT
;
957 /* Collapse loads and return true if something changed. */
960 collapse_loads (modref_summary
*cur_summary
,
961 modref_summary_lto
*cur_summary_lto
)
963 bool changed
= false;
965 if (cur_summary
&& !cur_summary
->loads
->every_base
)
967 cur_summary
->loads
->collapse ();
971 && !cur_summary_lto
->loads
->every_base
)
973 cur_summary_lto
->loads
->collapse ();
979 /* Collapse loads and return true if something changed. */
982 collapse_stores (modref_summary
*cur_summary
,
983 modref_summary_lto
*cur_summary_lto
)
985 bool changed
= false;
987 if (cur_summary
&& !cur_summary
->stores
->every_base
)
989 cur_summary
->stores
->collapse ();
993 && !cur_summary_lto
->stores
->every_base
)
995 cur_summary_lto
->stores
->collapse ();
1002 /* Apply side effects of call STMT to CUR_SUMMARY using FNSPEC.
1003 If IGNORE_STORES is true ignore them.
1004 Return false if no useful summary can be produced. */
1007 process_fnspec (modref_summary
*cur_summary
,
1008 modref_summary_lto
*cur_summary_lto
,
1009 gcall
*call
, bool ignore_stores
)
1011 attr_fnspec fnspec
= gimple_call_fnspec (call
);
1012 if (!fnspec
.known_p ())
1014 if (dump_file
&& gimple_call_builtin_p (call
, BUILT_IN_NORMAL
))
1015 fprintf (dump_file
, " Builtin with no fnspec: %s\n",
1016 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call
))));
1019 collapse_loads (cur_summary
, cur_summary_lto
);
1024 if (fnspec
.global_memory_read_p ())
1025 collapse_loads (cur_summary
, cur_summary_lto
);
1028 for (unsigned int i
= 0; i
< gimple_call_num_args (call
); i
++)
1029 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, i
))))
1031 else if (!fnspec
.arg_specified_p (i
)
1032 || fnspec
.arg_maybe_read_p (i
))
1034 modref_parm_map map
= parm_map_for_arg (call
, i
);
1036 if (map
.parm_index
== -2)
1038 if (map
.parm_index
== -1)
1040 collapse_loads (cur_summary
, cur_summary_lto
);
1044 cur_summary
->loads
->insert (0, 0,
1045 get_access_for_fnspec (call
,
1048 if (cur_summary_lto
)
1049 cur_summary_lto
->loads
->insert (0, 0,
1050 get_access_for_fnspec (call
,
1057 if (fnspec
.global_memory_written_p ())
1058 collapse_stores (cur_summary
, cur_summary_lto
);
1061 for (unsigned int i
= 0; i
< gimple_call_num_args (call
); i
++)
1062 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, i
))))
1064 else if (!fnspec
.arg_specified_p (i
)
1065 || fnspec
.arg_maybe_written_p (i
))
1067 modref_parm_map map
= parm_map_for_arg (call
, i
);
1069 if (map
.parm_index
== -2)
1071 if (map
.parm_index
== -1)
1073 collapse_stores (cur_summary
, cur_summary_lto
);
1077 cur_summary
->stores
->insert (0, 0,
1078 get_access_for_fnspec (call
,
1081 if (cur_summary_lto
)
1082 cur_summary_lto
->stores
->insert (0, 0,
1083 get_access_for_fnspec (call
,
1087 if (fnspec
.errno_maybe_written_p () && flag_errno_math
)
1090 cur_summary
->writes_errno
= true;
1091 if (cur_summary_lto
)
1092 cur_summary_lto
->writes_errno
= true;
1098 /* Analyze function call STMT in function F.
1099 Remember recursive calls in RECURSIVE_CALLS. */
1102 analyze_call (modref_summary
*cur_summary
, modref_summary_lto
*cur_summary_lto
,
1103 gcall
*stmt
, vec
<gimple
*> *recursive_calls
)
1105 /* Check flags on the function call. In certain cases, analysis can be
1107 int flags
= gimple_call_flags (stmt
);
1108 if (flags
& (ECF_CONST
| ECF_NOVOPS
))
1112 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
1113 "except for args.\n");
1117 /* Pure functions do not affect global memory. Stores by functions which are
1118 noreturn and do not throw can safely be ignored. */
1119 bool ignore_stores
= ignore_stores_p (current_function_decl
, flags
);
1121 /* Next, we try to get the callee's function declaration. The goal is to
1122 merge their summary with ours. */
1123 tree callee
= gimple_call_fndecl (stmt
);
1125 /* Check if this is an indirect call. */
1129 fprintf (dump_file
, gimple_call_internal_p (stmt
)
1130 ? " - Internal call" : " - Indirect call.\n");
1131 return process_fnspec (cur_summary
, cur_summary_lto
, stmt
, ignore_stores
);
1133 /* We only need to handle internal calls in IPA mode. */
1134 gcc_checking_assert (!cur_summary_lto
);
1136 struct cgraph_node
*callee_node
= cgraph_node::get_create (callee
);
1138 /* If this is a recursive call, the target summary is the same as ours, so
1139 there's nothing to do. */
1140 if (recursive_call_p (current_function_decl
, callee
))
1142 recursive_calls
->safe_push (stmt
);
1144 fprintf (dump_file
, " - Skipping recursive call.\n");
1148 gcc_assert (callee_node
!= NULL
);
1150 /* Get the function symbol and its availability. */
1151 enum availability avail
;
1152 callee_node
= callee_node
->function_symbol (&avail
);
1153 if (avail
<= AVAIL_INTERPOSABLE
)
1156 fprintf (dump_file
, " - Function availability <= AVAIL_INTERPOSABLE.\n");
1157 return process_fnspec (cur_summary
, cur_summary_lto
, stmt
, ignore_stores
);
1160 /* Get callee's modref summary. As above, if there's no summary, we either
1161 have to give up or, if stores are ignored, we can just purge loads. */
1162 modref_summary
*callee_summary
= optimization_summaries
->get (callee_node
);
1163 if (!callee_summary
)
1166 fprintf (dump_file
, " - No modref summary available for callee.\n");
1167 return process_fnspec (cur_summary
, cur_summary_lto
, stmt
, ignore_stores
);
1170 merge_call_side_effects (cur_summary
, stmt
, callee_summary
, ignore_stores
,
1176 /* Support analysis in non-lto and lto mode in parallel. */
1180 struct modref_summary
*nolto
;
1181 struct modref_summary_lto
*lto
;
1184 /* Helper for analyze_stmt. */
1187 analyze_load (gimple
*, tree
, tree op
, void *data
)
1189 modref_summary
*summary
= ((summary_ptrs
*)data
)->nolto
;
1190 modref_summary_lto
*summary_lto
= ((summary_ptrs
*)data
)->lto
;
1194 fprintf (dump_file
, " - Analyzing load: ");
1195 print_generic_expr (dump_file
, op
);
1196 fprintf (dump_file
, "\n");
1199 if (!record_access_p (op
))
1203 ao_ref_init (&r
, op
);
1206 record_access (summary
->loads
, &r
);
1208 record_access_lto (summary_lto
->loads
, &r
);
1212 /* Helper for analyze_stmt. */
1215 analyze_store (gimple
*, tree
, tree op
, void *data
)
1217 modref_summary
*summary
= ((summary_ptrs
*)data
)->nolto
;
1218 modref_summary_lto
*summary_lto
= ((summary_ptrs
*)data
)->lto
;
1222 fprintf (dump_file
, " - Analyzing store: ");
1223 print_generic_expr (dump_file
, op
);
1224 fprintf (dump_file
, "\n");
1227 if (!record_access_p (op
))
1231 ao_ref_init (&r
, op
);
1234 record_access (summary
->stores
, &r
);
1236 record_access_lto (summary_lto
->stores
, &r
);
1240 /* Analyze statement STMT of function F.
1241 If IPA is true do not merge in side effects of calls. */
1244 analyze_stmt (modref_summary
*summary
, modref_summary_lto
*summary_lto
,
1245 gimple
*stmt
, bool ipa
, vec
<gimple
*> *recursive_calls
)
1247 /* In general we can not ignore clobbers because they are barriers for code
1248 motion, however after inlining it is safe to do because local optimization
1249 passes do not consider clobbers from other functions.
1250 Similar logic is in ipa-pure-const.c. */
1251 if ((ipa
|| cfun
->after_inlining
) && gimple_clobber_p (stmt
))
1254 struct summary_ptrs sums
= {summary
, summary_lto
};
1256 /* Analyze all loads and stores in STMT. */
1257 walk_stmt_load_store_ops (stmt
, &sums
,
1258 analyze_load
, analyze_store
);
1260 switch (gimple_code (stmt
))
1263 /* If the ASM statement does not read nor write memory, there's nothing
1264 to do. Otherwise just give up. */
1265 if (!gimple_asm_clobbers_memory_p (as_a
<gasm
*> (stmt
)))
1268 fprintf (dump_file
, " - Function contains GIMPLE_ASM statement "
1269 "which clobbers memory.\n");
1272 if (!ipa
|| gimple_call_internal_p (stmt
))
1273 return analyze_call (summary
, summary_lto
,
1274 as_a
<gcall
*> (stmt
), recursive_calls
);
1277 attr_fnspec fnspec
= gimple_call_fnspec (as_a
<gcall
*>(stmt
));
1279 if (fnspec
.known_p ()
1280 && (!fnspec
.global_memory_read_p ()
1281 || !fnspec
.global_memory_written_p ()))
1283 cgraph_edge
*e
= cgraph_node::get (current_function_decl
)->get_edge (stmt
);
1286 fnspec_summaries
->get_create (e
)->fnspec
= xstrdup (fnspec
.get_str ());
1288 fprintf (dump_file
, " Recorded fnspec %s\n", fnspec
.get_str ());
1294 /* Nothing to do for other types of statements. */
1299 /* Remove summary of current function because during the function body
1300 scan we determined it is not useful. LTO, NOLTO and IPA determines the
1304 remove_summary (bool lto
, bool nolto
, bool ipa
)
1306 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
1308 optimization_summaries
->remove (fnode
);
1312 summaries
->remove (fnode
);
1314 summaries_lto
->remove (fnode
);
1315 remove_modref_edge_summaries (fnode
);
1319 " - modref done with result: not tracked.\n");
1322 /* Return true if OP accesses memory pointed to by SSA_NAME. */
1325 memory_access_to (tree op
, tree ssa_name
)
1327 tree base
= get_base_address (op
);
1330 if (TREE_CODE (base
) != MEM_REF
&& TREE_CODE (base
) != TARGET_MEM_REF
)
1332 return TREE_OPERAND (base
, 0) == ssa_name
;
1335 /* Consider statement val = *arg.
1336 return EAF flags of ARG that can be determined from EAF flags of VAL
1337 (which are known to be FLAGS). If IGNORE_STORES is true we can ignore
1338 all stores to VAL, i.e. when handling noreturn function. */
1341 deref_flags (int flags
, bool ignore_stores
)
1343 int ret
= EAF_NODIRECTESCAPE
;
1344 /* If argument is unused just account for
1345 the read involved in dereference. */
1346 if (flags
& EAF_UNUSED
)
1347 ret
|= EAF_DIRECT
| EAF_NOCLOBBER
| EAF_NOESCAPE
| EAF_NOT_RETURNED
;
1350 if ((flags
& EAF_NOCLOBBER
) || ignore_stores
)
1351 ret
|= EAF_NOCLOBBER
;
1352 if ((flags
& EAF_NOESCAPE
) || ignore_stores
)
1353 ret
|= EAF_NOESCAPE
;
1354 /* If the value dereferenced is not used for another load or store
1355 we can still consider ARG as used only directly.
1366 if ((flags
& (EAF_NOREAD
| EAF_NOT_RETURNED
| EAF_NOESCAPE
| EAF_DIRECT
))
1367 == (EAF_NOREAD
| EAF_NOT_RETURNED
| EAF_NOESCAPE
| EAF_DIRECT
)
1368 && ((flags
& EAF_NOCLOBBER
) || ignore_stores
))
1370 if (flags
& EAF_NOT_RETURNED
)
1371 ret
|= EAF_NOT_RETURNED
;
1378 /* Description of an escape point. */
1382 /* Value escapes to this call. */
1384 /* Argument it escapes to. */
1386 /* Flags already known about the argument (this can save us from recording
1387 esape points if local analysis did good job already). */
1388 eaf_flags_t min_flags
;
1389 /* Does value escape directly or indiretly? */
1393 class modref_lattice
1396 /* EAF flags of the SSA name. */
1398 /* DFS bookkkeeping: we don't do real dataflow yet. */
1402 /* When doing IPA analysis we can not merge in callee escape points;
1403 Only remember them and do the merging at IPA propagation time. */
1404 vec
<escape_point
, va_heap
, vl_ptr
> escape_points
;
1408 bool merge (const modref_lattice
&with
);
1409 bool merge (int flags
);
1410 bool merge_deref (const modref_lattice
&with
, bool ignore_stores
);
1411 bool merge_direct_load ();
1412 bool merge_direct_store ();
1413 bool add_escape_point (gcall
*call
, int arg
, int min_flags
, bool diret
);
1414 void dump (FILE *out
, int indent
= 0) const;
1417 /* Lattices are saved to vectors, so keep them PODs. */
1419 modref_lattice::init ()
1421 /* All flags we track. */
1422 int f
= EAF_DIRECT
| EAF_NOCLOBBER
| EAF_NOESCAPE
| EAF_UNUSED
1423 | EAF_NODIRECTESCAPE
| EAF_NOT_RETURNED
| EAF_NOREAD
;
1425 /* Check that eaf_flags_t is wide enough to hold all flags. */
1426 gcc_checking_assert (f
== flags
);
1431 /* Release memory. */
1433 modref_lattice::release ()
1435 escape_points
.release ();
1438 /* Dump lattice to OUT; indent with INDENT spaces. */
1441 modref_lattice::dump (FILE *out
, int indent
) const
1443 dump_eaf_flags (out
, flags
);
1444 if (escape_points
.length ())
1446 fprintf (out
, "%*sEscapes:\n", indent
, "");
1447 for (unsigned int i
= 0; i
< escape_points
.length (); i
++)
1449 fprintf (out
, "%*s Arg %i (%s) min flags", indent
, "",
1450 escape_points
[i
].arg
,
1451 escape_points
[i
].direct
? "direct" : "indirect");
1452 dump_eaf_flags (out
, escape_points
[i
].min_flags
, false);
1453 fprintf (out
, " in call ");
1454 print_gimple_stmt (out
, escape_points
[i
].call
, 0);
1459 /* Add escape point CALL, ARG, MIN_FLAGS, DIRECT. Return false if such escape
1463 modref_lattice::add_escape_point (gcall
*call
, int arg
, int min_flags
,
1469 /* If we already determined flags to be bad enough,
1470 we do not need to record. */
1471 if ((flags
& min_flags
) == flags
|| (min_flags
& EAF_UNUSED
))
1474 FOR_EACH_VEC_ELT (escape_points
, i
, ep
)
1475 if (ep
->call
== call
&& ep
->arg
== arg
&& ep
->direct
== direct
)
1477 if ((ep
->min_flags
& min_flags
) == min_flags
)
1479 ep
->min_flags
&= min_flags
;
1482 /* Give up if max escape points is met. */
1483 if ((int)escape_points
.length () > param_modref_max_escape_points
)
1486 fprintf (dump_file
, "--param modref-max-escape-points limit reached\n");
1490 escape_point new_ep
= {call
, arg
, min_flags
, direct
};
1491 escape_points
.safe_push (new_ep
);
1495 /* Merge in flags from F. */
1497 modref_lattice::merge (int f
)
1501 /* Noescape implies that value also does not escape directly.
1502 Fnspec machinery does set both so compensate for this. */
1503 if (f
& EAF_NOESCAPE
)
1504 f
|= EAF_NODIRECTESCAPE
;
1505 if ((flags
& f
) != flags
)
1508 /* Prune obvoiusly useless flags;
1509 We do not have ECF_FLAGS handy which is not big problem since
1510 we will do final flags cleanup before producing summary.
1511 Merging should be fast so it can work well with dataflow. */
1512 flags
= remove_useless_eaf_flags (flags
, 0, false);
1514 escape_points
.release ();
1520 /* Merge in WITH. Return true if anyting changed. */
1523 modref_lattice::merge (const modref_lattice
&with
)
1528 bool changed
= merge (with
.flags
);
1532 for (unsigned int i
= 0; i
< with
.escape_points
.length (); i
++)
1533 changed
|= add_escape_point (with
.escape_points
[i
].call
,
1534 with
.escape_points
[i
].arg
,
1535 with
.escape_points
[i
].min_flags
,
1536 with
.escape_points
[i
].direct
);
1540 /* Merge in deref of WITH. If IGNORE_STORES is true do not consider
1541 stores. Return true if anyting changed. */
1544 modref_lattice::merge_deref (const modref_lattice
&with
, bool ignore_stores
)
1549 bool changed
= merge (deref_flags (with
.flags
, ignore_stores
));
1553 for (unsigned int i
= 0; i
< with
.escape_points
.length (); i
++)
1555 int min_flags
= with
.escape_points
[i
].min_flags
;
1557 if (with
.escape_points
[i
].direct
)
1558 min_flags
= deref_flags (min_flags
, ignore_stores
);
1559 else if (ignore_stores
)
1560 min_flags
|= ignore_stores_eaf_flags
;
1561 changed
|= add_escape_point (with
.escape_points
[i
].call
,
1562 with
.escape_points
[i
].arg
,
1569 /* Merge in flags for direct load. */
1572 modref_lattice::merge_direct_load ()
1574 return merge (~(EAF_UNUSED
| EAF_NOREAD
));
1577 /* Merge in flags for direct store. */
1580 modref_lattice::merge_direct_store ()
1582 return merge (~(EAF_UNUSED
| EAF_NOCLOBBER
));
1585 } /* ANON namespace. */
/* Forward declaration; the function is defined below and is mutually
   recursive with merge_call_lhs_flags.  */
1587 static void analyze_ssa_name_flags (tree name
,
1588 vec
<modref_lattice
> &lattice
,
1589 int depth
, bool ipa
);
1591 /* Call statements may return their parameters. Consider argument number
1592 ARG of USE_STMT and determine flags that can needs to be cleared
1593 in case pointer possibly indirectly references from ARG I is returned.
1594 LATTICE, DEPTH and ipa are same as in analyze_ssa_name_flags. */
1597 merge_call_lhs_flags (gcall
*call
, int arg
, int index
, bool deref
,
1598 vec
<modref_lattice
> &lattice
,
1599 int depth
, bool ipa
)
1601 /* If there is no return value, no flags are affected. */
1602 if (!gimple_call_lhs (call
))
1605 /* If we know that function returns given argument and it is not ARG
1606 we can still be happy. */
1607 int flags
= gimple_call_return_flags (call
);
1608 if ((flags
& ERF_RETURNS_ARG
)
1609 && (flags
& ERF_RETURN_ARG_MASK
) != arg
)
1612 if (gimple_call_arg_flags (call
, arg
) & (EAF_NOT_RETURNED
| EAF_UNUSED
))
1615 /* If return value is SSA name determine its flags. */
1616 if (TREE_CODE (gimple_call_lhs (call
)) == SSA_NAME
)
1618 tree lhs
= gimple_call_lhs (call
);
1619 analyze_ssa_name_flags (lhs
, lattice
, depth
+ 1, ipa
);
1621 lattice
[index
].merge_deref (lattice
[SSA_NAME_VERSION (lhs
)], false);
1623 lattice
[index
].merge (lattice
[SSA_NAME_VERSION (lhs
)]);
1625 /* In the case of memory store we can do nothing. */
1627 lattice
[index
].merge (0);
1630 /* Analyze EAF flags for SSA name NAME and store result to LATTICE.
1631 LATTICE is an array of modref_lattices.
1632 DEPTH is a recursion depth used to make debug output prettier.
1633 If IPA is true we analyze for IPA propagation (and thus call escape points
1634 are processed later) */
1637 analyze_ssa_name_flags (tree name
, vec
<modref_lattice
> &lattice
, int depth
,
1640 imm_use_iterator ui
;
1642 int index
= SSA_NAME_VERSION (name
);
1644 /* See if value is already computed. */
1645 if (lattice
[index
].known
)
1647 if (lattice
[index
].open
)
1651 "%*sGiving up on a cycle in SSA graph\n", depth
* 4, "");
1654 if (depth
== param_modref_max_depth
)
1658 "%*sGiving up on max depth\n", depth
* 4, "");
1661 /* Recursion guard. */
1662 lattice
[index
].init ();
1667 "%*sAnalyzing flags of ssa name: ", depth
* 4, "");
1668 print_generic_expr (dump_file
, name
);
1669 fprintf (dump_file
, "\n");
1672 FOR_EACH_IMM_USE_STMT (use_stmt
, ui
, name
)
1674 if (lattice
[index
].flags
== 0)
1676 if (is_gimple_debug (use_stmt
))
1680 fprintf (dump_file
, "%*s Analyzing stmt: ", depth
* 4, "");
1681 print_gimple_stmt (dump_file
, use_stmt
, 0);
1683 /* If we see a direct non-debug use, clear unused bit.
1684 All dereferneces should be accounted below using deref_flags. */
1685 lattice
[index
].merge (~EAF_UNUSED
);
1687 /* Gimple return may load the return value.
1688 Returning name counts as an use by tree-ssa-structalias.c */
1689 if (greturn
*ret
= dyn_cast
<greturn
*> (use_stmt
))
1691 if (gimple_return_retval (ret
) == name
)
1692 lattice
[index
].merge (~(EAF_UNUSED
| EAF_NOT_RETURNED
));
1693 else if (memory_access_to (gimple_return_retval (ret
), name
))
1695 lattice
[index
].merge_direct_load ();
1696 lattice
[index
].merge (~(EAF_UNUSED
| EAF_NOT_RETURNED
));
1699 /* Account for LHS store, arg loads and flags from callee function. */
1700 else if (gcall
*call
= dyn_cast
<gcall
*> (use_stmt
))
1702 tree callee
= gimple_call_fndecl (call
);
1704 /* IPA PTA internally it treats calling a function as "writing" to
1705 the argument space of all functions the function pointer points to
1706 (PR101949). We can not drop EAF_NOCLOBBER only when ipa-pta
1707 is on since that would allow propagation of this from -fno-ipa-pta
1708 to -fipa-pta functions. */
1709 if (gimple_call_fn (use_stmt
) == name
)
1710 lattice
[index
].merge (~EAF_NOCLOBBER
);
1712 /* Return slot optimization would require bit of propagation;
1714 if (gimple_call_return_slot_opt_p (call
)
1715 && gimple_call_lhs (call
) != NULL_TREE
1716 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call
))))
1719 fprintf (dump_file
, "%*s Unhandled return slot opt\n",
1721 lattice
[index
].merge (0);
1723 /* Recursion would require bit of propagation; give up for now. */
1724 else if (callee
&& !ipa
&& recursive_call_p (current_function_decl
,
1726 lattice
[index
].merge (0);
1729 int ecf_flags
= gimple_call_flags (call
);
1730 bool ignore_stores
= ignore_stores_p (current_function_decl
,
1732 bool ignore_retval
= ignore_retval_p (current_function_decl
,
1735 /* Handle *name = func (...). */
1736 if (gimple_call_lhs (call
)
1737 && memory_access_to (gimple_call_lhs (call
), name
))
1738 lattice
[index
].merge_direct_store ();
1740 /* We do not track accesses to the static chain (we could)
1742 if (gimple_call_chain (call
)
1743 && (gimple_call_chain (call
) == name
))
1744 lattice
[index
].merge (0);
1746 /* Process internal functions and right away. */
1747 bool record_ipa
= ipa
&& !gimple_call_internal_p (call
);
1749 /* Handle all function parameters. */
1750 for (unsigned i
= 0;
1751 i
< gimple_call_num_args (call
) && lattice
[index
].flags
; i
++)
1752 /* Name is directly passed to the callee. */
1753 if (gimple_call_arg (call
, i
) == name
)
1755 if (!(ecf_flags
& (ECF_CONST
| ECF_NOVOPS
)))
1757 int call_flags
= gimple_call_arg_flags (call
, i
)
1760 call_flags
|= ignore_stores_eaf_flags
;
1763 lattice
[index
].merge (call_flags
);
1765 lattice
[index
].add_escape_point (call
, i
,
1769 merge_call_lhs_flags (call
, i
, index
, false,
1770 lattice
, depth
, ipa
);
1772 /* Name is dereferenced and passed to a callee. */
1773 else if (memory_access_to (gimple_call_arg (call
, i
), name
))
1775 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
1776 lattice
[index
].merge_direct_load ();
1779 int call_flags
= deref_flags
1780 (gimple_call_arg_flags (call
, i
)
1781 | EAF_NOT_RETURNED
, ignore_stores
);
1783 lattice
[index
].merge (call_flags
);
1785 lattice
[index
].add_escape_point (call
, i
,
1789 merge_call_lhs_flags (call
, i
, index
, true,
1790 lattice
, depth
, ipa
);
1794 else if (gimple_assign_load_p (use_stmt
))
1796 gassign
*assign
= as_a
<gassign
*> (use_stmt
);
1797 /* Memory to memory copy. */
1798 if (gimple_store_p (assign
))
1800 /* Handle *lhs = *name.
1802 We do not track memory locations, so assume that value
1803 is used arbitrarily. */
1804 if (memory_access_to (gimple_assign_rhs1 (assign
), name
))
1805 lattice
[index
].merge (0);
1806 /* Handle *name = *exp. */
1807 else if (memory_access_to (gimple_assign_lhs (assign
), name
))
1808 lattice
[index
].merge_direct_store ();
1810 /* Handle lhs = *name. */
1811 else if (memory_access_to (gimple_assign_rhs1 (assign
), name
))
1813 tree lhs
= gimple_assign_lhs (assign
);
1814 analyze_ssa_name_flags (lhs
, lattice
, depth
+ 1, ipa
);
1815 lattice
[index
].merge_deref (lattice
[SSA_NAME_VERSION (lhs
)],
1819 else if (gimple_store_p (use_stmt
))
1821 gassign
*assign
= dyn_cast
<gassign
*> (use_stmt
);
1823 /* Handle *lhs = name. */
1824 if (assign
&& gimple_assign_rhs1 (assign
) == name
)
1827 fprintf (dump_file
, "%*s ssa name saved to memory\n",
1829 lattice
[index
].merge (0);
1831 /* Handle *name = exp. */
1833 && memory_access_to (gimple_assign_lhs (assign
), name
))
1835 /* In general we can not ignore clobbers because they are
1836 barriers for code motion, however after inlining it is safe to
1837 do because local optimization passes do not consider clobbers
1838 from other functions. Similar logic is in ipa-pure-const.c. */
1839 if (!cfun
->after_inlining
|| !gimple_clobber_p (assign
))
1840 lattice
[index
].merge_direct_store ();
1842 /* ASM statements etc. */
1846 fprintf (dump_file
, "%*s Unhandled store\n",
1848 lattice
[index
].merge (0);
1851 else if (gassign
*assign
= dyn_cast
<gassign
*> (use_stmt
))
1853 enum tree_code code
= gimple_assign_rhs_code (assign
);
1855 /* See if operation is a merge as considered by
1856 tree-ssa-structalias.c:find_func_aliases. */
1857 if (!truth_value_p (code
)
1858 && code
!= POINTER_DIFF_EXPR
1859 && (code
!= POINTER_PLUS_EXPR
1860 || gimple_assign_rhs1 (assign
) == name
))
1862 tree lhs
= gimple_assign_lhs (assign
);
1863 analyze_ssa_name_flags (lhs
, lattice
, depth
+ 1, ipa
);
1864 lattice
[index
].merge (lattice
[SSA_NAME_VERSION (lhs
)]);
1867 else if (gphi
*phi
= dyn_cast
<gphi
*> (use_stmt
))
1869 tree result
= gimple_phi_result (phi
);
1870 analyze_ssa_name_flags (result
, lattice
, depth
+ 1, ipa
);
1871 lattice
[index
].merge (lattice
[SSA_NAME_VERSION (result
)]);
1873 /* Conditions are not considered escape points
1874 by tree-ssa-structalias. */
1875 else if (gimple_code (use_stmt
) == GIMPLE_COND
)
1880 fprintf (dump_file
, "%*s Unhandled stmt\n", depth
* 4, "");
1881 lattice
[index
].merge (0);
1886 fprintf (dump_file
, "%*s current flags of ", depth
* 4, "");
1887 print_generic_expr (dump_file
, name
);
1888 lattice
[index
].dump (dump_file
, depth
* 4 + 4);
1893 fprintf (dump_file
, "%*sflags of ssa name ", depth
* 4, "");
1894 print_generic_expr (dump_file
, name
);
1895 lattice
[index
].dump (dump_file
, depth
* 4 + 2);
1897 lattice
[index
].open
= false;
1898 lattice
[index
].known
= true;
1901 /* Determine EAF flags for function parameters. */
1904 analyze_parms (modref_summary
*summary
, modref_summary_lto
*summary_lto
,
1907 unsigned int parm_index
= 0;
1908 unsigned int count
= 0;
1909 int ecf_flags
= flags_from_decl_or_type (current_function_decl
);
1911 /* For novops functions we have nothing to gain by EAF flags. */
1912 if (ecf_flags
& ECF_NOVOPS
)
1915 for (tree parm
= DECL_ARGUMENTS (current_function_decl
); parm
;
1916 parm
= TREE_CHAIN (parm
))
1922 auto_vec
<modref_lattice
> lattice
;
1923 lattice
.safe_grow_cleared (num_ssa_names
, true);
1925 for (tree parm
= DECL_ARGUMENTS (current_function_decl
); parm
; parm_index
++,
1926 parm
= TREE_CHAIN (parm
))
1928 tree name
= ssa_default_def (cfun
, parm
);
1929 if (!name
|| has_zero_uses (name
))
1931 /* We do not track non-SSA parameters,
1932 but we want to track unused gimple_regs. */
1933 if (!is_gimple_reg (parm
))
1937 if (parm_index
>= summary
->arg_flags
.length ())
1938 summary
->arg_flags
.safe_grow_cleared (count
, true);
1939 summary
->arg_flags
[parm_index
] = EAF_UNUSED
;
1941 else if (summary_lto
)
1943 if (parm_index
>= summary_lto
->arg_flags
.length ())
1944 summary_lto
->arg_flags
.safe_grow_cleared (count
, true);
1945 summary_lto
->arg_flags
[parm_index
] = EAF_UNUSED
;
1949 analyze_ssa_name_flags (name
, lattice
, 0, ipa
);
1950 int flags
= lattice
[SSA_NAME_VERSION (name
)].flags
;
1952 /* Eliminate useless flags so we do not end up storing unnecessary
1955 flags
= remove_useless_eaf_flags
1957 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
1963 if (parm_index
>= summary
->arg_flags
.length ())
1964 summary
->arg_flags
.safe_grow_cleared (count
, true);
1965 summary
->arg_flags
[parm_index
] = flags
;
1967 else if (summary_lto
)
1969 if (parm_index
>= summary_lto
->arg_flags
.length ())
1970 summary_lto
->arg_flags
.safe_grow_cleared (count
, true);
1971 summary_lto
->arg_flags
[parm_index
] = flags
;
1973 if (lattice
[SSA_NAME_VERSION (name
)].escape_points
.length ())
1977 cgraph_node
*node
= cgraph_node::get (current_function_decl
);
1979 gcc_checking_assert (ipa
);
1981 (lattice
[SSA_NAME_VERSION (name
)].escape_points
, ip
, ep
)
1982 if ((ep
->min_flags
& flags
) != flags
)
1984 cgraph_edge
*e
= node
->get_edge (ep
->call
);
1985 struct escape_entry ee
= {parm_index
, ep
->arg
,
1986 ep
->min_flags
, ep
->direct
};
1988 escape_summaries
->get_create (e
)->esc
.safe_push (ee
);
1994 for (unsigned int i
= 0; i
< num_ssa_names
; i
++)
1995 lattice
[i
].release ();
1998 /* Analyze function F. IPA indicates whether we're running in local mode
1999 (false) or the IPA mode (true). */
2002 analyze_function (function
*f
, bool ipa
)
2005 fprintf (dump_file
, "modref analyzing '%s' (ipa=%i)%s%s\n",
2006 function_name (f
), ipa
,
2007 TREE_READONLY (current_function_decl
) ? " (const)" : "",
2008 DECL_PURE_P (current_function_decl
) ? " (pure)" : "");
2010 /* Don't analyze this function if it's compiled with -fno-strict-aliasing. */
2011 if (!flag_ipa_modref
)
2014 /* Compute no-LTO summaries when local optimization is going to happen. */
2015 bool nolto
= (!ipa
|| ((!flag_lto
|| flag_fat_lto_objects
) && !in_lto_p
)
2016 || (in_lto_p
&& !flag_wpa
2017 && flag_incremental_link
!= INCREMENTAL_LINK_LTO
));
2018 /* Compute LTO when LTO streaming is going to happen. */
2019 bool lto
= ipa
&& ((flag_lto
&& !in_lto_p
)
2021 || flag_incremental_link
== INCREMENTAL_LINK_LTO
);
2022 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
2024 modref_summary
*summary
= NULL
;
2025 modref_summary_lto
*summary_lto
= NULL
;
2027 /* Initialize the summary.
2028 If we run in local mode there is possibly pre-existing summary from
2029 IPA pass. Dump it so it is easy to compare if mod-ref info has
2033 if (!optimization_summaries
)
2034 optimization_summaries
= modref_summaries::create_ggc (symtab
);
2035 else /* Remove existing summary if we are re-running the pass. */
2039 = optimization_summaries
->get (cgraph_node::get (f
->decl
)))
2043 fprintf (dump_file
, "Past summary:\n");
2044 optimization_summaries
->get
2045 (cgraph_node::get (f
->decl
))->dump (dump_file
);
2047 optimization_summaries
->remove (cgraph_node::get (f
->decl
));
2049 summary
= optimization_summaries
->get_create (cgraph_node::get (f
->decl
));
2050 gcc_checking_assert (nolto
&& !lto
);
2052 /* In IPA mode we analyze every function precisely once. Assert that. */
2058 summaries
= modref_summaries::create_ggc (symtab
);
2060 summaries
->remove (cgraph_node::get (f
->decl
));
2061 summary
= summaries
->get_create (cgraph_node::get (f
->decl
));
2066 summaries_lto
= modref_summaries_lto::create_ggc (symtab
);
2068 summaries_lto
->remove (cgraph_node::get (f
->decl
));
2069 summary_lto
= summaries_lto
->get_create (cgraph_node::get (f
->decl
));
2071 if (!fnspec_summaries
)
2072 fnspec_summaries
= new fnspec_summaries_t (symtab
);
2073 if (!escape_summaries
)
2074 escape_summaries
= new escape_summaries_t (symtab
);
2078 /* Create and initialize summary for F.
2079 Note that summaries may be already allocated from previous
2083 gcc_assert (!summary
->loads
);
2084 summary
->loads
= modref_records::create_ggc (param_modref_max_bases
,
2085 param_modref_max_refs
,
2086 param_modref_max_accesses
);
2087 gcc_assert (!summary
->stores
);
2088 summary
->stores
= modref_records::create_ggc (param_modref_max_bases
,
2089 param_modref_max_refs
,
2090 param_modref_max_accesses
);
2091 summary
->writes_errno
= false;
2095 gcc_assert (!summary_lto
->loads
);
2096 summary_lto
->loads
= modref_records_lto::create_ggc
2097 (param_modref_max_bases
,
2098 param_modref_max_refs
,
2099 param_modref_max_accesses
);
2100 gcc_assert (!summary_lto
->stores
);
2101 summary_lto
->stores
= modref_records_lto::create_ggc
2102 (param_modref_max_bases
,
2103 param_modref_max_refs
,
2104 param_modref_max_accesses
);
2105 summary_lto
->writes_errno
= false;
2108 analyze_parms (summary
, summary_lto
, ipa
);
2110 int ecf_flags
= flags_from_decl_or_type (current_function_decl
);
2111 auto_vec
<gimple
*, 32> recursive_calls
;
2113 /* Analyze each statement in each basic block of the function. If the
2114 statement cannot be analyzed (for any reason), the entire function cannot
2115 be analyzed by modref. */
2117 FOR_EACH_BB_FN (bb
, f
)
2119 gimple_stmt_iterator si
;
2120 for (si
= gsi_after_labels (bb
); !gsi_end_p (si
); gsi_next (&si
))
2122 if (!analyze_stmt (summary
, summary_lto
,
2123 gsi_stmt (si
), ipa
, &recursive_calls
)
2124 || ((!summary
|| !summary
->useful_p (ecf_flags
, false))
2126 || !summary_lto
->useful_p (ecf_flags
, false))))
2128 collapse_loads (summary
, summary_lto
);
2129 collapse_stores (summary
, summary_lto
);
2135 /* In non-IPA mode we need to perform iterative datafow on recursive calls.
2136 This needs to be done after all other side effects are computed. */
2139 bool changed
= true;
2143 for (unsigned i
= 0; i
< recursive_calls
.length (); i
++)
2145 changed
|= merge_call_side_effects
2146 (summary
, recursive_calls
[i
], summary
,
2147 ignore_stores_p (current_function_decl
,
2149 (recursive_calls
[i
])),
2151 if (!summary
->useful_p (ecf_flags
, false))
2153 remove_summary (lto
, nolto
, ipa
);
2159 if (summary
&& !summary
->useful_p (ecf_flags
))
2162 optimization_summaries
->remove (fnode
);
2164 summaries
->remove (fnode
);
2167 if (summary_lto
&& !summary_lto
->useful_p (ecf_flags
))
2169 summaries_lto
->remove (fnode
);
2172 if (ipa
&& !summary
&& !summary_lto
)
2173 remove_modref_edge_summaries (fnode
);
2177 fprintf (dump_file
, " - modref done with result: tracked.\n");
2179 summary
->dump (dump_file
);
2181 summary_lto
->dump (dump_file
);
2182 dump_modref_edge_summaries (dump_file
, fnode
, 2);
2186 /* Callback for generate_summary. */
2189 modref_generate (void)
2191 struct cgraph_node
*node
;
2192 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node
)
2194 function
*f
= DECL_STRUCT_FUNCTION (node
->decl
);
2198 analyze_function (f
, true);
2203 /* Called when a new function is inserted to callgraph late. */
2206 modref_summaries::insert (struct cgraph_node
*node
, modref_summary
*)
2208 /* Local passes ought to be executed by the pass manager. */
2209 if (this == optimization_summaries
)
2211 optimization_summaries
->remove (node
);
2214 if (!DECL_STRUCT_FUNCTION (node
->decl
)
2215 || !opt_for_fn (node
->decl
, flag_ipa_modref
))
2217 summaries
->remove (node
);
2220 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
2221 analyze_function (DECL_STRUCT_FUNCTION (node
->decl
), true);
2225 /* Called when a new function is inserted to callgraph late. */
2228 modref_summaries_lto::insert (struct cgraph_node
*node
, modref_summary_lto
*)
2230 /* We do not support adding new function when IPA information is already
2231 propagated. This is done only by SIMD cloning that is not very
2233 if (!DECL_STRUCT_FUNCTION (node
->decl
)
2234 || !opt_for_fn (node
->decl
, flag_ipa_modref
)
2237 summaries_lto
->remove (node
);
2240 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
2241 analyze_function (DECL_STRUCT_FUNCTION (node
->decl
), true);
2245 /* Called when new clone is inserted to callgraph late. */
2248 modref_summaries::duplicate (cgraph_node
*, cgraph_node
*dst
,
2249 modref_summary
*src_data
,
2250 modref_summary
*dst_data
)
2252 /* Do not duplicate optimization summaries; we do not handle parameter
2253 transforms on them. */
2254 if (this == optimization_summaries
)
2256 optimization_summaries
->remove (dst
);
2259 dst_data
->stores
= modref_records::create_ggc
2260 (src_data
->stores
->max_bases
,
2261 src_data
->stores
->max_refs
,
2262 src_data
->stores
->max_accesses
);
2263 dst_data
->stores
->copy_from (src_data
->stores
);
2264 dst_data
->loads
= modref_records::create_ggc
2265 (src_data
->loads
->max_bases
,
2266 src_data
->loads
->max_refs
,
2267 src_data
->loads
->max_accesses
);
2268 dst_data
->loads
->copy_from (src_data
->loads
);
2269 dst_data
->writes_errno
= src_data
->writes_errno
;
2270 if (src_data
->arg_flags
.length ())
2271 dst_data
->arg_flags
= src_data
->arg_flags
.copy ();
2274 /* Called when new clone is inserted to callgraph late. */
2277 modref_summaries_lto::duplicate (cgraph_node
*, cgraph_node
*,
2278 modref_summary_lto
*src_data
,
2279 modref_summary_lto
*dst_data
)
2281 /* Be sure that no further cloning happens after ipa-modref. If it does
2282 we will need to update signatures for possible param changes. */
2283 gcc_checking_assert (!((modref_summaries_lto
*)summaries_lto
)->propagated
);
2284 dst_data
->stores
= modref_records_lto::create_ggc
2285 (src_data
->stores
->max_bases
,
2286 src_data
->stores
->max_refs
,
2287 src_data
->stores
->max_accesses
);
2288 dst_data
->stores
->copy_from (src_data
->stores
);
2289 dst_data
->loads
= modref_records_lto::create_ggc
2290 (src_data
->loads
->max_bases
,
2291 src_data
->loads
->max_refs
,
2292 src_data
->loads
->max_accesses
);
2293 dst_data
->loads
->copy_from (src_data
->loads
);
2294 dst_data
->writes_errno
= src_data
->writes_errno
;
2295 if (src_data
->arg_flags
.length ())
2296 dst_data
->arg_flags
= src_data
->arg_flags
.copy ();
2301 /* Definition of the modref pass on GIMPLE. */
2302 const pass_data pass_data_modref
= {
2307 (PROP_cfg
| PROP_ssa
),
2314 class pass_modref
: public gimple_opt_pass
2317 pass_modref (gcc::context
*ctxt
)
2318 : gimple_opt_pass (pass_data_modref
, ctxt
) {}
2320 /* opt_pass methods: */
2323 return new pass_modref (m_ctxt
);
2325 virtual bool gate (function
*)
2327 return flag_ipa_modref
;
2329 virtual unsigned int execute (function
*);
2332 /* Encode TT to the output block OB using the summary streaming API. */
2335 write_modref_records (modref_records_lto
*tt
, struct output_block
*ob
)
2337 streamer_write_uhwi (ob
, tt
->max_bases
);
2338 streamer_write_uhwi (ob
, tt
->max_refs
);
2339 streamer_write_uhwi (ob
, tt
->max_accesses
);
2341 streamer_write_uhwi (ob
, tt
->every_base
);
2342 streamer_write_uhwi (ob
, vec_safe_length (tt
->bases
));
2344 modref_base_node
<tree
> *base_node
;
2345 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, base_node
)
2347 stream_write_tree (ob
, base_node
->base
, true);
2349 streamer_write_uhwi (ob
, base_node
->every_ref
);
2350 streamer_write_uhwi (ob
, vec_safe_length (base_node
->refs
));
2353 modref_ref_node
<tree
> *ref_node
;
2354 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
2356 stream_write_tree (ob
, ref_node
->ref
, true);
2357 streamer_write_uhwi (ob
, ref_node
->every_access
);
2358 streamer_write_uhwi (ob
, vec_safe_length (ref_node
->accesses
));
2361 modref_access_node
*access_node
;
2362 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
2364 streamer_write_hwi (ob
, access_node
->parm_index
);
2365 if (access_node
->parm_index
!= -1)
2367 streamer_write_uhwi (ob
, access_node
->parm_offset_known
);
2368 if (access_node
->parm_offset_known
)
2370 streamer_write_poly_int64 (ob
, access_node
->parm_offset
);
2371 streamer_write_poly_int64 (ob
, access_node
->offset
);
2372 streamer_write_poly_int64 (ob
, access_node
->size
);
2373 streamer_write_poly_int64 (ob
, access_node
->max_size
);
2381 /* Read a modref_tree from the input block IB using the data from DATA_IN.
2382 This assumes that the tree was encoded using write_modref_tree.
2383 Either nolto_ret or lto_ret is initialized by the tree depending whether
2384 LTO streaming is expected or not. */
2387 read_modref_records (lto_input_block
*ib
, struct data_in
*data_in
,
2388 modref_records
**nolto_ret
,
2389 modref_records_lto
**lto_ret
)
2391 size_t max_bases
= streamer_read_uhwi (ib
);
2392 size_t max_refs
= streamer_read_uhwi (ib
);
2393 size_t max_accesses
= streamer_read_uhwi (ib
);
2396 *lto_ret
= modref_records_lto::create_ggc (max_bases
, max_refs
,
2399 *nolto_ret
= modref_records::create_ggc (max_bases
, max_refs
,
2401 gcc_checking_assert (lto_ret
|| nolto_ret
);
2403 size_t every_base
= streamer_read_uhwi (ib
);
2404 size_t nbase
= streamer_read_uhwi (ib
);
2406 gcc_assert (!every_base
|| nbase
== 0);
2410 (*nolto_ret
)->collapse ();
2412 (*lto_ret
)->collapse ();
2414 for (size_t i
= 0; i
< nbase
; i
++)
2416 tree base_tree
= stream_read_tree (ib
, data_in
);
2417 modref_base_node
<alias_set_type
> *nolto_base_node
= NULL
;
2418 modref_base_node
<tree
> *lto_base_node
= NULL
;
2420 /* At stream in time we have LTO alias info. Check if we streamed in
2421 something obviously unnecessary. Do not glob types by alias sets;
2422 it is not 100% clear that ltrans types will get merged same way.
2423 Types may get refined based on ODR type conflicts. */
2424 if (base_tree
&& !get_alias_set (base_tree
))
2428 fprintf (dump_file
, "Streamed in alias set 0 type ");
2429 print_generic_expr (dump_file
, base_tree
);
2430 fprintf (dump_file
, "\n");
2436 nolto_base_node
= (*nolto_ret
)->insert_base (base_tree
2437 ? get_alias_set (base_tree
)
2440 lto_base_node
= (*lto_ret
)->insert_base (base_tree
);
2441 size_t every_ref
= streamer_read_uhwi (ib
);
2442 size_t nref
= streamer_read_uhwi (ib
);
2444 gcc_assert (!every_ref
|| nref
== 0);
2447 if (nolto_base_node
)
2448 nolto_base_node
->collapse ();
2450 lto_base_node
->collapse ();
2452 for (size_t j
= 0; j
< nref
; j
++)
2454 tree ref_tree
= stream_read_tree (ib
, data_in
);
2456 if (ref_tree
&& !get_alias_set (ref_tree
))
2460 fprintf (dump_file
, "Streamed in alias set 0 type ");
2461 print_generic_expr (dump_file
, ref_tree
);
2462 fprintf (dump_file
, "\n");
2467 modref_ref_node
<alias_set_type
> *nolto_ref_node
= NULL
;
2468 modref_ref_node
<tree
> *lto_ref_node
= NULL
;
2470 if (nolto_base_node
)
2472 = nolto_base_node
->insert_ref (ref_tree
2473 ? get_alias_set (ref_tree
) : 0,
2476 lto_ref_node
= lto_base_node
->insert_ref (ref_tree
, max_refs
);
2478 size_t every_access
= streamer_read_uhwi (ib
);
2479 size_t naccesses
= streamer_read_uhwi (ib
);
2482 nolto_ref_node
->every_access
= every_access
;
2484 lto_ref_node
->every_access
= every_access
;
2486 for (size_t k
= 0; k
< naccesses
; k
++)
2488 int parm_index
= streamer_read_hwi (ib
);
2489 bool parm_offset_known
= false;
2490 poly_int64 parm_offset
= 0;
2491 poly_int64 offset
= 0;
2492 poly_int64 size
= -1;
2493 poly_int64 max_size
= -1;
2495 if (parm_index
!= -1)
2497 parm_offset_known
= streamer_read_uhwi (ib
);
2498 if (parm_offset_known
)
2500 parm_offset
= streamer_read_poly_int64 (ib
);
2501 offset
= streamer_read_poly_int64 (ib
);
2502 size
= streamer_read_poly_int64 (ib
);
2503 max_size
= streamer_read_poly_int64 (ib
);
2506 modref_access_node a
= {offset
, size
, max_size
, parm_offset
,
2507 parm_index
, parm_offset_known
};
2509 nolto_ref_node
->insert_access (a
, max_accesses
);
2511 lto_ref_node
->insert_access (a
, max_accesses
);
2516 (*lto_ret
)->cleanup ();
2518 (*nolto_ret
)->cleanup ();
2521 /* Write ESUM to BP. */
2524 modref_write_escape_summary (struct bitpack_d
*bp
, escape_summary
*esum
)
2528 bp_pack_var_len_unsigned (bp
, 0);
2531 bp_pack_var_len_unsigned (bp
, esum
->esc
.length ());
2534 FOR_EACH_VEC_ELT (esum
->esc
, i
, ee
)
2536 bp_pack_var_len_unsigned (bp
, ee
->parm_index
);
2537 bp_pack_var_len_unsigned (bp
, ee
->arg
);
2538 bp_pack_var_len_unsigned (bp
, ee
->min_flags
);
2539 bp_pack_value (bp
, ee
->direct
, 1);
2543 /* Read escape summary for E from BP. */
2546 modref_read_escape_summary (struct bitpack_d
*bp
, cgraph_edge
*e
)
2548 unsigned int n
= bp_unpack_var_len_unsigned (bp
);
2551 escape_summary
*esum
= escape_summaries
->get_create (e
);
2552 esum
->esc
.reserve_exact (n
);
2553 for (unsigned int i
= 0; i
< n
; i
++)
2556 ee
.parm_index
= bp_unpack_var_len_unsigned (bp
);
2557 ee
.arg
= bp_unpack_var_len_unsigned (bp
);
2558 ee
.min_flags
= bp_unpack_var_len_unsigned (bp
);
2559 ee
.direct
= bp_unpack_value (bp
, 1);
2560 esum
->esc
.quick_push (ee
);
2564 /* Callback for write_summary. */
2569 struct output_block
*ob
= create_output_block (LTO_section_ipa_modref
);
2570 lto_symtab_encoder_t encoder
= ob
->decl_state
->symtab_node_encoder
;
2571 unsigned int count
= 0;
2576 streamer_write_uhwi (ob
, 0);
2577 streamer_write_char_stream (ob
->main_stream
, 0);
2578 produce_asm (ob
, NULL
);
2579 destroy_output_block (ob
);
2583 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
2585 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
2586 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
2587 modref_summary_lto
*r
;
2589 if (cnode
&& cnode
->definition
&& !cnode
->alias
2590 && (r
= summaries_lto
->get (cnode
))
2591 && r
->useful_p (flags_from_decl_or_type (cnode
->decl
)))
2594 streamer_write_uhwi (ob
, count
);
2596 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
2598 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
2599 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
2601 if (cnode
&& cnode
->definition
&& !cnode
->alias
)
2603 modref_summary_lto
*r
= summaries_lto
->get (cnode
);
2605 if (!r
|| !r
->useful_p (flags_from_decl_or_type (cnode
->decl
)))
2608 streamer_write_uhwi (ob
, lto_symtab_encoder_encode (encoder
, cnode
));
2610 streamer_write_uhwi (ob
, r
->arg_flags
.length ());
2611 for (unsigned int i
= 0; i
< r
->arg_flags
.length (); i
++)
2612 streamer_write_uhwi (ob
, r
->arg_flags
[i
]);
2614 write_modref_records (r
->loads
, ob
);
2615 write_modref_records (r
->stores
, ob
);
2617 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
2618 bp_pack_value (&bp
, r
->writes_errno
, 1);
2621 for (cgraph_edge
*e
= cnode
->indirect_calls
;
2622 e
; e
= e
->next_callee
)
2624 class fnspec_summary
*sum
= fnspec_summaries
->get (e
);
2625 bp_pack_value (&bp
, sum
!= NULL
, 1);
2627 bp_pack_string (ob
, &bp
, sum
->fnspec
, true);
2628 class escape_summary
*esum
= escape_summaries
->get (e
);
2629 modref_write_escape_summary (&bp
,esum
);
2631 for (cgraph_edge
*e
= cnode
->callees
; e
; e
= e
->next_callee
)
2633 class fnspec_summary
*sum
= fnspec_summaries
->get (e
);
2634 bp_pack_value (&bp
, sum
!= NULL
, 1);
2636 bp_pack_string (ob
, &bp
, sum
->fnspec
, true);
2637 class escape_summary
*esum
= escape_summaries
->get (e
);
2638 modref_write_escape_summary (&bp
,esum
);
2641 streamer_write_bitpack (&bp
);
2644 streamer_write_char_stream (ob
->main_stream
, 0);
2645 produce_asm (ob
, NULL
);
2646 destroy_output_block (ob
);
2650 read_section (struct lto_file_decl_data
*file_data
, const char *data
,
2653 const struct lto_function_header
*header
2654 = (const struct lto_function_header
*) data
;
2655 const int cfg_offset
= sizeof (struct lto_function_header
);
2656 const int main_offset
= cfg_offset
+ header
->cfg_size
;
2657 const int string_offset
= main_offset
+ header
->main_size
;
2658 struct data_in
*data_in
;
2660 unsigned int f_count
;
2662 lto_input_block
ib ((const char *) data
+ main_offset
, header
->main_size
,
2663 file_data
->mode_table
);
2666 = lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
2667 header
->string_size
, vNULL
);
2668 f_count
= streamer_read_uhwi (&ib
);
2669 for (i
= 0; i
< f_count
; i
++)
2671 struct cgraph_node
*node
;
2672 lto_symtab_encoder_t encoder
;
2674 unsigned int index
= streamer_read_uhwi (&ib
);
2675 encoder
= file_data
->symtab_node_encoder
;
2676 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
2679 modref_summary
*modref_sum
= summaries
2680 ? summaries
->get_create (node
) : NULL
;
2681 modref_summary_lto
*modref_sum_lto
= summaries_lto
2682 ? summaries_lto
->get_create (node
)
2684 if (optimization_summaries
)
2685 modref_sum
= optimization_summaries
->get_create (node
);
2688 modref_sum
->writes_errno
= false;
2690 modref_sum_lto
->writes_errno
= false;
2692 gcc_assert (!modref_sum
|| (!modref_sum
->loads
2693 && !modref_sum
->stores
));
2694 gcc_assert (!modref_sum_lto
|| (!modref_sum_lto
->loads
2695 && !modref_sum_lto
->stores
));
2696 unsigned int args
= streamer_read_uhwi (&ib
);
2697 if (args
&& modref_sum
)
2698 modref_sum
->arg_flags
.reserve_exact (args
);
2699 if (args
&& modref_sum_lto
)
2700 modref_sum_lto
->arg_flags
.reserve_exact (args
);
2701 for (unsigned int i
= 0; i
< args
; i
++)
2703 eaf_flags_t flags
= streamer_read_uhwi (&ib
);
2705 modref_sum
->arg_flags
.quick_push (flags
);
2707 modref_sum_lto
->arg_flags
.quick_push (flags
);
2709 read_modref_records (&ib
, data_in
,
2710 modref_sum
? &modref_sum
->loads
: NULL
,
2711 modref_sum_lto
? &modref_sum_lto
->loads
: NULL
);
2712 read_modref_records (&ib
, data_in
,
2713 modref_sum
? &modref_sum
->stores
: NULL
,
2714 modref_sum_lto
? &modref_sum_lto
->stores
: NULL
);
2715 struct bitpack_d bp
= streamer_read_bitpack (&ib
);
2716 if (bp_unpack_value (&bp
, 1))
2719 modref_sum
->writes_errno
= true;
2721 modref_sum_lto
->writes_errno
= true;
2725 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
2727 if (bp_unpack_value (&bp
, 1))
2729 class fnspec_summary
*sum
= fnspec_summaries
->get_create (e
);
2730 sum
->fnspec
= xstrdup (bp_unpack_string (data_in
, &bp
));
2732 modref_read_escape_summary (&bp
, e
);
2734 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
2736 if (bp_unpack_value (&bp
, 1))
2738 class fnspec_summary
*sum
= fnspec_summaries
->get_create (e
);
2739 sum
->fnspec
= xstrdup (bp_unpack_string (data_in
, &bp
));
2741 modref_read_escape_summary (&bp
, e
);
2746 fprintf (dump_file
, "Read modref for %s\n",
2747 node
->dump_name ());
2749 modref_sum
->dump (dump_file
);
2751 modref_sum_lto
->dump (dump_file
);
2752 dump_modref_edge_summaries (dump_file
, node
, 4);
2756 lto_free_section_data (file_data
, LTO_section_ipa_modref
, NULL
, data
,
2758 lto_data_in_delete (data_in
);
2761 /* Callback for read_summary. */
2766 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
2767 struct lto_file_decl_data
*file_data
;
2770 gcc_checking_assert (!optimization_summaries
&& !summaries
&& !summaries_lto
);
2772 optimization_summaries
= modref_summaries::create_ggc (symtab
);
2775 if (flag_wpa
|| flag_incremental_link
== INCREMENTAL_LINK_LTO
)
2776 summaries_lto
= modref_summaries_lto::create_ggc (symtab
);
2778 || (flag_incremental_link
== INCREMENTAL_LINK_LTO
2779 && flag_fat_lto_objects
))
2780 summaries
= modref_summaries::create_ggc (symtab
);
2781 if (!fnspec_summaries
)
2782 fnspec_summaries
= new fnspec_summaries_t (symtab
);
2783 if (!escape_summaries
)
2784 escape_summaries
= new escape_summaries_t (symtab
);
2787 while ((file_data
= file_data_vec
[j
++]))
2790 const char *data
= lto_get_summary_section_data (file_data
,
2791 LTO_section_ipa_modref
,
2794 read_section (file_data
, data
, len
);
2796 /* Fatal error here. We do not want to support compiling ltrans units
2797 with different version of compiler or different flags than the WPA
2798 unit, so this should never happen. */
2799 fatal_error (input_location
,
2800 "IPA modref summary is missing in input file");
2804 /* Recompute arg_flags for param adjustments in INFO. */
2807 remap_arg_flags (auto_vec
<eaf_flags_t
> &arg_flags
, clone_info
*info
)
2809 auto_vec
<eaf_flags_t
> old
= arg_flags
.copy ();
2812 ipa_adjusted_param
*p
;
2814 arg_flags
.release ();
2816 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
2818 int o
= info
->param_adjustments
->get_original_index (i
);
2819 if (o
>= 0 && (int)old
.length () > o
&& old
[o
])
2823 arg_flags
.safe_grow_cleared (max
+ 1, true);
2824 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
2826 int o
= info
->param_adjustments
->get_original_index (i
);
2827 if (o
>= 0 && (int)old
.length () > o
&& old
[o
])
2828 arg_flags
[i
] = old
[o
];
2832 /* If signature changed, update the summary. */
2835 update_signature (struct cgraph_node
*node
)
2837 clone_info
*info
= clone_info::get (node
);
2838 if (!info
|| !info
->param_adjustments
)
2841 modref_summary
*r
= optimization_summaries
2842 ? optimization_summaries
->get (node
) : NULL
;
2843 modref_summary_lto
*r_lto
= summaries_lto
2844 ? summaries_lto
->get (node
) : NULL
;
2849 fprintf (dump_file
, "Updating summary for %s from:\n",
2850 node
->dump_name ());
2852 r
->dump (dump_file
);
2854 r_lto
->dump (dump_file
);
2858 ipa_adjusted_param
*p
;
2860 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
2862 int idx
= info
->param_adjustments
->get_original_index (i
);
2867 auto_vec
<int, 32> map
;
2869 map
.reserve (max
+ 1);
2870 for (i
= 0; i
<= max
; i
++)
2871 map
.quick_push (-1);
2872 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
2874 int idx
= info
->param_adjustments
->get_original_index (i
);
2880 r
->loads
->remap_params (&map
);
2881 r
->stores
->remap_params (&map
);
2882 if (r
->arg_flags
.length ())
2883 remap_arg_flags (r
->arg_flags
, info
);
2887 r_lto
->loads
->remap_params (&map
);
2888 r_lto
->stores
->remap_params (&map
);
2889 if (r_lto
->arg_flags
.length ())
2890 remap_arg_flags (r_lto
->arg_flags
, info
);
2894 fprintf (dump_file
, "to:\n");
2896 r
->dump (dump_file
);
2898 r_lto
->dump (dump_file
);
2903 /* Definition of the modref IPA pass. */
2904 const pass_data pass_data_ipa_modref
=
2906 IPA_PASS
, /* type */
2907 "modref", /* name */
2908 OPTGROUP_IPA
, /* optinfo_flags */
2909 TV_IPA_MODREF
, /* tv_id */
2910 0, /* properties_required */
2911 0, /* properties_provided */
2912 0, /* properties_destroyed */
2913 0, /* todo_flags_start */
2914 ( TODO_dump_symtab
), /* todo_flags_finish */
2917 class pass_ipa_modref
: public ipa_opt_pass_d
2920 pass_ipa_modref (gcc::context
*ctxt
)
2921 : ipa_opt_pass_d (pass_data_ipa_modref
, ctxt
,
2922 modref_generate
, /* generate_summary */
2923 modref_write
, /* write_summary */
2924 modref_read
, /* read_summary */
2925 modref_write
, /* write_optimization_summary */
2926 modref_read
, /* read_optimization_summary */
2927 NULL
, /* stmt_fixup */
2928 0, /* function_transform_todo_flags_start */
2929 NULL
, /* function_transform */
2930 NULL
) /* variable_transform */
2933 /* opt_pass methods: */
2934 opt_pass
*clone () { return new pass_ipa_modref (m_ctxt
); }
2935 virtual bool gate (function
*)
2939 virtual unsigned int execute (function
*);
2945 unsigned int pass_modref::execute (function
*f
)
2947 analyze_function (f
, false);
2952 make_pass_modref (gcc::context
*ctxt
)
2954 return new pass_modref (ctxt
);
2958 make_pass_ipa_modref (gcc::context
*ctxt
)
2960 return new pass_ipa_modref (ctxt
);
2963 /* Skip edges from and to nodes without ipa_pure_const enabled.
2964 Ignore not available symbols. */
2967 ignore_edge (struct cgraph_edge
*e
)
2969 /* We merge summaries of inline clones into summaries of functions they
2970 are inlined to. For that reason the complete function bodies must
2972 if (!e
->inline_failed
)
2974 enum availability avail
;
2975 cgraph_node
*callee
= e
->callee
->function_or_virtual_thunk_symbol
2976 (&avail
, e
->caller
);
2978 return (avail
<= AVAIL_INTERPOSABLE
2979 || ((!optimization_summaries
|| !optimization_summaries
->get (callee
))
2980 && (!summaries_lto
|| !summaries_lto
->get (callee
)))
2981 || flags_from_decl_or_type (e
->callee
->decl
)
2982 & (ECF_CONST
| ECF_NOVOPS
));
2985 /* Compute parm_map for CALLEE_EDGE. */
2988 compute_parm_map (cgraph_edge
*callee_edge
, vec
<modref_parm_map
> *parm_map
)
2990 class ipa_edge_args
*args
;
2991 if (ipa_node_params_sum
2992 && !callee_edge
->call_stmt_cannot_inline_p
2993 && (args
= ipa_edge_args_sum
->get (callee_edge
)) != NULL
)
2995 int i
, count
= ipa_get_cs_argument_count (args
);
2996 class ipa_node_params
*caller_parms_info
, *callee_pi
;
2997 class ipa_call_summary
*es
2998 = ipa_call_summaries
->get (callee_edge
);
3000 = callee_edge
->callee
->function_or_virtual_thunk_symbol
3001 (NULL
, callee_edge
->caller
);
3004 = ipa_node_params_sum
->get (callee_edge
->caller
->inlined_to
3005 ? callee_edge
->caller
->inlined_to
3006 : callee_edge
->caller
);
3007 callee_pi
= ipa_node_params_sum
->get (callee
);
3009 (*parm_map
).safe_grow_cleared (count
, true);
3011 for (i
= 0; i
< count
; i
++)
3013 if (es
&& es
->param
[i
].points_to_local_or_readonly_memory
)
3015 (*parm_map
)[i
].parm_index
= -2;
3019 struct ipa_jump_func
*jf
3020 = ipa_get_ith_jump_func (args
, i
);
3021 if (jf
&& callee_pi
)
3023 tree cst
= ipa_value_from_jfunc (caller_parms_info
,
3027 if (cst
&& points_to_local_or_readonly_memory_p (cst
))
3029 (*parm_map
)[i
].parm_index
= -2;
3033 if (jf
&& jf
->type
== IPA_JF_PASS_THROUGH
)
3035 (*parm_map
)[i
].parm_index
3036 = ipa_get_jf_pass_through_formal_id (jf
);
3037 if (ipa_get_jf_pass_through_operation (jf
) == NOP_EXPR
)
3039 (*parm_map
)[i
].parm_offset_known
= true;
3040 (*parm_map
)[i
].parm_offset
= 0;
3042 else if (ipa_get_jf_pass_through_operation (jf
)
3043 == POINTER_PLUS_EXPR
3044 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf
),
3045 &(*parm_map
)[i
].parm_offset
))
3046 (*parm_map
)[i
].parm_offset_known
= true;
3048 (*parm_map
)[i
].parm_offset_known
= false;
3051 if (jf
&& jf
->type
== IPA_JF_ANCESTOR
)
3053 (*parm_map
)[i
].parm_index
= ipa_get_jf_ancestor_formal_id (jf
);
3054 (*parm_map
)[i
].parm_offset_known
= true;
3056 (!(ipa_get_jf_ancestor_offset (jf
) & (BITS_PER_UNIT
- 1)));
3057 (*parm_map
)[i
].parm_offset
3058 = ipa_get_jf_ancestor_offset (jf
) >> LOG2_BITS_PER_UNIT
;
3061 (*parm_map
)[i
].parm_index
= -1;
3065 fprintf (dump_file
, " Parm map: ");
3066 for (i
= 0; i
< count
; i
++)
3067 fprintf (dump_file
, " %i", (*parm_map
)[i
].parm_index
);
3068 fprintf (dump_file
, "\n");
3075 /* Map used to translate escape infos. */
3083 /* Update escape map fo E. */
3086 update_escape_summary_1 (cgraph_edge
*e
,
3087 vec
<vec
<escape_map
>> &map
,
3090 escape_summary
*sum
= escape_summaries
->get (e
);
3093 auto_vec
<escape_entry
> old
= sum
->esc
.copy ();
3094 sum
->esc
.release ();
3098 FOR_EACH_VEC_ELT (old
, i
, ee
)
3101 struct escape_map
*em
;
3102 if (ee
->parm_index
>= map
.length ())
3104 FOR_EACH_VEC_ELT (map
[ee
->parm_index
], j
, em
)
3106 int min_flags
= ee
->min_flags
;
3107 if (ee
->direct
&& !em
->direct
)
3108 min_flags
= deref_flags (min_flags
, ignore_stores
);
3109 struct escape_entry entry
= {em
->parm_index
, ee
->arg
,
3111 ee
->direct
& em
->direct
};
3112 sum
->esc
.safe_push (entry
);
3115 if (!sum
->esc
.length ())
3116 escape_summaries
->remove (e
);
3119 /* Update escape map fo NODE. */
3122 update_escape_summary (cgraph_node
*node
,
3123 vec
<vec
<escape_map
>> &map
,
3126 if (!escape_summaries
)
3128 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
3129 update_escape_summary_1 (e
, map
, ignore_stores
);
3130 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
3132 if (!e
->inline_failed
)
3133 update_escape_summary (e
->callee
, map
, ignore_stores
);
3135 update_escape_summary_1 (e
, map
, ignore_stores
);
3139 /* Call EDGE was inlined; merge summary from callee to the caller. */
3142 ipa_merge_modref_summary_after_inlining (cgraph_edge
*edge
)
3144 if (!summaries
&& !summaries_lto
)
3147 struct cgraph_node
*to
= (edge
->caller
->inlined_to
3148 ? edge
->caller
->inlined_to
: edge
->caller
);
3149 class modref_summary
*to_info
= summaries
? summaries
->get (to
) : NULL
;
3150 class modref_summary_lto
*to_info_lto
= summaries_lto
3151 ? summaries_lto
->get (to
) : NULL
;
3153 if (!to_info
&& !to_info_lto
)
3156 summaries
->remove (edge
->callee
);
3158 summaries_lto
->remove (edge
->callee
);
3159 remove_modref_edge_summaries (edge
->callee
);
3163 class modref_summary
*callee_info
= summaries
? summaries
->get (edge
->callee
)
3165 class modref_summary_lto
*callee_info_lto
3166 = summaries_lto
? summaries_lto
->get (edge
->callee
) : NULL
;
3167 int flags
= flags_from_decl_or_type (edge
->callee
->decl
);
3168 bool ignore_stores
= ignore_stores_p (edge
->caller
->decl
, flags
);
3170 if (!callee_info
&& to_info
)
3172 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
)))
3173 to_info
->loads
->collapse ();
3175 to_info
->stores
->collapse ();
3177 if (!callee_info_lto
&& to_info_lto
)
3179 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
)))
3180 to_info_lto
->loads
->collapse ();
3182 to_info_lto
->stores
->collapse ();
3184 if (callee_info
|| callee_info_lto
)
3186 auto_vec
<modref_parm_map
, 32> parm_map
;
3188 compute_parm_map (edge
, &parm_map
);
3192 if (to_info
&& callee_info
)
3193 to_info
->stores
->merge (callee_info
->stores
, &parm_map
);
3194 if (to_info_lto
&& callee_info_lto
)
3195 to_info_lto
->stores
->merge (callee_info_lto
->stores
, &parm_map
);
3197 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
)))
3199 if (to_info
&& callee_info
)
3200 to_info
->loads
->merge (callee_info
->loads
, &parm_map
);
3201 if (to_info_lto
&& callee_info_lto
)
3202 to_info_lto
->loads
->merge (callee_info_lto
->loads
, &parm_map
);
3206 /* Now merge escape summaries.
3207 For every escape to the callee we need to merge calle flags
3208 and remap calees escapes. */
3209 class escape_summary
*sum
= escape_summaries
->get (edge
);
3210 int max_escape
= -1;
3214 if (sum
&& !(flags
& (ECF_CONST
| ECF_NOVOPS
)))
3215 FOR_EACH_VEC_ELT (sum
->esc
, i
, ee
)
3216 if ((int)ee
->arg
> max_escape
)
3217 max_escape
= ee
->arg
;
3219 auto_vec
<vec
<struct escape_map
>, 32> emap (max_escape
+ 1);
3220 emap
.safe_grow (max_escape
+ 1, true);
3221 for (i
= 0; (int)i
< max_escape
+ 1; i
++)
3224 if (sum
&& !(flags
& (ECF_CONST
| ECF_NOVOPS
)))
3225 FOR_EACH_VEC_ELT (sum
->esc
, i
, ee
)
3227 bool needed
= false;
3228 if (to_info
&& to_info
->arg_flags
.length () > ee
->parm_index
)
3230 int flags
= callee_info
3231 && callee_info
->arg_flags
.length () > ee
->arg
3232 ? callee_info
->arg_flags
[ee
->arg
] : 0;
3234 flags
= deref_flags (flags
, ignore_stores
);
3235 else if (ignore_stores
)
3236 flags
|= ignore_stores_eaf_flags
;
3237 flags
|= ee
->min_flags
;
3238 to_info
->arg_flags
[ee
->parm_index
] &= flags
;
3239 if (to_info
->arg_flags
[ee
->parm_index
])
3242 if (to_info_lto
&& to_info_lto
->arg_flags
.length () > ee
->parm_index
)
3244 int flags
= callee_info_lto
3245 && callee_info_lto
->arg_flags
.length () > ee
->arg
3246 ? callee_info_lto
->arg_flags
[ee
->arg
] : 0;
3248 flags
= deref_flags (flags
, ignore_stores
);
3249 else if (ignore_stores
)
3250 flags
|= ignore_stores_eaf_flags
;
3251 flags
|= ee
->min_flags
;
3252 to_info_lto
->arg_flags
[ee
->parm_index
] &= flags
;
3253 if (to_info_lto
->arg_flags
[ee
->parm_index
])
3256 struct escape_map entry
= {ee
->parm_index
, ee
->direct
};
3258 emap
[ee
->arg
].safe_push (entry
);
3260 update_escape_summary (edge
->callee
, emap
, ignore_stores
);
3261 for (i
= 0; (int)i
< max_escape
+ 1; i
++)
3264 escape_summaries
->remove (edge
);
3268 if (to_info
&& !to_info
->useful_p (flags
))
3271 fprintf (dump_file
, "Removed mod-ref summary for %s\n",
3273 summaries
->remove (to
);
3276 else if (to_info
&& dump_file
)
3279 fprintf (dump_file
, "Updated mod-ref summary for %s\n",
3281 to_info
->dump (dump_file
);
3284 summaries
->remove (edge
->callee
);
3288 if (to_info_lto
&& !to_info_lto
->useful_p (flags
))
3291 fprintf (dump_file
, "Removed mod-ref summary for %s\n",
3293 summaries_lto
->remove (to
);
3295 else if (to_info_lto
&& dump_file
)
3298 fprintf (dump_file
, "Updated mod-ref summary for %s\n",
3300 to_info_lto
->dump (dump_file
);
3303 if (callee_info_lto
)
3304 summaries_lto
->remove (edge
->callee
);
3306 if (!to_info
&& !to_info_lto
)
3307 remove_modref_edge_summaries (to
);
3311 /* Get parameter type from DECL. This is only safe for special cases
3312 like builtins we create fnspec for because the type match is checked
3313 at fnspec creation time. */
3316 get_parm_type (tree decl
, unsigned int i
)
3318 tree t
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
3320 for (unsigned int p
= 0; p
< i
; p
++)
3322 return TREE_VALUE (t
);
3325 /* Return access mode for argument I of call E with FNSPEC. */
3327 static modref_access_node
3328 get_access_for_fnspec (cgraph_edge
*e
, attr_fnspec
&fnspec
,
3329 unsigned int i
, modref_parm_map
&map
)
3331 tree size
= NULL_TREE
;
3332 unsigned int size_arg
;
3334 if (!fnspec
.arg_specified_p (i
))
3336 else if (fnspec
.arg_max_access_size_given_by_arg_p (i
, &size_arg
))
3338 cgraph_node
*node
= e
->caller
->inlined_to
3339 ? e
->caller
->inlined_to
: e
->caller
;
3340 ipa_node_params
*caller_parms_info
= ipa_node_params_sum
->get (node
);
3341 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
3342 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, size_arg
);
3345 size
= ipa_value_from_jfunc (caller_parms_info
, jf
,
3346 get_parm_type (e
->callee
->decl
, size_arg
));
3348 else if (fnspec
.arg_access_size_given_by_type_p (i
))
3349 size
= TYPE_SIZE_UNIT (get_parm_type (e
->callee
->decl
, i
));
3350 modref_access_node a
= {0, -1, -1,
3351 map
.parm_offset
, map
.parm_index
,
3352 map
.parm_offset_known
};
3353 poly_int64 size_hwi
;
3355 && poly_int_tree_p (size
, &size_hwi
)
3356 && coeffs_in_range_p (size_hwi
, 0,
3357 HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
))
3360 a
.max_size
= size_hwi
<< LOG2_BITS_PER_UNIT
;
3365 /* Call E in NODE with ECF_FLAGS has no summary; update MODREF_SUMMARY and
3366 CUR_SUMMARY_LTO accordingly. Return true if something changed. */
3369 propagate_unknown_call (cgraph_node
*node
,
3370 cgraph_edge
*e
, int ecf_flags
,
3371 modref_summary
*cur_summary
,
3372 modref_summary_lto
*cur_summary_lto
)
3374 bool changed
= false;
3375 class fnspec_summary
*fnspec_sum
= fnspec_summaries
->get (e
);
3376 auto_vec
<modref_parm_map
, 32> parm_map
;
3378 && compute_parm_map (e
, &parm_map
))
3380 attr_fnspec
fnspec (fnspec_sum
->fnspec
);
3382 gcc_checking_assert (fnspec
.known_p ());
3383 if (fnspec
.global_memory_read_p ())
3384 collapse_loads (cur_summary
, cur_summary_lto
);
3387 tree t
= TYPE_ARG_TYPES (TREE_TYPE (e
->callee
->decl
));
3388 for (unsigned i
= 0; i
< parm_map
.length () && t
;
3389 i
++, t
= TREE_CHAIN (t
))
3390 if (!POINTER_TYPE_P (TREE_VALUE (t
)))
3392 else if (!fnspec
.arg_specified_p (i
)
3393 || fnspec
.arg_maybe_read_p (i
))
3395 modref_parm_map map
= parm_map
[i
];
3396 if (map
.parm_index
== -2)
3398 if (map
.parm_index
== -1)
3400 collapse_loads (cur_summary
, cur_summary_lto
);
3404 changed
|= cur_summary
->loads
->insert
3405 (0, 0, get_access_for_fnspec (e
, fnspec
, i
, map
));
3406 if (cur_summary_lto
)
3407 changed
|= cur_summary_lto
->loads
->insert
3408 (0, 0, get_access_for_fnspec (e
, fnspec
, i
, map
));
3411 if (ignore_stores_p (node
->decl
, ecf_flags
))
3413 else if (fnspec
.global_memory_written_p ())
3414 collapse_stores (cur_summary
, cur_summary_lto
);
3417 tree t
= TYPE_ARG_TYPES (TREE_TYPE (e
->callee
->decl
));
3418 for (unsigned i
= 0; i
< parm_map
.length () && t
;
3419 i
++, t
= TREE_CHAIN (t
))
3420 if (!POINTER_TYPE_P (TREE_VALUE (t
)))
3422 else if (!fnspec
.arg_specified_p (i
)
3423 || fnspec
.arg_maybe_written_p (i
))
3425 modref_parm_map map
= parm_map
[i
];
3426 if (map
.parm_index
== -2)
3428 if (map
.parm_index
== -1)
3430 collapse_stores (cur_summary
, cur_summary_lto
);
3434 changed
|= cur_summary
->stores
->insert
3435 (0, 0, get_access_for_fnspec (e
, fnspec
, i
, map
));
3436 if (cur_summary_lto
)
3437 changed
|= cur_summary_lto
->stores
->insert
3438 (0, 0, get_access_for_fnspec (e
, fnspec
, i
, map
));
3441 if (fnspec
.errno_maybe_written_p () && flag_errno_math
)
3443 if (cur_summary
&& !cur_summary
->writes_errno
)
3445 cur_summary
->writes_errno
= true;
3448 if (cur_summary_lto
&& !cur_summary_lto
->writes_errno
)
3450 cur_summary_lto
->writes_errno
= true;
3457 fprintf (dump_file
, " collapsing loads\n");
3458 changed
|= collapse_loads (cur_summary
, cur_summary_lto
);
3459 if (!ignore_stores_p (node
->decl
, ecf_flags
))
3462 fprintf (dump_file
, " collapsing stores\n");
3463 changed
|= collapse_stores (cur_summary
, cur_summary_lto
);
3468 /* Maybe remove summaies of NODE pointed to by CUR_SUMMARY_PTR
3469 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
3472 remove_useless_summaries (cgraph_node
*node
,
3473 modref_summary
**cur_summary_ptr
,
3474 modref_summary_lto
**cur_summary_lto_ptr
,
3477 if (*cur_summary_ptr
&& !(*cur_summary_ptr
)->useful_p (ecf_flags
, false))
3479 optimization_summaries
->remove (node
);
3480 *cur_summary_ptr
= NULL
;
3482 if (*cur_summary_lto_ptr
3483 && !(*cur_summary_lto_ptr
)->useful_p (ecf_flags
, false))
3485 summaries_lto
->remove (node
);
3486 *cur_summary_lto_ptr
= NULL
;
3490 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
3491 and propagate loads/stores. */
3494 modref_propagate_in_scc (cgraph_node
*component_node
)
3496 bool changed
= true;
3502 for (struct cgraph_node
*cur
= component_node
; cur
;
3503 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
3505 cgraph_node
*node
= cur
->inlined_to
? cur
->inlined_to
: cur
;
3506 modref_summary
*cur_summary
= optimization_summaries
3507 ? optimization_summaries
->get (node
)
3509 modref_summary_lto
*cur_summary_lto
= summaries_lto
3510 ? summaries_lto
->get (node
)
3513 if (!cur_summary
&& !cur_summary_lto
)
3516 int cur_ecf_flags
= flags_from_decl_or_type (node
->decl
);
3519 fprintf (dump_file
, " Processing %s%s%s\n",
3521 TREE_READONLY (cur
->decl
) ? " (const)" : "",
3522 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
3524 for (cgraph_edge
*e
= cur
->indirect_calls
; e
; e
= e
->next_callee
)
3526 if (e
->indirect_info
->ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
3529 fprintf (dump_file
, " Indirect call"
3530 "collapsing loads\n");
3531 if (propagate_unknown_call
3532 (node
, e
, e
->indirect_info
->ecf_flags
,
3533 cur_summary
, cur_summary_lto
))
3536 remove_useless_summaries (node
, &cur_summary
,
3539 if (!cur_summary
&& !cur_summary_lto
)
3544 if (!cur_summary
&& !cur_summary_lto
)
3547 for (cgraph_edge
*callee_edge
= cur
->callees
; callee_edge
;
3548 callee_edge
= callee_edge
->next_callee
)
3550 int flags
= flags_from_decl_or_type (callee_edge
->callee
->decl
);
3551 modref_summary
*callee_summary
= NULL
;
3552 modref_summary_lto
*callee_summary_lto
= NULL
;
3553 struct cgraph_node
*callee
;
3555 if (flags
& (ECF_CONST
| ECF_NOVOPS
)
3556 || !callee_edge
->inline_failed
)
3559 /* Get the callee and its summary. */
3560 enum availability avail
;
3561 callee
= callee_edge
->callee
->function_or_virtual_thunk_symbol
3564 /* It is not necessary to re-process calls outside of the
3568 || ((struct ipa_dfs_info
*)cur
->aux
)->scc_no
3569 != ((struct ipa_dfs_info
*)callee
->aux
)->scc_no
))
3573 fprintf (dump_file
, " Call to %s\n",
3574 callee_edge
->callee
->dump_name ());
3576 bool ignore_stores
= ignore_stores_p (cur
->decl
, flags
);
3578 if (avail
<= AVAIL_INTERPOSABLE
)
3581 fprintf (dump_file
, " Call target interposable"
3582 " or not available\n");
3583 changed
|= propagate_unknown_call
3584 (node
, callee_edge
, flags
,
3585 cur_summary
, cur_summary_lto
);
3586 if (!cur_summary
&& !cur_summary_lto
)
3591 /* We don't know anything about CALLEE, hence we cannot tell
3592 anything about the entire component. */
3595 && !(callee_summary
= optimization_summaries
->get (callee
)))
3598 fprintf (dump_file
, " No call target summary\n");
3599 changed
|= propagate_unknown_call
3600 (node
, callee_edge
, flags
,
3604 && !(callee_summary_lto
= summaries_lto
->get (callee
)))
3607 fprintf (dump_file
, " No call target summary\n");
3608 changed
|= propagate_unknown_call
3609 (node
, callee_edge
, flags
,
3610 NULL
, cur_summary_lto
);
3613 /* We can not safely optimize based on summary of callee if it
3614 does not always bind to current def: it is possible that
3615 memory load was optimized out earlier which may not happen in
3616 the interposed variant. */
3617 if (!callee_edge
->binds_to_current_def_p ())
3619 changed
|= collapse_loads (cur_summary
, cur_summary_lto
);
3621 fprintf (dump_file
, " May not bind local;"
3622 " collapsing loads\n");
3626 auto_vec
<modref_parm_map
, 32> parm_map
;
3628 compute_parm_map (callee_edge
, &parm_map
);
3630 /* Merge in callee's information. */
3633 changed
|= cur_summary
->loads
->merge
3634 (callee_summary
->loads
, &parm_map
);
3637 changed
|= cur_summary
->stores
->merge
3638 (callee_summary
->stores
, &parm_map
);
3639 if (!cur_summary
->writes_errno
3640 && callee_summary
->writes_errno
)
3642 cur_summary
->writes_errno
= true;
3647 if (callee_summary_lto
)
3649 changed
|= cur_summary_lto
->loads
->merge
3650 (callee_summary_lto
->loads
, &parm_map
);
3653 changed
|= cur_summary_lto
->stores
->merge
3654 (callee_summary_lto
->stores
, &parm_map
);
3655 if (!cur_summary_lto
->writes_errno
3656 && callee_summary_lto
->writes_errno
)
3658 cur_summary_lto
->writes_errno
= true;
3664 remove_useless_summaries (node
, &cur_summary
,
3667 if (!cur_summary
&& !cur_summary_lto
)
3669 if (dump_file
&& changed
)
3672 cur_summary
->dump (dump_file
);
3673 if (cur_summary_lto
)
3674 cur_summary_lto
->dump (dump_file
);
3675 dump_modref_edge_summaries (dump_file
, node
, 4);
3683 "Propagation finished in %i iterations\n", iteration
);
3686 /* Dump results of propagation in SCC rooted in COMPONENT_NODE. */
3689 modref_propagate_dump_scc (cgraph_node
*component_node
)
3691 for (struct cgraph_node
*cur
= component_node
; cur
;
3692 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
3693 if (!cur
->inlined_to
)
3695 modref_summary
*cur_summary
= optimization_summaries
3696 ? optimization_summaries
->get (cur
)
3698 modref_summary_lto
*cur_summary_lto
= summaries_lto
3699 ? summaries_lto
->get (cur
)
3702 fprintf (dump_file
, "Propagated modref for %s%s%s\n",
3704 TREE_READONLY (cur
->decl
) ? " (const)" : "",
3705 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
3706 if (optimization_summaries
)
3709 cur_summary
->dump (dump_file
);
3711 fprintf (dump_file
, " Not tracked\n");
3715 if (cur_summary_lto
)
3716 cur_summary_lto
->dump (dump_file
);
3718 fprintf (dump_file
, " Not tracked (lto)\n");
3723 /* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
3724 and SUMMARY_LTO to CUR_SUMMARY_LTO.
3725 Return true if something changed. */
3728 modref_merge_call_site_flags (escape_summary
*sum
,
3729 modref_summary
*cur_summary
,
3730 modref_summary_lto
*cur_summary_lto
,
3731 modref_summary
*summary
,
3732 modref_summary_lto
*summary_lto
,
3738 bool changed
= false;
3739 bool ignore_stores
= ignore_stores_p (caller
, ecf_flags
);
3741 /* If we have no useful info to propagate. */
3742 if ((!cur_summary
|| !cur_summary
->arg_flags
.length ())
3743 && (!cur_summary_lto
|| !cur_summary_lto
->arg_flags
.length ()))
3746 FOR_EACH_VEC_ELT (sum
->esc
, i
, ee
)
3751 if (summary
&& ee
->arg
< summary
->arg_flags
.length ())
3752 flags
= summary
->arg_flags
[ee
->arg
];
3754 && ee
->arg
< summary_lto
->arg_flags
.length ())
3755 flags_lto
= summary_lto
->arg_flags
[ee
->arg
];
3758 flags
= deref_flags (flags
, ignore_stores
);
3759 flags_lto
= deref_flags (flags_lto
, ignore_stores
);
3761 else if (ignore_stores
)
3763 flags
|= ignore_stores_eaf_flags
;
3764 flags_lto
|= ignore_stores_eaf_flags
;
3766 /* Returning the value is already accounted to at local propagation. */
3767 flags
|= ee
->min_flags
| EAF_NOT_RETURNED
;
3768 flags_lto
|= ee
->min_flags
| EAF_NOT_RETURNED
;
3769 /* Noescape implies that value also does not escape directly.
3770 Fnspec machinery does set both so compensate for this. */
3771 if (flags
& EAF_NOESCAPE
)
3772 flags
|= EAF_NODIRECTESCAPE
;
3773 if (flags_lto
& EAF_NOESCAPE
)
3774 flags_lto
|= EAF_NODIRECTESCAPE
;
3775 if (!(flags
& EAF_UNUSED
)
3776 && cur_summary
&& ee
->parm_index
< cur_summary
->arg_flags
.length ())
3778 int f
= cur_summary
->arg_flags
[ee
->parm_index
];
3779 if ((f
& flags
) != f
)
3781 f
= remove_useless_eaf_flags
3782 (f
& flags
, ecf_flags
,
3783 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller
))));
3784 cur_summary
->arg_flags
[ee
->parm_index
] = f
;
3788 if (!(flags_lto
& EAF_UNUSED
)
3790 && ee
->parm_index
< cur_summary_lto
->arg_flags
.length ())
3792 int f
= cur_summary_lto
->arg_flags
[ee
->parm_index
];
3793 if ((f
& flags_lto
) != f
)
3795 f
= remove_useless_eaf_flags
3796 (f
& flags_lto
, ecf_flags
,
3797 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller
))));
3798 cur_summary_lto
->arg_flags
[ee
->parm_index
] = f
;
3806 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
3807 and propagate arg flags. */
3810 modref_propagate_flags_in_scc (cgraph_node
*component_node
)
3812 bool changed
= true;
3818 for (struct cgraph_node
*cur
= component_node
; cur
;
3819 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
3821 cgraph_node
*node
= cur
->inlined_to
? cur
->inlined_to
: cur
;
3822 modref_summary
*cur_summary
= optimization_summaries
3823 ? optimization_summaries
->get (node
)
3825 modref_summary_lto
*cur_summary_lto
= summaries_lto
3826 ? summaries_lto
->get (node
)
3829 if (!cur_summary
&& !cur_summary_lto
)
3833 fprintf (dump_file
, " Processing %s%s%s\n",
3835 TREE_READONLY (cur
->decl
) ? " (const)" : "",
3836 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
3838 for (cgraph_edge
*e
= cur
->indirect_calls
; e
; e
= e
->next_callee
)
3840 escape_summary
*sum
= escape_summaries
->get (e
);
3842 if (!sum
|| (e
->indirect_info
->ecf_flags
3843 & (ECF_CONST
| ECF_NOVOPS
)))
3846 changed
|= modref_merge_call_site_flags
3847 (sum
, cur_summary
, cur_summary_lto
,
3849 node
->decl
, e
->indirect_info
->ecf_flags
);
3852 if (!cur_summary
&& !cur_summary_lto
)
3855 for (cgraph_edge
*callee_edge
= cur
->callees
; callee_edge
;
3856 callee_edge
= callee_edge
->next_callee
)
3858 int ecf_flags
= flags_from_decl_or_type
3859 (callee_edge
->callee
->decl
);
3860 modref_summary
*callee_summary
= NULL
;
3861 modref_summary_lto
*callee_summary_lto
= NULL
;
3862 struct cgraph_node
*callee
;
3864 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
)
3865 || !callee_edge
->inline_failed
)
3867 /* Get the callee and its summary. */
3868 enum availability avail
;
3869 callee
= callee_edge
->callee
->function_or_virtual_thunk_symbol
3872 /* It is not necessary to re-process calls outside of the
3876 || ((struct ipa_dfs_info
*)cur
->aux
)->scc_no
3877 != ((struct ipa_dfs_info
*)callee
->aux
)->scc_no
))
3880 escape_summary
*sum
= escape_summaries
->get (callee_edge
);
3885 fprintf (dump_file
, " Call to %s\n",
3886 callee_edge
->callee
->dump_name ());
3888 if (avail
<= AVAIL_INTERPOSABLE
3889 || callee_edge
->call_stmt_cannot_inline_p
)
3894 callee_summary
= optimization_summaries
->get (callee
);
3895 if (cur_summary_lto
)
3896 callee_summary_lto
= summaries_lto
->get (callee
);
3898 changed
|= modref_merge_call_site_flags
3899 (sum
, cur_summary
, cur_summary_lto
,
3900 callee_summary
, callee_summary_lto
,
3901 node
->decl
, ecf_flags
);
3902 if (dump_file
&& changed
)
3905 cur_summary
->dump (dump_file
);
3906 if (cur_summary_lto
)
3907 cur_summary_lto
->dump (dump_file
);
3915 "Propagation of flags finished in %i iterations\n", iteration
);
3918 /* Run the IPA pass. This will take a function's summaries and calls and
3919 construct new summaries which represent a transitive closure. So that
3920 summary of an analyzed function contains information about the loads and
3921 stores that the function or any function that it calls does. */
3924 pass_ipa_modref::execute (function
*)
3926 if (!summaries
&& !summaries_lto
)
3929 if (optimization_summaries
)
3930 ggc_delete (optimization_summaries
);
3931 optimization_summaries
= summaries
;
3934 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*,
3935 symtab
->cgraph_count
);
3937 order_pos
= ipa_reduced_postorder (order
, true, ignore_edge
);
3940 /* Iterate over all strongly connected components in post-order. */
3941 for (i
= 0; i
< order_pos
; i
++)
3943 /* Get the component's representative. That's just any node in the
3944 component from which we can traverse the entire component. */
3945 struct cgraph_node
*component_node
= order
[i
];
3948 fprintf (dump_file
, "\n\nStart of SCC component\n");
3950 modref_propagate_in_scc (component_node
);
3951 modref_propagate_flags_in_scc (component_node
);
3953 modref_propagate_dump_scc (component_node
);
3956 FOR_EACH_FUNCTION (node
)
3957 update_signature (node
);
3959 ((modref_summaries_lto
*)summaries_lto
)->propagated
= true;
3960 ipa_free_postorder_info ();
3962 delete fnspec_summaries
;
3963 fnspec_summaries
= NULL
;
3964 delete escape_summaries
;
3965 escape_summaries
= NULL
;
3969 /* Summaries must stay alive until end of compilation. */
3972 ipa_modref_c_finalize ()
3974 if (optimization_summaries
)
3975 ggc_delete (optimization_summaries
);
3976 optimization_summaries
= NULL
;
3977 gcc_checking_assert (!summaries
3978 || flag_incremental_link
== INCREMENTAL_LINK_LTO
);
3980 ggc_delete (summaries_lto
);
3981 summaries_lto
= NULL
;
3982 if (fnspec_summaries
)
3983 delete fnspec_summaries
;
3984 fnspec_summaries
= NULL
;
3985 if (escape_summaries
)
3986 delete escape_summaries
;
3987 escape_summaries
= NULL
;
3990 #include "gt-ipa-modref.h"