]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-modref.c
Update copyright years.
[thirdparty/gcc.git] / gcc / ipa-modref.c
CommitLineData
d119f34c 1/* Search for references that a functions loads or stores.
99dee823 2 Copyright (C) 2020-2021 Free Software Foundation, Inc.
d119f34c
JH
3 Contributed by David Cepelik and Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
/* Mod/ref pass records summary about loads and stores performed by the
   function.  This is later used by alias analysis to disambiguate memory
   accesses across function calls.

   This file contains a tree pass and an IPA pass.  Both perform the same
   analysis, however the tree pass is executed during early and late
   optimization passes to propagate info downwards in the compilation order.
   The IPA pass propagates across the callgraph and is able to handle
   recursion and works on the whole program during link-time analysis.

   LTO mode differs from the local mode by not recording alias sets but types
   that are translated to alias sets later.  This is necessary in order to
   stream the information, because the alias sets are rebuilt at stream-in
   time and may not correspond to ones seen during analysis.  For this reason
   part of the analysis is duplicated.

   The following information is computed
     1) load/store access tree described in ipa-modref-tree.h
	This is used by tree-ssa-alias to disambiguate load/stores
     2) EAF flags used by points-to analysis (in tree-ssa-structalias)
	and defined in tree-core.h.
   and stored to optimization_summaries.

   There are multiple summaries computed and used during the propagation:
     - summaries holds summaries from analysis to IPA propagation
       time.
     - summaries_lto is same as summaries but holds them in a format
       that can be streamed (as described above).
     - fnspec_summary holds fnspec strings for call.  This is
       necessary because gimple_call_fnspec performs additional
       analysis except for looking callee fndecl.
     - escape_summary holds escape points for given call edge.
       That is a vector recording what function parameters
       may escape to a function call (and with what parameter index).  */
d119f34c
JH
55
56#include "config.h"
57#include "system.h"
58#include "coretypes.h"
59#include "backend.h"
60#include "tree.h"
61#include "gimple.h"
62#include "alloc-pool.h"
63#include "tree-pass.h"
64#include "gimple-iterator.h"
65#include "tree-dfa.h"
66#include "cgraph.h"
67#include "ipa-utils.h"
68#include "symbol-summary.h"
69#include "gimple-pretty-print.h"
70#include "gimple-walk.h"
71#include "print-tree.h"
72#include "tree-streamer.h"
73#include "alias.h"
74#include "calls.h"
75#include "ipa-modref-tree.h"
76#include "ipa-modref.h"
e977dd5e
JH
77#include "value-range.h"
78#include "ipa-prop.h"
79#include "ipa-fnsummary.h"
617695cd 80#include "attr-fnspec.h"
ae7a23a3 81#include "symtab-clones.h"
520d5ad3
JH
82#include "gimple-ssa.h"
83#include "tree-phinodes.h"
84#include "tree-ssa-operands.h"
85#include "ssa-iterators.h"
86#include "stringpool.h"
87#include "tree-ssanames.h"
88
85ebbabd 89namespace {
d119f34c 90
6cef01c3
JH
91/* We record fnspec specifiers for call edges since they depends on actual
92 gimple statements. */
93
94class fnspec_summary
95{
96public:
97 char *fnspec;
98
99 fnspec_summary ()
100 : fnspec (NULL)
101 {
102 }
103
104 ~fnspec_summary ()
105 {
106 free (fnspec);
107 }
108};
109
110/* Summary holding fnspec string for a given call. */
111
112class fnspec_summaries_t : public call_summary <fnspec_summary *>
113{
114public:
115 fnspec_summaries_t (symbol_table *symtab)
116 : call_summary <fnspec_summary *> (symtab) {}
117 /* Hook that is called by summary when an edge is duplicated. */
118 virtual void duplicate (cgraph_edge *,
119 cgraph_edge *,
120 fnspec_summary *src,
121 fnspec_summary *dst)
122 {
123 dst->fnspec = xstrdup (src->fnspec);
124 }
125};
126
127static fnspec_summaries_t *fnspec_summaries = NULL;
128
85ebbabd
JH
/* Escape summary holds a vector of param indexes that escape to
   a given call.  */
struct escape_entry
{
  /* Index of the caller parameter that escapes at this call.  */
  unsigned int parm_index;
  /* Index of the callee argument it escapes to.  */
  unsigned int arg;
  /* Minimal EAF flags known about the argument.  */
  char min_flags;
  /* True when the parameter escapes directly, false for indirect escapes.  */
  bool direct;
};
142
143/* Dump EAF flags. */
144
145static void
146dump_eaf_flags (FILE *out, int flags, bool newline = true)
147{
148 if (flags & EAF_DIRECT)
149 fprintf (out, " direct");
150 if (flags & EAF_NOCLOBBER)
151 fprintf (out, " noclobber");
152 if (flags & EAF_NOESCAPE)
153 fprintf (out, " noescape");
bb07490a
JH
154 if (flags & EAF_NODIRECTESCAPE)
155 fprintf (out, " nodirectescape");
85ebbabd
JH
156 if (flags & EAF_UNUSED)
157 fprintf (out, " unused");
158 if (newline)
159 fprintf (out, "\n");
160}
161
162struct escape_summary
163{
164 auto_vec <escape_entry> esc;
165 void dump (FILE *out)
166 {
167 for (unsigned int i = 0; i < esc.length (); i++)
168 {
169 fprintf (out, " parm %i arg %i %s min:",
170 esc[i].parm_index,
171 esc[i].arg,
172 esc[i].direct ? "(direct)" : "(indirect)");
173 dump_eaf_flags (out, esc[i].min_flags, false);
174 }
175 fprintf (out, "\n");
176 }
177};
178
179class escape_summaries_t : public call_summary <escape_summary *>
180{
181public:
182 escape_summaries_t (symbol_table *symtab)
183 : call_summary <escape_summary *> (symtab) {}
184 /* Hook that is called by summary when an edge is duplicated. */
185 virtual void duplicate (cgraph_edge *,
186 cgraph_edge *,
187 escape_summary *src,
188 escape_summary *dst)
189 {
190 dst->esc = src->esc.copy ();
191 }
192};
193
194static escape_summaries_t *escape_summaries = NULL;
195
196} /* ANON namespace: GTY annotated summaries can not be anonymous. */
197
198
d119f34c
JH
199/* Class (from which there is one global instance) that holds modref summaries
200 for all analyzed functions. */
6cef01c3 201
d119f34c
JH
202class GTY((user)) modref_summaries
203 : public fast_function_summary <modref_summary *, va_gc>
204{
205public:
206 modref_summaries (symbol_table *symtab)
207 : fast_function_summary <modref_summary *, va_gc> (symtab) {}
208 virtual void insert (cgraph_node *, modref_summary *state);
209 virtual void duplicate (cgraph_node *src_node,
210 cgraph_node *dst_node,
211 modref_summary *src_data,
212 modref_summary *dst_data);
c9da53d6
JH
213 static modref_summaries *create_ggc (symbol_table *symtab)
214 {
215 return new (ggc_alloc_no_dtor<modref_summaries> ())
216 modref_summaries (symtab);
217 }
d119f34c
JH
218};
219
71dbabcc
JH
220class modref_summary_lto;
221
222/* Class (from which there is one global instance) that holds modref summaries
223 for all analyzed functions. */
6cef01c3 224
71dbabcc
JH
225class GTY((user)) modref_summaries_lto
226 : public fast_function_summary <modref_summary_lto *, va_gc>
227{
228public:
229 modref_summaries_lto (symbol_table *symtab)
230 : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
231 propagated (false) {}
232 virtual void insert (cgraph_node *, modref_summary_lto *state);
233 virtual void duplicate (cgraph_node *src_node,
234 cgraph_node *dst_node,
235 modref_summary_lto *src_data,
236 modref_summary_lto *dst_data);
237 static modref_summaries_lto *create_ggc (symbol_table *symtab)
238 {
239 return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
240 modref_summaries_lto (symtab);
241 }
242 bool propagated;
243};
244
245/* Global variable holding all modref summaries
246 (from analysis to IPA propagation time). */
6cef01c3 247
71dbabcc
JH
248static GTY(()) fast_function_summary <modref_summary *, va_gc>
249 *summaries;
250
8a2fd716 251/* Global variable holding all modref optimization summaries
71dbabcc 252 (from IPA propagation time or used by local optimization pass). */
6cef01c3 253
71dbabcc
JH
254static GTY(()) fast_function_summary <modref_summary *, va_gc>
255 *optimization_summaries;
256
257/* LTO summaries hold info from analysis to LTO streaming or from LTO
258 stream-in through propagation to LTO stream-out. */
6cef01c3 259
71dbabcc
JH
260static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
261 *summaries_lto;
d119f34c
JH
262
263/* Summary for a single function which this pass produces. */
264
265modref_summary::modref_summary ()
6cef01c3 266 : loads (NULL), stores (NULL), writes_errno (NULL)
d119f34c
JH
267{
268}
269
270modref_summary::~modref_summary ()
271{
272 if (loads)
273 ggc_delete (loads);
274 if (stores)
275 ggc_delete (stores);
d119f34c
JH
276}
277
85ebbabd
JH
278/* Return true if FLAGS holds some useful information. */
279
280static bool
281eaf_flags_useful_p (vec <unsigned char> &flags, int ecf_flags)
282{
283 for (unsigned i = 0; i < flags.length (); i++)
284 if (ecf_flags & ECF_PURE)
285 {
286 if (flags[i] & (EAF_UNUSED | EAF_DIRECT))
287 return true;
288 }
289 else
290 {
291 if (flags[i])
292 return true;
293 }
294 return false;
295}
296
297/* Return true if summary is potentially useful for optimization.
298 If CHECK_FLAGS is false assume that arg_flags are useful. */
67c935c8
JH
299
300bool
85ebbabd 301modref_summary::useful_p (int ecf_flags, bool check_flags)
67c935c8
JH
302{
303 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
304 return false;
85ebbabd 305 if (arg_flags.length () && !check_flags)
520d5ad3 306 return true;
85ebbabd
JH
307 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
308 return true;
309 arg_flags.release ();
71dbabcc 310 if (loads && !loads->every_base)
67c935c8
JH
311 return true;
312 if (ecf_flags & ECF_PURE)
313 return false;
71dbabcc 314 return stores && !stores->every_base;
67c935c8
JH
315}
316
71dbabcc
JH
317/* Single function summary used for LTO. */
318
319typedef modref_tree <tree> modref_records_lto;
320struct GTY(()) modref_summary_lto
321{
322 /* Load and stores in functions using types rather then alias sets.
323
324 This is necessary to make the information streamable for LTO but is also
325 more verbose and thus more likely to hit the limits. */
326 modref_records_lto *loads;
327 modref_records_lto *stores;
85ebbabd 328 auto_vec<unsigned char> GTY((skip)) arg_flags;
6cef01c3 329 bool writes_errno;
71dbabcc
JH
330
331 modref_summary_lto ();
332 ~modref_summary_lto ();
333 void dump (FILE *);
85ebbabd 334 bool useful_p (int ecf_flags, bool check_flags = true);
71dbabcc
JH
335};
336
337/* Summary for a single function which this pass produces. */
338
339modref_summary_lto::modref_summary_lto ()
6cef01c3 340 : loads (NULL), stores (NULL), writes_errno (NULL)
71dbabcc
JH
341{
342}
343
344modref_summary_lto::~modref_summary_lto ()
345{
346 if (loads)
347 ggc_delete (loads);
348 if (stores)
349 ggc_delete (stores);
350}
351
352
85ebbabd
JH
353/* Return true if lto summary is potentially useful for optimization.
354 If CHECK_FLAGS is false assume that arg_flags are useful. */
67c935c8
JH
355
356bool
85ebbabd 357modref_summary_lto::useful_p (int ecf_flags, bool check_flags)
67c935c8
JH
358{
359 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
360 return false;
85ebbabd
JH
361 if (arg_flags.length () && !check_flags)
362 return true;
363 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
364 return true;
365 arg_flags.release ();
67c935c8
JH
366 if (loads && !loads->every_base)
367 return true;
368 if (ecf_flags & ECF_PURE)
369 return false;
e24817aa 370 return stores && !stores->every_base;
67c935c8
JH
371}
372
c33f4742
JH
373/* Dump A to OUT. */
374
375static void
376dump_access (modref_access_node *a, FILE *out)
377{
c34db4b6
JH
378 fprintf (out, " access:");
379 if (a->parm_index != -1)
380 {
381 fprintf (out, " Parm %i", a->parm_index);
382 if (a->parm_offset_known)
383 {
384 fprintf (out, " param offset:");
385 print_dec ((poly_int64_pod)a->parm_offset, out, SIGNED);
386 }
387 }
388 if (a->range_info_useful_p ())
389 {
390 fprintf (out, " offset:");
391 print_dec ((poly_int64_pod)a->offset, out, SIGNED);
392 fprintf (out, " size:");
393 print_dec ((poly_int64_pod)a->size, out, SIGNED);
394 fprintf (out, " max_size:");
395 print_dec ((poly_int64_pod)a->max_size, out, SIGNED);
396 }
397 fprintf (out, "\n");
c33f4742
JH
398}
399
d119f34c
JH
400/* Dump records TT to OUT. */
401
402static void
403dump_records (modref_records *tt, FILE *out)
404{
405 fprintf (out, " Limits: %i bases, %i refs\n",
406 (int)tt->max_bases, (int)tt->max_refs);
407 if (tt->every_base)
408 {
409 fprintf (out, " Every base\n");
410 return;
411 }
412 size_t i;
413 modref_base_node <alias_set_type> *n;
414 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
415 {
416 fprintf (out, " Base %i: alias set %i\n", (int)i, n->base);
417 if (n->every_ref)
418 {
419 fprintf (out, " Every ref\n");
420 continue;
421 }
422 size_t j;
423 modref_ref_node <alias_set_type> *r;
424 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
425 {
426 fprintf (out, " Ref %i: alias set %i\n", (int)j, r->ref);
c33f4742
JH
427 if (r->every_access)
428 {
ada353b8 429 fprintf (out, " Every access\n");
c33f4742
JH
430 continue;
431 }
432 size_t k;
433 modref_access_node *a;
434 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
435 dump_access (a, out);
d119f34c
JH
436 }
437 }
438}
439
440/* Dump records TT to OUT. */
441
442static void
443dump_lto_records (modref_records_lto *tt, FILE *out)
444{
445 fprintf (out, " Limits: %i bases, %i refs\n",
446 (int)tt->max_bases, (int)tt->max_refs);
447 if (tt->every_base)
448 {
449 fprintf (out, " Every base\n");
450 return;
451 }
452 size_t i;
453 modref_base_node <tree> *n;
454 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
455 {
456 fprintf (out, " Base %i:", (int)i);
457 print_generic_expr (dump_file, n->base);
458 fprintf (out, " (alias set %i)\n",
9044db88 459 n->base ? get_alias_set (n->base) : 0);
d119f34c
JH
460 if (n->every_ref)
461 {
462 fprintf (out, " Every ref\n");
463 continue;
464 }
465 size_t j;
466 modref_ref_node <tree> *r;
467 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
468 {
469 fprintf (out, " Ref %i:", (int)j);
470 print_generic_expr (dump_file, r->ref);
471 fprintf (out, " (alias set %i)\n",
9044db88 472 r->ref ? get_alias_set (r->ref) : 0);
c33f4742
JH
473 if (r->every_access)
474 {
56cb815b 475 fprintf (out, " Every access\n");
c33f4742
JH
476 continue;
477 }
478 size_t k;
479 modref_access_node *a;
480 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
481 dump_access (a, out);
d119f34c
JH
482 }
483 }
484}
485
85ebbabd 486/* Dump all escape points of NODE to OUT. */
520d5ad3
JH
487
488static void
85ebbabd 489dump_modref_edge_summaries (FILE *out, cgraph_node *node, int depth)
520d5ad3 490{
85ebbabd
JH
491 int i = 0;
492 if (!escape_summaries)
493 return;
494 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
495 {
496 class escape_summary *sum = escape_summaries->get (e);
497 if (sum)
498 {
499 fprintf (out, "%*sIndirect call %i in %s escapes:",
500 depth, "", i, node->dump_name ());
501 sum->dump (out);
502 }
503 i++;
504 }
505 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
506 {
507 if (!e->inline_failed)
508 dump_modref_edge_summaries (out, e->callee, depth + 1);
509 class escape_summary *sum = escape_summaries->get (e);
510 if (sum)
511 {
512 fprintf (out, "%*sCall %s->%s escapes:", depth, "",
513 node->dump_name (), e->callee->dump_name ());
514 sum->dump (out);
515 }
516 class fnspec_summary *fsum = fnspec_summaries->get (e);
517 if (fsum)
518 {
519 fprintf (out, "%*sCall %s->%s fnspec: %s\n", depth, "",
520 node->dump_name (), e->callee->dump_name (),
521 fsum->fnspec);
522 }
523 }
524}
525
526/* Remove all call edge summaries associated with NODE. */
527
528static void
529remove_modref_edge_summaries (cgraph_node *node)
530{
531 if (!escape_summaries)
532 return;
533 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
534 escape_summaries->remove (e);
535 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
536 {
537 if (!e->inline_failed)
538 remove_modref_edge_summaries (e->callee);
539 escape_summaries->remove (e);
540 fnspec_summaries->remove (e);
541 }
520d5ad3
JH
542}
543
d119f34c
JH
544/* Dump summary. */
545
546void
547modref_summary::dump (FILE *out)
548{
6cef01c3
JH
549 if (loads)
550 {
551 fprintf (out, " loads:\n");
552 dump_records (loads, out);
553 }
554 if (stores)
555 {
556 fprintf (out, " stores:\n");
557 dump_records (stores, out);
558 }
617695cd
JH
559 if (writes_errno)
560 fprintf (out, " Writes errno\n");
520d5ad3
JH
561 if (arg_flags.length ())
562 {
563 for (unsigned int i = 0; i < arg_flags.length (); i++)
564 if (arg_flags[i])
565 {
566 fprintf (out, " parm %i flags:", i);
567 dump_eaf_flags (out, arg_flags[i]);
568 }
569 }
71dbabcc
JH
570}
571
572/* Dump summary. */
573
574void
575modref_summary_lto::dump (FILE *out)
576{
56cb815b
JH
577 fprintf (out, " loads:\n");
578 dump_lto_records (loads, out);
579 fprintf (out, " stores:\n");
580 dump_lto_records (stores, out);
6cef01c3
JH
581 if (writes_errno)
582 fprintf (out, " Writes errno\n");
85ebbabd
JH
583 if (arg_flags.length ())
584 {
585 for (unsigned int i = 0; i < arg_flags.length (); i++)
586 if (arg_flags[i])
587 {
588 fprintf (out, " parm %i flags:", i);
589 dump_eaf_flags (out, arg_flags[i]);
590 }
591 }
d119f34c
JH
592}
593
d119f34c
JH
594/* Get function summary for FUNC if it exists, return NULL otherwise. */
595
596modref_summary *
597get_modref_function_summary (cgraph_node *func)
598{
599 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
71dbabcc 600 if (!optimization_summaries)
d119f34c
JH
601 return NULL;
602
603 /* A single function body may be represented by multiple symbols with
604 different visibility. For example, if FUNC is an interposable alias,
605 we don't want to return anything, even if we have summary for the target
606 function. */
607 enum availability avail;
608 func = func->function_or_virtual_thunk_symbol
520d5ad3
JH
609 (&avail, current_function_decl ?
610 cgraph_node::get (current_function_decl) : NULL);
d119f34c
JH
611 if (avail <= AVAIL_INTERPOSABLE)
612 return NULL;
613
71dbabcc
JH
614 modref_summary *r = optimization_summaries->get (func);
615 return r;
d119f34c
JH
616}
617
c33f4742
JH
618/* Construct modref_access_node from REF. */
619static modref_access_node
620get_access (ao_ref *ref)
621{
c33f4742
JH
622 tree base;
623
c34db4b6
JH
624 base = ao_ref_base (ref);
625 modref_access_node a = {ref->offset, ref->size, ref->max_size,
626 0, -1, false};
c33f4742
JH
627 if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
628 {
2bdf324f 629 tree memref = base;
c33f4742
JH
630 base = TREE_OPERAND (base, 0);
631 if (TREE_CODE (base) == SSA_NAME
632 && SSA_NAME_IS_DEFAULT_DEF (base)
633 && TREE_CODE (SSA_NAME_VAR (base)) == PARM_DECL)
634 {
635 a.parm_index = 0;
636 for (tree t = DECL_ARGUMENTS (current_function_decl);
637 t != SSA_NAME_VAR (base); t = DECL_CHAIN (t))
638 {
639 if (!t)
640 {
641 a.parm_index = -1;
642 break;
643 }
644 a.parm_index++;
645 }
2bdf324f
JH
646 if (TREE_CODE (memref) == MEM_REF)
647 {
648 a.parm_offset_known
649 = wi::to_poly_wide (TREE_OPERAND
650 (memref, 1)).to_shwi (&a.parm_offset);
651 }
652 else
653 a.parm_offset_known = false;
c33f4742
JH
654 }
655 else
656 a.parm_index = -1;
657 }
658 else
659 a.parm_index = -1;
660 return a;
661}
662
d119f34c
JH
663/* Record access into the modref_records data structure. */
664
665static void
666record_access (modref_records *tt, ao_ref *ref)
667{
668 alias_set_type base_set = !flag_strict_aliasing ? 0
669 : ao_ref_base_alias_set (ref);
670 alias_set_type ref_set = !flag_strict_aliasing ? 0
671 : (ao_ref_alias_set (ref));
c33f4742 672 modref_access_node a = get_access (ref);
d119f34c
JH
673 if (dump_file)
674 {
c33f4742
JH
675 fprintf (dump_file, " - Recording base_set=%i ref_set=%i parm=%i\n",
676 base_set, ref_set, a.parm_index);
d119f34c 677 }
c33f4742 678 tt->insert (base_set, ref_set, a);
d119f34c
JH
679}
680
681/* IPA version of record_access_tree. */
682
683static void
684record_access_lto (modref_records_lto *tt, ao_ref *ref)
685{
686 /* get_alias_set sometimes use different type to compute the alias set
687 than TREE_TYPE (base). Do same adjustments. */
688 tree base_type = NULL_TREE, ref_type = NULL_TREE;
689 if (flag_strict_aliasing)
690 {
691 tree base;
692
693 base = ref->ref;
694 while (handled_component_p (base))
695 base = TREE_OPERAND (base, 0);
696
697 base_type = reference_alias_ptr_type_1 (&base);
698
699 if (!base_type)
700 base_type = TREE_TYPE (base);
701 else
702 base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
703 ? NULL_TREE : TREE_TYPE (base_type);
704
705 tree ref_expr = ref->ref;
706 ref_type = reference_alias_ptr_type_1 (&ref_expr);
707
708 if (!ref_type)
709 ref_type = TREE_TYPE (ref_expr);
710 else
711 ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
712 ? NULL_TREE : TREE_TYPE (ref_type);
713
714 /* Sanity check that we are in sync with what get_alias_set does. */
715 gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
716 || get_alias_set (base_type)
717 == ao_ref_base_alias_set (ref));
718 gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
719 || get_alias_set (ref_type)
720 == ao_ref_alias_set (ref));
721
722 /* Do not bother to record types that have no meaningful alias set.
723 Also skip variably modified types since these go to local streams. */
724 if (base_type && (!get_alias_set (base_type)
725 || variably_modified_type_p (base_type, NULL_TREE)))
726 base_type = NULL_TREE;
727 if (ref_type && (!get_alias_set (ref_type)
728 || variably_modified_type_p (ref_type, NULL_TREE)))
729 ref_type = NULL_TREE;
730 }
c33f4742 731 modref_access_node a = get_access (ref);
d119f34c
JH
732 if (dump_file)
733 {
734 fprintf (dump_file, " - Recording base type:");
735 print_generic_expr (dump_file, base_type);
736 fprintf (dump_file, " (alias set %i) ref type:",
737 base_type ? get_alias_set (base_type) : 0);
738 print_generic_expr (dump_file, ref_type);
c33f4742
JH
739 fprintf (dump_file, " (alias set %i) parm:%i\n",
740 ref_type ? get_alias_set (ref_type) : 0,
741 a.parm_index);
d119f34c
JH
742 }
743
c33f4742 744 tt->insert (base_type, ref_type, a);
d119f34c
JH
745}
746
747/* Returns true if and only if we should store the access to EXPR.
748 Some accesses, e.g. loads from automatic variables, are not interesting. */
749
750static bool
751record_access_p (tree expr)
752{
e977dd5e 753 if (refs_local_or_readonly_memory_p (expr))
d119f34c
JH
754 {
755 if (dump_file)
e977dd5e 756 fprintf (dump_file, " - Read-only or local, ignoring.\n");
d119f34c
JH
757 return false;
758 }
d119f34c
JH
759 return true;
760}
761
85ebbabd
JH
762/* Return true if ECF flags says that return value can be ignored. */
763
764static bool
765ignore_retval_p (tree caller, int flags)
766{
767 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
768 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
769 return true;
770 return false;
771}
772
d119f34c
JH
773/* Return true if ECF flags says that stores can be ignored. */
774
775static bool
776ignore_stores_p (tree caller, int flags)
777{
85ebbabd 778 if (flags & (ECF_PURE | ECF_CONST | ECF_NOVOPS))
d119f34c
JH
779 return true;
780 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
781 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
782 return true;
783 return false;
784}
785
617695cd
JH
786/* Determine parm_map for argument I of STMT. */
787
788modref_parm_map
789parm_map_for_arg (gimple *stmt, int i)
790{
791 tree op = gimple_call_arg (stmt, i);
792 bool offset_known;
793 poly_int64 offset;
794 struct modref_parm_map parm_map;
795
ea937e7d
JH
796 parm_map.parm_offset_known = false;
797 parm_map.parm_offset = 0;
798
617695cd
JH
799 offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
800 if (TREE_CODE (op) == SSA_NAME
801 && SSA_NAME_IS_DEFAULT_DEF (op)
802 && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
803 {
804 int index = 0;
805 for (tree t = DECL_ARGUMENTS (current_function_decl);
806 t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
807 {
808 if (!t)
809 {
810 index = -1;
811 break;
812 }
813 index++;
814 }
815 parm_map.parm_index = index;
816 parm_map.parm_offset_known = offset_known;
817 parm_map.parm_offset = offset;
818 }
819 else if (points_to_local_or_readonly_memory_p (op))
820 parm_map.parm_index = -2;
821 else
822 parm_map.parm_index = -1;
823 return parm_map;
824}
825
ada353b8
JH
826/* Merge side effects of call STMT to function with CALLEE_SUMMARY
827 int CUR_SUMMARY. Return true if something changed.
828 If IGNORE_STORES is true, do not merge stores. */
829
830bool
831merge_call_side_effects (modref_summary *cur_summary,
832 gimple *stmt, modref_summary *callee_summary,
56cb815b 833 bool ignore_stores, cgraph_node *callee_node)
ada353b8 834{
c34db4b6 835 auto_vec <modref_parm_map, 32> parm_map;
ada353b8
JH
836 bool changed = false;
837
56cb815b
JH
838 if (dump_file)
839 fprintf (dump_file, " - Merging side effects of %s with parm map:",
840 callee_node->dump_name ());
841
617695cd
JH
842 /* We can not safely optimize based on summary of callee if it does
843 not always bind to current def: it is possible that memory load
844 was optimized out earlier which may not happen in the interposed
845 variant. */
846 if (!callee_node->binds_to_current_def_p ())
847 {
848 if (dump_file)
849 fprintf (dump_file, " - May be interposed: collapsing loads.\n");
850 cur_summary->loads->collapse ();
851 }
852
520d5ad3 853 parm_map.safe_grow_cleared (gimple_call_num_args (stmt), true);
ada353b8
JH
854 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
855 {
617695cd 856 parm_map[i] = parm_map_for_arg (stmt, i);
56cb815b 857 if (dump_file)
c7b6a758
JH
858 {
859 fprintf (dump_file, " %i", parm_map[i].parm_index);
860 if (parm_map[i].parm_offset_known)
861 {
862 fprintf (dump_file, " offset:");
863 print_dec ((poly_int64_pod)parm_map[i].parm_offset,
864 dump_file, SIGNED);
865 }
866 }
ada353b8 867 }
56cb815b
JH
868 if (dump_file)
869 fprintf (dump_file, "\n");
ada353b8
JH
870
871 /* Merge with callee's summary. */
56cb815b 872 changed |= cur_summary->loads->merge (callee_summary->loads, &parm_map);
ada353b8 873 if (!ignore_stores)
617695cd
JH
874 {
875 changed |= cur_summary->stores->merge (callee_summary->stores,
876 &parm_map);
877 if (!cur_summary->writes_errno
878 && callee_summary->writes_errno)
879 {
880 cur_summary->writes_errno = true;
881 changed = true;
882 }
883 }
ada353b8
JH
884 return changed;
885}
886
617695cd
JH
887/* Return access mode for argument I of call STMT with FNSPEC. */
888
889static modref_access_node
890get_access_for_fnspec (gcall *call, attr_fnspec &fnspec,
891 unsigned int i, modref_parm_map &map)
892{
893 tree size = NULL_TREE;
894 unsigned int size_arg;
895
896 if (!fnspec.arg_specified_p (i))
897 ;
898 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
899 size = gimple_call_arg (call, size_arg);
900 else if (fnspec.arg_access_size_given_by_type_p (i))
901 {
902 tree callee = gimple_call_fndecl (call);
903 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
904
905 for (unsigned int p = 0; p < i; p++)
906 t = TREE_CHAIN (t);
907 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
908 }
909 modref_access_node a = {0, -1, -1,
910 map.parm_offset, map.parm_index,
911 map.parm_offset_known};
912 poly_int64 size_hwi;
913 if (size
914 && poly_int_tree_p (size, &size_hwi)
915 && coeffs_in_range_p (size_hwi, 0,
916 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
917 {
918 a.size = -1;
919 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
920 }
921 return a;
922}
923
6cef01c3
JH
924/* Collapse loads and return true if something changed. */
925
926static bool
927collapse_loads (modref_summary *cur_summary,
928 modref_summary_lto *cur_summary_lto)
929{
930 bool changed = false;
931
932 if (cur_summary && !cur_summary->loads->every_base)
933 {
934 cur_summary->loads->collapse ();
935 changed = true;
936 }
937 if (cur_summary_lto
938 && !cur_summary_lto->loads->every_base)
939 {
940 cur_summary_lto->loads->collapse ();
941 changed = true;
942 }
943 return changed;
944}
945
946/* Collapse loads and return true if something changed. */
947
948static bool
949collapse_stores (modref_summary *cur_summary,
950 modref_summary_lto *cur_summary_lto)
951{
952 bool changed = false;
953
954 if (cur_summary && !cur_summary->stores->every_base)
955 {
956 cur_summary->stores->collapse ();
957 changed = true;
958 }
959 if (cur_summary_lto
960 && !cur_summary_lto->stores->every_base)
961 {
962 cur_summary_lto->stores->collapse ();
963 changed = true;
964 }
965 return changed;
966}
967
968
617695cd
JH
969/* Apply side effects of call STMT to CUR_SUMMARY using FNSPEC.
970 If IGNORE_STORES is true ignore them.
971 Return false if no useful summary can be produced. */
972
973static bool
6cef01c3
JH
974process_fnspec (modref_summary *cur_summary,
975 modref_summary_lto *cur_summary_lto,
976 gcall *call, bool ignore_stores)
617695cd
JH
977{
978 attr_fnspec fnspec = gimple_call_fnspec (call);
979 if (!fnspec.known_p ())
980 {
981 if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
982 fprintf (dump_file, " Builtin with no fnspec: %s\n",
983 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call))));
984 if (ignore_stores)
985 {
6cef01c3 986 collapse_loads (cur_summary, cur_summary_lto);
617695cd
JH
987 return true;
988 }
989 return false;
990 }
991 if (fnspec.global_memory_read_p ())
6cef01c3 992 collapse_loads (cur_summary, cur_summary_lto);
617695cd
JH
993 else
994 {
995 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
996 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
997 ;
998 else if (!fnspec.arg_specified_p (i)
999 || fnspec.arg_maybe_read_p (i))
1000 {
1001 modref_parm_map map = parm_map_for_arg (call, i);
1002
1003 if (map.parm_index == -2)
1004 continue;
1005 if (map.parm_index == -1)
1006 {
6cef01c3 1007 collapse_loads (cur_summary, cur_summary_lto);
617695cd
JH
1008 break;
1009 }
6cef01c3
JH
1010 if (cur_summary)
1011 cur_summary->loads->insert (0, 0,
1012 get_access_for_fnspec (call,
1013 fnspec, i,
1014 map));
1015 if (cur_summary_lto)
1016 cur_summary_lto->loads->insert (0, 0,
1017 get_access_for_fnspec (call,
1018 fnspec, i,
1019 map));
617695cd
JH
1020 }
1021 }
1022 if (ignore_stores)
1023 return true;
1024 if (fnspec.global_memory_written_p ())
6cef01c3 1025 collapse_stores (cur_summary, cur_summary_lto);
617695cd
JH
1026 else
1027 {
1028 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
1029 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
1030 ;
1031 else if (!fnspec.arg_specified_p (i)
1032 || fnspec.arg_maybe_written_p (i))
1033 {
1034 modref_parm_map map = parm_map_for_arg (call, i);
1035
1036 if (map.parm_index == -2)
1037 continue;
1038 if (map.parm_index == -1)
1039 {
6cef01c3 1040 collapse_stores (cur_summary, cur_summary_lto);
617695cd
JH
1041 break;
1042 }
6cef01c3
JH
1043 if (cur_summary)
1044 cur_summary->stores->insert (0, 0,
1045 get_access_for_fnspec (call,
1046 fnspec, i,
1047 map));
1048 if (cur_summary_lto)
1049 cur_summary_lto->stores->insert (0, 0,
1050 get_access_for_fnspec (call,
1051 fnspec, i,
1052 map));
617695cd
JH
1053 }
1054 if (fnspec.errno_maybe_written_p () && flag_errno_math)
6cef01c3
JH
1055 {
1056 if (cur_summary)
1057 cur_summary->writes_errno = true;
1058 if (cur_summary_lto)
1059 cur_summary_lto->writes_errno = true;
1060 }
617695cd
JH
1061 }
1062 return true;
1063}
1064
ada353b8
JH
1065/* Analyze function call STMT in function F.
1066 Remember recursive calls in RECURSIVE_CALLS. */
d119f34c
JH
1067
1068static bool
6cef01c3 1069analyze_call (modref_summary *cur_summary, modref_summary_lto *cur_summary_lto,
617695cd 1070 gcall *stmt, vec <gimple *> *recursive_calls)
d119f34c
JH
1071{
1072 /* Check flags on the function call. In certain cases, analysis can be
1073 simplified. */
1074 int flags = gimple_call_flags (stmt);
1075 if (flags & (ECF_CONST | ECF_NOVOPS))
1076 {
1077 if (dump_file)
1078 fprintf (dump_file,
1079 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
1080 "except for args.\n");
1081 return true;
1082 }
1083
1084 /* Pure functions do not affect global memory. Stores by functions which are
1085 noreturn and do not throw can safely be ignored. */
1086 bool ignore_stores = ignore_stores_p (current_function_decl, flags);
1087
1088 /* Next, we try to get the callee's function declaration. The goal is to
1089 merge their summary with ours. */
1090 tree callee = gimple_call_fndecl (stmt);
1091
1092 /* Check if this is an indirect call. */
1093 if (!callee)
1094 {
d119f34c 1095 if (dump_file)
6cef01c3
JH
1096 fprintf (dump_file, gimple_call_internal_p (stmt)
1097 ? " - Internal call" : " - Indirect call.\n");
1098 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
d119f34c 1099 }
6cef01c3
JH
1100 /* We only need to handle internal calls in IPA mode. */
1101 gcc_checking_assert (!cur_summary_lto);
d119f34c
JH
1102
1103 struct cgraph_node *callee_node = cgraph_node::get_create (callee);
1104
d119f34c
JH
1105 /* If this is a recursive call, the target summary is the same as ours, so
1106 there's nothing to do. */
1107 if (recursive_call_p (current_function_decl, callee))
1108 {
ada353b8 1109 recursive_calls->safe_push (stmt);
d119f34c
JH
1110 if (dump_file)
1111 fprintf (dump_file, " - Skipping recursive call.\n");
1112 return true;
1113 }
1114
1115 gcc_assert (callee_node != NULL);
1116
1117 /* Get the function symbol and its availability. */
1118 enum availability avail;
1119 callee_node = callee_node->function_symbol (&avail);
1120 if (avail <= AVAIL_INTERPOSABLE)
1121 {
d119f34c
JH
1122 if (dump_file)
1123 fprintf (dump_file, " - Function availability <= AVAIL_INTERPOSABLE.\n");
6cef01c3 1124 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
d119f34c
JH
1125 }
1126
1127 /* Get callee's modref summary. As above, if there's no summary, we either
1128 have to give up or, if stores are ignored, we can just purge loads. */
71dbabcc 1129 modref_summary *callee_summary = optimization_summaries->get (callee_node);
d119f34c
JH
1130 if (!callee_summary)
1131 {
d119f34c
JH
1132 if (dump_file)
1133 fprintf (dump_file, " - No modref summary available for callee.\n");
6cef01c3 1134 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
d119f34c
JH
1135 }
1136
56cb815b
JH
1137 merge_call_side_effects (cur_summary, stmt, callee_summary, ignore_stores,
1138 callee_node);
d119f34c
JH
1139
1140 return true;
1141}
1142
/* Support analysis in non-lto and lto mode in parallel.
   Either pointer may be NULL when the corresponding mode is not being
   computed; helpers must check before dereferencing.  */

struct summary_ptrs
{
  struct modref_summary *nolto;
  struct modref_summary_lto *lto;
};
1150
d119f34c
JH
1151/* Helper for analyze_stmt. */
1152
1153static bool
1154analyze_load (gimple *, tree, tree op, void *data)
1155{
71dbabcc
JH
1156 modref_summary *summary = ((summary_ptrs *)data)->nolto;
1157 modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;
d119f34c
JH
1158
1159 if (dump_file)
1160 {
1161 fprintf (dump_file, " - Analyzing load: ");
1162 print_generic_expr (dump_file, op);
1163 fprintf (dump_file, "\n");
1164 }
1165
1166 if (!record_access_p (op))
1167 return false;
1168
1169 ao_ref r;
1170 ao_ref_init (&r, op);
1171
71dbabcc 1172 if (summary)
d119f34c 1173 record_access (summary->loads, &r);
71dbabcc
JH
1174 if (summary_lto)
1175 record_access_lto (summary_lto->loads, &r);
d119f34c
JH
1176 return false;
1177}
1178
1179/* Helper for analyze_stmt. */
1180
1181static bool
1182analyze_store (gimple *, tree, tree op, void *data)
1183{
71dbabcc
JH
1184 modref_summary *summary = ((summary_ptrs *)data)->nolto;
1185 modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;
d119f34c
JH
1186
1187 if (dump_file)
1188 {
1189 fprintf (dump_file, " - Analyzing store: ");
1190 print_generic_expr (dump_file, op);
1191 fprintf (dump_file, "\n");
1192 }
1193
1194 if (!record_access_p (op))
1195 return false;
1196
1197 ao_ref r;
1198 ao_ref_init (&r, op);
1199
71dbabcc
JH
1200 if (summary)
1201 record_access (summary->stores, &r);
1202 if (summary_lto)
1203 record_access_lto (summary_lto->stores, &r);
d119f34c
JH
1204 return false;
1205}
1206
1207/* Analyze statement STMT of function F.
1208 If IPA is true do not merge in side effects of calls. */
1209
1210static bool
71dbabcc
JH
1211analyze_stmt (modref_summary *summary, modref_summary_lto *summary_lto,
1212 gimple *stmt, bool ipa, vec <gimple *> *recursive_calls)
d119f34c 1213{
8a2fd716
JJ
1214 /* In general we can not ignore clobbers because they are barriers for code
1215 motion, however after inlining it is safe to do because local optimization
3991912e 1216 passes do not consider clobbers from other functions.
8a2fd716 1217 Similar logic is in ipa-pure-const.c. */
3991912e 1218 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
67a5c215 1219 return true;
3991912e 1220
71dbabcc
JH
1221 struct summary_ptrs sums = {summary, summary_lto};
1222
d119f34c 1223 /* Analyze all loads and stores in STMT. */
71dbabcc 1224 walk_stmt_load_store_ops (stmt, &sums,
d119f34c 1225 analyze_load, analyze_store);
d119f34c
JH
1226
1227 switch (gimple_code (stmt))
1228 {
1229 case GIMPLE_ASM:
1230 /* If the ASM statement does not read nor write memory, there's nothing
1231 to do. Otherwise just give up. */
1232 if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
1233 return true;
1234 if (dump_file)
1235 fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
1236 "which clobbers memory.\n");
1237 return false;
1238 case GIMPLE_CALL:
6cef01c3
JH
1239 if (!ipa || gimple_call_internal_p (stmt))
1240 return analyze_call (summary, summary_lto,
1241 as_a <gcall *> (stmt), recursive_calls);
1242 else
1243 {
1244 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
1245
1246 if (fnspec.known_p ()
1247 && (!fnspec.global_memory_read_p ()
1248 || !fnspec.global_memory_written_p ()))
1249 {
1250 fnspec_summaries->get_create
1251 (cgraph_node::get (current_function_decl)->get_edge (stmt))
1252 ->fnspec = xstrdup (fnspec.get_str ());
1253 if (dump_file)
1254 fprintf (dump_file, " Recorded fnspec %s\n", fnspec.get_str ());
1255 }
1256 }
d119f34c
JH
1257 return true;
1258 default:
1259 /* Nothing to do for other types of statements. */
1260 return true;
1261 }
1262}
1263
71dbabcc
JH
1264/* Remove summary of current function because during the function body
1265 scan we determined it is not useful. LTO, NOLTO and IPA determines the
1266 mode of scan. */
1267
1268static void
1269remove_summary (bool lto, bool nolto, bool ipa)
1270{
1271 cgraph_node *fnode = cgraph_node::get (current_function_decl);
1272 if (!ipa)
1273 optimization_summaries->remove (fnode);
1274 else
1275 {
1276 if (nolto)
1277 summaries->remove (fnode);
1278 if (lto)
1279 summaries_lto->remove (fnode);
85ebbabd 1280 remove_modref_edge_summaries (fnode);
71dbabcc
JH
1281 }
1282 if (dump_file)
1283 fprintf (dump_file,
1284 " - modref done with result: not tracked.\n");
1285}
1286
520d5ad3
JH
1287/* Return true if OP accesses memory pointed to by SSA_NAME. */
1288
1289bool
1290memory_access_to (tree op, tree ssa_name)
1291{
1292 tree base = get_base_address (op);
1293 if (!base)
1294 return false;
1295 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
1296 return false;
1297 return TREE_OPERAND (base, 0) == ssa_name;
1298}
1299
1300/* Consider statement val = *arg.
1301 return EAF flags of ARG that can be determined from EAF flags of VAL
1302 (which are known to be FLAGS). If IGNORE_STORES is true we can ignore
1303 all stores to VAL, i.e. when handling noreturn function. */
1304
1305static int
1306deref_flags (int flags, bool ignore_stores)
1307{
bb07490a 1308 int ret = EAF_NODIRECTESCAPE;
520d5ad3
JH
1309 if (flags & EAF_UNUSED)
1310 ret |= EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1311 else
1312 {
1313 if ((flags & EAF_NOCLOBBER) || ignore_stores)
1314 ret |= EAF_NOCLOBBER;
1315 if ((flags & EAF_NOESCAPE) || ignore_stores)
1316 ret |= EAF_NOESCAPE;
1317 }
1318 return ret;
1319}
1320
85ebbabd
JH
1321namespace {
1322
1323/* Description of an escape point. */
1324
1325struct escape_point
1326{
1327 /* Value escapes to this call. */
1328 gcall *call;
1329 /* Argument it escapes to. */
1330 int arg;
1331 /* Flags already known about the argument (this can save us from recording
1332 esape points if local analysis did good job already). */
1333 char min_flags;
1334 /* Does value escape directly or indiretly? */
1335 bool direct;
1336};
1337
1338class modref_lattice
1339{
1340public:
1341 /* EAF flags of the SSA name. */
1342 int flags;
1343 /* DFS bookkkeeping: we don't do real dataflow yet. */
1344 bool known;
1345 bool open;
1346
1347 /* When doing IPA analysis we can not merge in callee escape points;
1348 Only remember them and do the merging at IPA propagation time. */
1349 vec <escape_point, va_heap, vl_ptr> escape_points;
1350
1351 void init ();
1352 void release ();
1353 bool merge (const modref_lattice &with);
1354 bool merge (int flags);
1355 bool merge_deref (const modref_lattice &with, bool ignore_stores);
1356 bool merge_direct_load ();
1357 bool merge_direct_store ();
1358 bool add_escape_point (gcall *call, int arg, int min_flags, bool diret);
1359 void dump (FILE *out, int indent = 0) const;
1360};
1361
1362/* Lattices are saved to vectors, so keep them PODs. */
1363void
1364modref_lattice::init ()
1365{
bb07490a
JH
1366 flags = EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE | EAF_UNUSED
1367 | EAF_NODIRECTESCAPE;
85ebbabd
JH
1368 open = true;
1369 known = false;
1370}
1371
1372/* Release memory. */
1373void
1374modref_lattice::release ()
1375{
1376 escape_points.release ();
1377}
1378
1379/* Dump lattice to OUT; indent with INDENT spaces. */
1380
1381void
1382modref_lattice::dump (FILE *out, int indent) const
1383{
1384 dump_eaf_flags (out, flags);
1385 if (escape_points.length ())
1386 {
1387 fprintf (out, "%*sEscapes:\n", indent, "");
1388 for (unsigned int i = 0; i < escape_points.length (); i++)
1389 {
1390 fprintf (out, "%*s Arg %i (%s) min flags", indent, "",
1391 escape_points[i].arg,
1392 escape_points[i].direct ? "direct" : "indirect");
1393 dump_eaf_flags (out, flags, false);
1394 fprintf (out, " in call ");
1395 print_gimple_stmt (out, escape_points[i].call, 0);
1396 }
1397 }
1398}
1399
1400/* Add escape point CALL, ARG, MIN_FLAGS, DIRECT. Return false if such escape
1401 point exists. */
1402
1403bool
1404modref_lattice::add_escape_point (gcall *call, int arg, int min_flags,
1405 bool direct)
1406{
1407 escape_point *ep;
1408 unsigned int i;
1409
1410 /* If we already determined flags to be bad enough,
1411 * we do not need to record. */
1412 if ((flags & min_flags) == flags)
1413 return false;
1414
1415 FOR_EACH_VEC_ELT (escape_points, i, ep)
1416 if (ep->call == call && ep->arg == arg && ep->direct == direct)
1417 {
1418 if ((ep->min_flags & min_flags) == min_flags)
1419 return false;
1420 ep->min_flags &= min_flags;
1421 return true;
1422 }
1423 /* Give up if max escape points is met. */
1424 if ((int)escape_points.length () > param_modref_max_escape_points)
1425 {
1426 if (dump_file)
1427 fprintf (dump_file, "--param modref-max-escape-points limit reached\n");
1428 merge (0);
1429 return true;
1430 }
1431 escape_point new_ep = {call, arg, min_flags, direct};
1432 escape_points.safe_push (new_ep);
1433 return true;
1434}
1435
1436/* Merge in flags from F. */
1437bool
1438modref_lattice::merge (int f)
1439{
3350e59f
JH
1440 if (f & EAF_UNUSED)
1441 return false;
85ebbabd
JH
1442 if ((flags & f) != flags)
1443 {
1444 flags &= f;
1445 /* Only NOCLOBBER or DIRECT flags alone are not useful (see comments
1446 in tree-ssa-alias.c). Give up earlier. */
1447 if ((flags & ~(EAF_DIRECT | EAF_NOCLOBBER)) == 0)
1448 flags = 0;
1449 if (!flags)
1450 escape_points.release ();
1451 return true;
1452 }
1453 return false;
1454}
1455
1456/* Merge in WITH. Return true if anyting changed. */
1457
1458bool
1459modref_lattice::merge (const modref_lattice &with)
1460{
1461 if (!with.known)
1462 return merge (0);
1463
1464 bool changed = merge (with.flags);
1465
1466 if (!flags)
1467 return changed;
1468 for (unsigned int i = 0; i < with.escape_points.length (); i++)
1469 changed |= add_escape_point (with.escape_points[i].call,
1470 with.escape_points[i].arg,
1471 with.escape_points[i].min_flags,
1472 with.escape_points[i].direct);
1473 return changed;
1474}
1475
1476/* Merge in deref of WITH. If IGNORE_STORES is true do not consider
1477 stores. Return true if anyting changed. */
1478
1479bool
1480modref_lattice::merge_deref (const modref_lattice &with, bool ignore_stores)
1481{
1482 if (!with.known)
1483 return merge (0);
1484
1485 bool changed = merge (deref_flags (with.flags, ignore_stores));
1486
1487 if (!flags)
1488 return changed;
1489 for (unsigned int i = 0; i < with.escape_points.length (); i++)
1490 changed |= add_escape_point (with.escape_points[i].call,
1491 with.escape_points[i].arg,
1492 with.escape_points[i].min_flags,
1493 false);
1494 return changed;
1495}
1496
1497/* Merge in flags for direct load. */
1498
1499bool
1500modref_lattice::merge_direct_load ()
1501{
1502 return merge (~EAF_UNUSED);
1503}
1504
1505/* Merge in flags for direct store. */
1506
1507bool
1508modref_lattice::merge_direct_store ()
1509{
1510 return merge (~(EAF_UNUSED | EAF_NOCLOBBER));
1511}
1512
1513} /* ANON namespace. */
1514
1515static void analyze_ssa_name_flags (tree name,
1516 vec<modref_lattice> &lattice,
1517 int depth, bool ipa);
1518
520d5ad3
JH
1519/* Call statements may return their parameters. Consider argument number
1520 ARG of USE_STMT and determine flags that can needs to be cleared
1521 in case pointer possibly indirectly references from ARG I is returned.
85ebbabd 1522 LATTICE, DEPTH and ipa are same as in analyze_ssa_name_flags. */
520d5ad3 1523
85ebbabd
JH
1524static void
1525merge_call_lhs_flags (gcall *call, int arg, int index, bool deref,
1526 vec<modref_lattice> &lattice,
1527 int depth, bool ipa)
520d5ad3
JH
1528{
1529 /* If there is no return value, no flags are affected. */
1530 if (!gimple_call_lhs (call))
85ebbabd 1531 return;
520d5ad3
JH
1532
1533 /* If we know that function returns given argument and it is not ARG
1534 we can still be happy. */
1535 int flags = gimple_call_return_flags (call);
1536 if ((flags & ERF_RETURNS_ARG)
1537 && (flags & ERF_RETURN_ARG_MASK) != arg)
85ebbabd 1538 return;
520d5ad3
JH
1539
1540 /* If return value is SSA name determine its flags. */
1541 if (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME)
85ebbabd
JH
1542 {
1543 tree lhs = gimple_call_lhs (call);
1544 analyze_ssa_name_flags (lhs, lattice, depth + 1, ipa);
1545 if (deref)
1546 lattice[index].merge (lattice[SSA_NAME_VERSION (lhs)]);
1547 else
1548 lattice[index].merge_deref (lattice[SSA_NAME_VERSION (lhs)], false);
1549 }
520d5ad3
JH
1550 /* In the case of memory store we can do nothing. */
1551 else
85ebbabd 1552 lattice[index].merge (0);
520d5ad3
JH
1553}
1554
85ebbabd
JH
1555/* Analyze EAF flags for SSA name NAME and store result to LATTICE.
1556 LATTICE is an array of modref_lattices.
1557 DEPTH is a recursion depth used to make debug output prettier.
1558 If IPA is true we analyze for IPA propagation (and thus call escape points
1559 are processed later) */
520d5ad3 1560
85ebbabd
JH
1561static void
1562analyze_ssa_name_flags (tree name, vec<modref_lattice> &lattice, int depth,
1563 bool ipa)
520d5ad3
JH
1564{
1565 imm_use_iterator ui;
1566 gimple *use_stmt;
85ebbabd 1567 int index = SSA_NAME_VERSION (name);
520d5ad3
JH
1568
1569 /* See if value is already computed. */
85ebbabd
JH
1570 if (lattice[index].known)
1571 return;
1572 if (lattice[index].open)
520d5ad3 1573 {
85ebbabd
JH
1574 if (dump_file)
1575 fprintf (dump_file,
1576 "%*sGiving up on a cycle in SSA graph\n", depth * 4, "");
1577 return;
520d5ad3
JH
1578 }
1579 if (depth == param_modref_max_depth)
1580 {
1581 if (dump_file)
1582 fprintf (dump_file,
1583 "%*sGiving up on max depth\n", depth * 4, "");
85ebbabd 1584 return;
520d5ad3
JH
1585 }
1586 /* Recursion guard. */
85ebbabd 1587 lattice[index].init ();
520d5ad3
JH
1588
1589 if (dump_file)
1590 {
1591 fprintf (dump_file,
1592 "%*sAnalyzing flags of ssa name: ", depth * 4, "");
1593 print_generic_expr (dump_file, name);
1594 fprintf (dump_file, "\n");
1595 }
1596
1597 FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
1598 {
85ebbabd 1599 if (lattice[index].flags == 0)
520d5ad3
JH
1600 {
1601 BREAK_FROM_IMM_USE_STMT (ui);
1602 }
1603 if (is_gimple_debug (use_stmt))
1604 continue;
1605 if (dump_file)
1606 {
1607 fprintf (dump_file, "%*s Analyzing stmt:", depth * 4, "");
1608 print_gimple_stmt (dump_file, use_stmt, 0);
1609 }
1610
26285af4
JH
1611 /* Gimple return may load the return value.
1612 Returning name counts as an use by tree-ssa-structalias.c */
520d5ad3
JH
1613 if (greturn *ret = dyn_cast <greturn *> (use_stmt))
1614 {
85ebbabd
JH
1615 if (gimple_return_retval (ret) == name)
1616 lattice[index].merge (~EAF_UNUSED);
1617 else if (memory_access_to (gimple_return_retval (ret), name))
1618 lattice[index].merge_direct_load ();
520d5ad3
JH
1619 }
1620 /* Account for LHS store, arg loads and flags from callee function. */
1621 else if (gcall *call = dyn_cast <gcall *> (use_stmt))
1622 {
1623 tree callee = gimple_call_fndecl (call);
1624
1625 /* Recursion would require bit of propagation; give up for now. */
85ebbabd
JH
1626 if (callee && !ipa && recursive_call_p (current_function_decl,
1627 callee))
1628 lattice[index].merge (0);
520d5ad3
JH
1629 else
1630 {
1631 int ecf_flags = gimple_call_flags (call);
1632 bool ignore_stores = ignore_stores_p (current_function_decl,
1633 ecf_flags);
85ebbabd
JH
1634 bool ignore_retval = ignore_retval_p (current_function_decl,
1635 ecf_flags);
520d5ad3
JH
1636
1637 /* Handle *name = func (...). */
1638 if (gimple_call_lhs (call)
1639 && memory_access_to (gimple_call_lhs (call), name))
85ebbabd 1640 lattice[index].merge_direct_store ();
520d5ad3
JH
1641
1642 /* We do not track accesses to the static chain (we could)
1643 so give up. */
1644 if (gimple_call_chain (call)
1645 && (gimple_call_chain (call) == name))
85ebbabd
JH
1646 lattice[index].merge (0);
1647
1648 /* Process internal functions and right away. */
1649 bool record_ipa = ipa && !gimple_call_internal_p (call);
520d5ad3
JH
1650
1651 /* Handle all function parameters. */
85ebbabd
JH
1652 for (unsigned i = 0;
1653 i < gimple_call_num_args (call) && lattice[index].flags; i++)
520d5ad3
JH
1654 /* Name is directly passed to the callee. */
1655 if (gimple_call_arg (call, i) == name)
1656 {
85ebbabd 1657 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
520d5ad3
JH
1658 {
1659 int call_flags = gimple_call_arg_flags (call, i);
1660 if (ignore_stores)
bb07490a
JH
1661 call_flags |= EAF_NOCLOBBER | EAF_NOESCAPE
1662 | EAF_NODIRECTESCAPE;
520d5ad3 1663
85ebbabd
JH
1664 if (!record_ipa)
1665 lattice[index].merge (call_flags);
1666 if (record_ipa)
1667 lattice[index].add_escape_point (call, i,
1668 call_flags, true);
520d5ad3 1669 }
85ebbabd
JH
1670 if (!ignore_retval)
1671 merge_call_lhs_flags (call, i, index, false,
1672 lattice, depth, ipa);
520d5ad3
JH
1673 }
1674 /* Name is dereferenced and passed to a callee. */
1675 else if (memory_access_to (gimple_call_arg (call, i), name))
1676 {
1677 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
85ebbabd 1678 lattice[index].merge_direct_load ();
520d5ad3 1679 else
85ebbabd
JH
1680 {
1681 int call_flags = deref_flags
1682 (gimple_call_arg_flags (call, i), ignore_stores);
1683 if (!record_ipa)
1684 lattice[index].merge (call_flags);
1685 if (record_ipa)
1686 lattice[index].add_escape_point (call, i,
1687 call_flags, false);
1688 }
1689 if (!ignore_retval)
1690 merge_call_lhs_flags (call, i, index, true,
1691 lattice, depth, ipa);
520d5ad3
JH
1692 }
1693 }
520d5ad3
JH
1694 }
1695 else if (gimple_assign_load_p (use_stmt))
1696 {
1697 gassign *assign = as_a <gassign *> (use_stmt);
1698 /* Memory to memory copy. */
1699 if (gimple_store_p (assign))
1700 {
520d5ad3
JH
1701 /* Handle *lhs = *name.
1702
1703 We do not track memory locations, so assume that value
1704 is used arbitrarily. */
1705 if (memory_access_to (gimple_assign_rhs1 (assign), name))
85ebbabd
JH
1706 lattice[index].merge (0);
1707 /* Handle *name = *exp. */
1708 else if (memory_access_to (gimple_assign_lhs (assign), name))
1709 lattice[index].merge_direct_store ();
520d5ad3
JH
1710 }
1711 /* Handle lhs = *name. */
1712 else if (memory_access_to (gimple_assign_rhs1 (assign), name))
85ebbabd
JH
1713 {
1714 tree lhs = gimple_assign_lhs (assign);
1715 analyze_ssa_name_flags (lhs, lattice, depth + 1, ipa);
1716 lattice[index].merge_deref (lattice[SSA_NAME_VERSION (lhs)],
1717 false);
1718 }
520d5ad3
JH
1719 }
1720 else if (gimple_store_p (use_stmt))
1721 {
1722 gassign *assign = dyn_cast <gassign *> (use_stmt);
1723
1724 /* Handle *lhs = name. */
1725 if (assign && gimple_assign_rhs1 (assign) == name)
1726 {
1727 if (dump_file)
1728 fprintf (dump_file, "%*s ssa name saved to memory\n",
1729 depth * 4, "");
85ebbabd 1730 lattice[index].merge (0);
520d5ad3
JH
1731 }
1732 /* Handle *name = exp. */
1733 else if (assign
1734 && memory_access_to (gimple_assign_lhs (assign), name))
0c9687d0
JH
1735 {
1736 /* In general we can not ignore clobbers because they are
1737 barriers for code motion, however after inlining it is safe to
1738 do because local optimization passes do not consider clobbers
1739 from other functions. Similar logic is in ipa-pure-const.c. */
1740 if (!cfun->after_inlining || !gimple_clobber_p (assign))
85ebbabd 1741 lattice[index].merge_direct_store ();
0c9687d0 1742 }
520d5ad3
JH
1743 /* ASM statements etc. */
1744 else if (!assign)
1745 {
1746 if (dump_file)
1747 fprintf (dump_file, "%*s Unhandled store\n",
1748 depth * 4, "");
85ebbabd 1749 lattice[index].merge (0);
520d5ad3
JH
1750 }
1751 }
1752 else if (gassign *assign = dyn_cast <gassign *> (use_stmt))
1753 {
1754 enum tree_code code = gimple_assign_rhs_code (assign);
1755
1756 /* See if operation is a merge as considered by
1757 tree-ssa-structalias.c:find_func_aliases. */
1758 if (!truth_value_p (code)
1759 && code != POINTER_DIFF_EXPR
1760 && (code != POINTER_PLUS_EXPR
1761 || gimple_assign_rhs1 (assign) == name))
85ebbabd
JH
1762 {
1763 tree lhs = gimple_assign_lhs (assign);
1764 analyze_ssa_name_flags (lhs, lattice, depth + 1, ipa);
1765 lattice[index].merge (lattice[SSA_NAME_VERSION (lhs)]);
1766 }
520d5ad3
JH
1767 }
1768 else if (gphi *phi = dyn_cast <gphi *> (use_stmt))
1769 {
85ebbabd
JH
1770 tree result = gimple_phi_result (phi);
1771 analyze_ssa_name_flags (result, lattice, depth + 1, ipa);
1772 lattice[index].merge (lattice[SSA_NAME_VERSION (result)]);
520d5ad3
JH
1773 }
1774 /* Conditions are not considered escape points
1775 by tree-ssa-structalias. */
1776 else if (gimple_code (use_stmt) == GIMPLE_COND)
1777 ;
1778 else
1779 {
1780 if (dump_file)
1781 fprintf (dump_file, "%*s Unhandled stmt\n", depth * 4, "");
85ebbabd 1782 lattice[index].merge (0);
520d5ad3
JH
1783 }
1784
1785 if (dump_file)
1786 {
1787 fprintf (dump_file, "%*s current flags of ", depth * 4, "");
1788 print_generic_expr (dump_file, name);
85ebbabd 1789 lattice[index].dump (dump_file, depth * 4 + 4);
520d5ad3
JH
1790 }
1791 }
1792 if (dump_file)
1793 {
1794 fprintf (dump_file, "%*sflags of ssa name ", depth * 4, "");
1795 print_generic_expr (dump_file, name);
85ebbabd 1796 lattice[index].dump (dump_file, depth * 4 + 2);
520d5ad3 1797 }
85ebbabd
JH
1798 lattice[index].open = false;
1799 lattice[index].known = true;
520d5ad3
JH
1800}
1801
1802/* Determine EAF flags for function parameters. */
1803
1804static void
85ebbabd
JH
1805analyze_parms (modref_summary *summary, modref_summary_lto *summary_lto,
1806 bool ipa)
520d5ad3
JH
1807{
1808 unsigned int parm_index = 0;
1809 unsigned int count = 0;
85ebbabd
JH
1810 int ecf_flags = flags_from_decl_or_type (current_function_decl);
1811
1812 /* For const functions we have nothing to gain by EAF flags. */
1813 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
1814 return;
520d5ad3
JH
1815
1816 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
1817 parm = TREE_CHAIN (parm))
1818 count++;
1819
1820 if (!count)
1821 return;
1822
85ebbabd
JH
1823 auto_vec<modref_lattice> lattice;
1824 lattice.safe_grow_cleared (num_ssa_names, true);
520d5ad3
JH
1825
1826 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm; parm_index++,
1827 parm = TREE_CHAIN (parm))
1828 {
1829 tree name = ssa_default_def (cfun, parm);
3350e59f
JH
1830 if (!name || has_zero_uses (name))
1831 {
1832 /* We do not track non-SSA parameters,
1833 but we want to track unused gimple_regs. */
1834 if (!is_gimple_reg (parm))
1835 continue;
1836 if (summary)
1837 {
1838 if (parm_index >= summary->arg_flags.length ())
1839 summary->arg_flags.safe_grow_cleared (count, true);
1840 summary->arg_flags[parm_index] = EAF_UNUSED;
1841 }
1842 else if (summary_lto)
1843 {
1844 if (parm_index >= summary_lto->arg_flags.length ())
1845 summary_lto->arg_flags.safe_grow_cleared (count, true);
1846 summary_lto->arg_flags[parm_index] = EAF_UNUSED;
1847 }
1848 continue;
1849 }
85ebbabd
JH
1850 analyze_ssa_name_flags (name, lattice, 0, ipa);
1851 int flags = lattice[SSA_NAME_VERSION (name)].flags;
1852
1853 /* For pure functions we have implicit NOCLOBBER
1854 and NOESCAPE. */
1855 if (ecf_flags & ECF_PURE)
bb07490a 1856 flags &= ~(EAF_NOCLOBBER | EAF_NOESCAPE | EAF_NODIRECTESCAPE);
520d5ad3
JH
1857
1858 if (flags)
1859 {
85ebbabd
JH
1860 if (summary)
1861 {
1862 if (parm_index >= summary->arg_flags.length ())
1863 summary->arg_flags.safe_grow_cleared (count, true);
1864 summary->arg_flags[parm_index] = flags;
1865 }
1866 else if (summary_lto)
1867 {
1868 if (parm_index >= summary_lto->arg_flags.length ())
1869 summary_lto->arg_flags.safe_grow_cleared (count, true);
1870 summary_lto->arg_flags[parm_index] = flags;
1871 }
1872 if (lattice[SSA_NAME_VERSION (name)].escape_points.length ())
1873 {
1874 escape_point *ep;
1875 unsigned int ip;
1876 cgraph_node *node = cgraph_node::get (current_function_decl);
1877
1878 gcc_checking_assert (ipa);
1879 FOR_EACH_VEC_ELT
1880 (lattice[SSA_NAME_VERSION (name)].escape_points, ip, ep)
1881 if ((ep->min_flags & flags) != flags)
1882 {
1883 cgraph_edge *e = node->get_edge (ep->call);
1884 struct escape_entry ee = {parm_index, ep->arg,
1885 ep->min_flags, ep->direct};
1886
1887 escape_summaries->get_create (e)->esc.safe_push (ee);
1888 }
1889 }
520d5ad3
JH
1890 }
1891 }
85ebbabd
JH
1892 if (ipa)
1893 for (unsigned int i = 0; i < num_ssa_names; i++)
1894 lattice[i].release ();
520d5ad3
JH
1895}
1896
71dbabcc
JH
1897/* Analyze function F. IPA indicates whether we're running in local mode
1898 (false) or the IPA mode (true). */
d119f34c
JH
1899
1900static void
1901analyze_function (function *f, bool ipa)
1902{
1903 if (dump_file)
67c935c8
JH
1904 fprintf (dump_file, "modref analyzing '%s' (ipa=%i)%s%s\n",
1905 function_name (f), ipa,
1906 TREE_READONLY (current_function_decl) ? " (const)" : "",
1907 DECL_PURE_P (current_function_decl) ? " (pure)" : "");
d119f34c
JH
1908
1909 /* Don't analyze this function if it's compiled with -fno-strict-aliasing. */
1910 if (!flag_ipa_modref)
1911 return;
1912
d119f34c
JH
1913 /* Compute no-LTO summaries when local optimization is going to happen. */
1914 bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
1915 || (in_lto_p && !flag_wpa
1916 && flag_incremental_link != INCREMENTAL_LINK_LTO));
d119f34c
JH
1917 /* Compute LTO when LTO streaming is going to happen. */
1918 bool lto = ipa && ((flag_lto && !in_lto_p)
1919 || flag_wpa
1920 || flag_incremental_link == INCREMENTAL_LINK_LTO);
71dbabcc
JH
1921 cgraph_node *fnode = cgraph_node::get (current_function_decl);
1922
1923 modref_summary *summary = NULL;
1924 modref_summary_lto *summary_lto = NULL;
1925
1926 /* Initialize the summary.
1927 If we run in local mode there is possibly pre-existing summary from
1928 IPA pass. Dump it so it is easy to compare if mod-ref info has
1929 improved. */
1930 if (!ipa)
1931 {
1932 if (!optimization_summaries)
1933 optimization_summaries = modref_summaries::create_ggc (symtab);
1934 else /* Remove existing summary if we are re-running the pass. */
1935 {
1936 if (dump_file
56cb815b
JH
1937 && (summary
1938 = optimization_summaries->get (cgraph_node::get (f->decl)))
1939 != NULL
1940 && summary->loads)
71dbabcc
JH
1941 {
1942 fprintf (dump_file, "Past summary:\n");
1943 optimization_summaries->get
1944 (cgraph_node::get (f->decl))->dump (dump_file);
1945 }
1946 optimization_summaries->remove (cgraph_node::get (f->decl));
1947 }
1948 summary = optimization_summaries->get_create (cgraph_node::get (f->decl));
1949 gcc_checking_assert (nolto && !lto);
1950 }
8a2fd716 1951 /* In IPA mode we analyze every function precisely once. Assert that. */
71dbabcc
JH
1952 else
1953 {
1954 if (nolto)
1955 {
1956 if (!summaries)
1957 summaries = modref_summaries::create_ggc (symtab);
1958 else
1959 summaries->remove (cgraph_node::get (f->decl));
1960 summary = summaries->get_create (cgraph_node::get (f->decl));
1961 }
1962 if (lto)
1963 {
1964 if (!summaries_lto)
1965 summaries_lto = modref_summaries_lto::create_ggc (symtab);
1966 else
1967 summaries_lto->remove (cgraph_node::get (f->decl));
1968 summary_lto = summaries_lto->get_create (cgraph_node::get (f->decl));
1969 }
6cef01c3
JH
1970 if (!fnspec_summaries)
1971 fnspec_summaries = new fnspec_summaries_t (symtab);
85ebbabd
JH
1972 if (!escape_summaries)
1973 escape_summaries = new escape_summaries_t (symtab);
71dbabcc
JH
1974 }
1975
d119f34c
JH
1976
1977 /* Create and initialize summary for F.
1978 Note that summaries may be already allocated from previous
1979 run of the pass. */
1980 if (nolto)
1981 {
1982 gcc_assert (!summary->loads);
c9da53d6 1983 summary->loads = modref_records::create_ggc (param_modref_max_bases,
c33f4742
JH
1984 param_modref_max_refs,
1985 param_modref_max_accesses);
d119f34c 1986 gcc_assert (!summary->stores);
c9da53d6 1987 summary->stores = modref_records::create_ggc (param_modref_max_bases,
c33f4742
JH
1988 param_modref_max_refs,
1989 param_modref_max_accesses);
617695cd 1990 summary->writes_errno = false;
d119f34c
JH
1991 }
1992 if (lto)
1993 {
71dbabcc
JH
1994 gcc_assert (!summary_lto->loads);
1995 summary_lto->loads = modref_records_lto::create_ggc
c9da53d6 1996 (param_modref_max_bases,
c33f4742
JH
1997 param_modref_max_refs,
1998 param_modref_max_accesses);
71dbabcc
JH
1999 gcc_assert (!summary_lto->stores);
2000 summary_lto->stores = modref_records_lto::create_ggc
c9da53d6 2001 (param_modref_max_bases,
c33f4742
JH
2002 param_modref_max_refs,
2003 param_modref_max_accesses);
6cef01c3 2004 summary_lto->writes_errno = false;
d119f34c 2005 }
520d5ad3 2006
85ebbabd 2007 analyze_parms (summary, summary_lto, ipa);
520d5ad3 2008
67c935c8 2009 int ecf_flags = flags_from_decl_or_type (current_function_decl);
ada353b8 2010 auto_vec <gimple *, 32> recursive_calls;
d119f34c
JH
2011
2012 /* Analyze each statement in each basic block of the function. If the
2013 statement cannot be analyzed (for any reason), the entire function cannot
2014 be analyzed by modref. */
2015 basic_block bb;
2016 FOR_EACH_BB_FN (bb, f)
2017 {
2018 gimple_stmt_iterator si;
2019 for (si = gsi_after_labels (bb); !gsi_end_p (si); gsi_next (&si))
2020 {
71dbabcc
JH
2021 if (!analyze_stmt (summary, summary_lto,
2022 gsi_stmt (si), ipa, &recursive_calls)
85ebbabd
JH
2023 || ((!summary || !summary->useful_p (ecf_flags, false))
2024 && (!summary_lto
2025 || !summary_lto->useful_p (ecf_flags, false))))
d119f34c 2026 {
520d5ad3
JH
2027 collapse_loads (summary, summary_lto);
2028 collapse_stores (summary, summary_lto);
2029 break;
d119f34c
JH
2030 }
2031 }
2032 }
2033
ada353b8
JH
2034 /* In non-IPA mode we need to perform iterative datafow on recursive calls.
2035 This needs to be done after all other side effects are computed. */
2036 if (!ipa)
2037 {
2038 bool changed = true;
2039 while (changed)
2040 {
2041 changed = false;
2042 for (unsigned i = 0; i < recursive_calls.length (); i++)
2043 {
2044 changed |= merge_call_side_effects
2045 (summary, recursive_calls[i], summary,
2046 ignore_stores_p (current_function_decl,
2047 gimple_call_flags
56cb815b
JH
2048 (recursive_calls[i])),
2049 fnode);
85ebbabd 2050 if (!summary->useful_p (ecf_flags, false))
ada353b8 2051 {
71dbabcc 2052 remove_summary (lto, nolto, ipa);
ada353b8
JH
2053 return;
2054 }
2055 }
2056 }
2057 }
71dbabcc
JH
2058 if (summary && !summary->useful_p (ecf_flags))
2059 {
2060 if (!ipa)
2061 optimization_summaries->remove (fnode);
2062 else
2063 summaries->remove (fnode);
2064 summary = NULL;
2065 }
2066 if (summary_lto && !summary_lto->useful_p (ecf_flags))
2067 {
2068 summaries_lto->remove (fnode);
2069 summary_lto = NULL;
2070 }
85ebbabd
JH
2071 if (ipa && !summary && !summary_lto)
2072 remove_modref_edge_summaries (fnode);
d119f34c
JH
2073
2074 if (dump_file)
2075 {
2076 fprintf (dump_file, " - modref done with result: tracked.\n");
71dbabcc
JH
2077 if (summary)
2078 summary->dump (dump_file);
2079 if (summary_lto)
2080 summary_lto->dump (dump_file);
85ebbabd 2081 dump_modref_edge_summaries (dump_file, fnode, 2);
d119f34c
JH
2082 }
2083}
2084
2085/* Callback for generate_summary. */
2086
2087static void
2088modref_generate (void)
2089{
2090 struct cgraph_node *node;
2091 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
2092 {
2093 function *f = DECL_STRUCT_FUNCTION (node->decl);
2094 if (!f)
2095 continue;
2096 push_cfun (f);
2097 analyze_function (f, true);
2098 pop_cfun ();
2099 }
2100}
2101
2102/* Called when a new function is inserted to callgraph late. */
2103
2104void
2105modref_summaries::insert (struct cgraph_node *node, modref_summary *)
2106{
56cb815b
JH
2107 /* Local passes ought to be executed by the pass manager. */
2108 if (this == optimization_summaries)
71dbabcc
JH
2109 {
2110 optimization_summaries->remove (node);
56cb815b
JH
2111 return;
2112 }
1a90e99f
JH
2113 if (!DECL_STRUCT_FUNCTION (node->decl)
2114 || !opt_for_fn (node->decl, flag_ipa_modref))
56cb815b 2115 {
71dbabcc 2116 summaries->remove (node);
56cb815b 2117 return;
71dbabcc
JH
2118 }
2119 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
56cb815b 2120 analyze_function (DECL_STRUCT_FUNCTION (node->decl), true);
71dbabcc
JH
2121 pop_cfun ();
2122}
2123
2124/* Called when a new function is inserted to callgraph late. */
2125
2126void
2127modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
2128{
2129 /* We do not support adding new function when IPA information is already
2130 propagated. This is done only by SIMD cloning that is not very
2131 critical. */
2132 if (!DECL_STRUCT_FUNCTION (node->decl)
1a90e99f 2133 || !opt_for_fn (node->decl, flag_ipa_modref)
71dbabcc
JH
2134 || propagated)
2135 {
2136 summaries_lto->remove (node);
2137 return;
2138 }
d119f34c 2139 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
71dbabcc 2140 analyze_function (DECL_STRUCT_FUNCTION (node->decl), true);
d119f34c
JH
2141 pop_cfun ();
2142}
2143
2144/* Called when new clone is inserted to callgraph late. */
2145
2146void
56cb815b 2147modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
d119f34c
JH
2148 modref_summary *src_data,
2149 modref_summary *dst_data)
2150{
8a2fd716 2151 /* Do not duplicate optimization summaries; we do not handle parameter
56cb815b
JH
2152 transforms on them. */
2153 if (this == optimization_summaries)
d119f34c 2154 {
56cb815b
JH
2155 optimization_summaries->remove (dst);
2156 return;
d119f34c 2157 }
56cb815b
JH
2158 dst_data->stores = modref_records::create_ggc
2159 (src_data->stores->max_bases,
2160 src_data->stores->max_refs,
2161 src_data->stores->max_accesses);
2162 dst_data->stores->copy_from (src_data->stores);
2163 dst_data->loads = modref_records::create_ggc
2164 (src_data->loads->max_bases,
2165 src_data->loads->max_refs,
2166 src_data->loads->max_accesses);
2167 dst_data->loads->copy_from (src_data->loads);
617695cd 2168 dst_data->writes_errno = src_data->writes_errno;
5962efe9
JH
2169 if (src_data->arg_flags.length ())
2170 dst_data->arg_flags = src_data->arg_flags.copy ();
71dbabcc
JH
2171}
2172
2173/* Called when new clone is inserted to callgraph late. */
2174
2175void
2176modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
2177 modref_summary_lto *src_data,
2178 modref_summary_lto *dst_data)
2179{
8a2fd716 2180 /* Be sure that no further cloning happens after ipa-modref. If it does
fe90c504
JH
2181 we will need to update signatures for possible param changes. */
2182 gcc_checking_assert (!((modref_summaries_lto *)summaries_lto)->propagated);
56cb815b
JH
2183 dst_data->stores = modref_records_lto::create_ggc
2184 (src_data->stores->max_bases,
2185 src_data->stores->max_refs,
2186 src_data->stores->max_accesses);
2187 dst_data->stores->copy_from (src_data->stores);
2188 dst_data->loads = modref_records_lto::create_ggc
2189 (src_data->loads->max_bases,
2190 src_data->loads->max_refs,
2191 src_data->loads->max_accesses);
2192 dst_data->loads->copy_from (src_data->loads);
6cef01c3 2193 dst_data->writes_errno = src_data->writes_errno;
5962efe9
JH
2194 if (src_data->arg_flags.length ())
2195 dst_data->arg_flags = src_data->arg_flags.copy ();
d119f34c
JH
2196}
2197
2198namespace
2199{
2200/* Definition of the modref pass on GIMPLE. */
2201const pass_data pass_data_modref = {
2202 GIMPLE_PASS,
2203 "modref",
2204 OPTGROUP_IPA,
2205 TV_TREE_MODREF,
2206 (PROP_cfg | PROP_ssa),
2207 0,
2208 0,
2209 0,
2210 0,
2211};
2212
2213class pass_modref : public gimple_opt_pass
2214{
2215 public:
2216 pass_modref (gcc::context *ctxt)
2217 : gimple_opt_pass (pass_data_modref, ctxt) {}
2218
d119f34c
JH
2219 /* opt_pass methods: */
2220 opt_pass *clone ()
2221 {
2222 return new pass_modref (m_ctxt);
2223 }
2224 virtual bool gate (function *)
2225 {
2226 return flag_ipa_modref;
2227 }
2228 virtual unsigned int execute (function *);
2229};
2230
2231/* Encode TT to the output block OB using the summary streaming API. */
2232
2233static void
2234write_modref_records (modref_records_lto *tt, struct output_block *ob)
2235{
2236 streamer_write_uhwi (ob, tt->max_bases);
2237 streamer_write_uhwi (ob, tt->max_refs);
c33f4742 2238 streamer_write_uhwi (ob, tt->max_accesses);
d119f34c
JH
2239
2240 streamer_write_uhwi (ob, tt->every_base);
2241 streamer_write_uhwi (ob, vec_safe_length (tt->bases));
2242 size_t i;
2243 modref_base_node <tree> *base_node;
2244 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
2245 {
2246 stream_write_tree (ob, base_node->base, true);
2247
2248 streamer_write_uhwi (ob, base_node->every_ref);
2249 streamer_write_uhwi (ob, vec_safe_length (base_node->refs));
c33f4742 2250
d119f34c
JH
2251 size_t j;
2252 modref_ref_node <tree> *ref_node;
2253 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
2254 {
2255 stream_write_tree (ob, ref_node->ref, true);
c33f4742
JH
2256 streamer_write_uhwi (ob, ref_node->every_access);
2257 streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));
2258
2259 size_t k;
2260 modref_access_node *access_node;
2261 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
c34db4b6 2262 {
56cb815b 2263 streamer_write_hwi (ob, access_node->parm_index);
c34db4b6
JH
2264 if (access_node->parm_index != -1)
2265 {
2266 streamer_write_uhwi (ob, access_node->parm_offset_known);
2267 if (access_node->parm_offset_known)
2268 {
2269 streamer_write_poly_int64 (ob, access_node->parm_offset);
2270 streamer_write_poly_int64 (ob, access_node->offset);
2271 streamer_write_poly_int64 (ob, access_node->size);
2272 streamer_write_poly_int64 (ob, access_node->max_size);
2273 }
2274 }
2275 }
d119f34c
JH
2276 }
2277 }
2278}
2279
2280/* Read a modref_tree from the input block IB using the data from DATA_IN.
2281 This assumes that the tree was encoded using write_modref_tree.
2282 Either nolto_ret or lto_ret is initialized by the tree depending whether
46a27415 2283 LTO streaming is expected or not. */
d119f34c
JH
2284
2285void
2286read_modref_records (lto_input_block *ib, struct data_in *data_in,
2287 modref_records **nolto_ret,
2288 modref_records_lto **lto_ret)
2289{
2290 size_t max_bases = streamer_read_uhwi (ib);
2291 size_t max_refs = streamer_read_uhwi (ib);
c33f4742 2292 size_t max_accesses = streamer_read_uhwi (ib);
d119f34c 2293
71dbabcc 2294 if (lto_ret)
c33f4742
JH
2295 *lto_ret = modref_records_lto::create_ggc (max_bases, max_refs,
2296 max_accesses);
71dbabcc 2297 if (nolto_ret)
c33f4742
JH
2298 *nolto_ret = modref_records::create_ggc (max_bases, max_refs,
2299 max_accesses);
71dbabcc 2300 gcc_checking_assert (lto_ret || nolto_ret);
d119f34c
JH
2301
2302 size_t every_base = streamer_read_uhwi (ib);
2303 size_t nbase = streamer_read_uhwi (ib);
2304
2305 gcc_assert (!every_base || nbase == 0);
2306 if (every_base)
2307 {
71dbabcc 2308 if (nolto_ret)
d119f34c 2309 (*nolto_ret)->collapse ();
71dbabcc 2310 if (lto_ret)
d119f34c
JH
2311 (*lto_ret)->collapse ();
2312 }
2313 for (size_t i = 0; i < nbase; i++)
2314 {
2315 tree base_tree = stream_read_tree (ib, data_in);
2316 modref_base_node <alias_set_type> *nolto_base_node = NULL;
2317 modref_base_node <tree> *lto_base_node = NULL;
2318
2319 /* At stream in time we have LTO alias info. Check if we streamed in
2320 something obviously unnecessary. Do not glob types by alias sets;
2321 it is not 100% clear that ltrans types will get merged same way.
2322 Types may get refined based on ODR type conflicts. */
2323 if (base_tree && !get_alias_set (base_tree))
2324 {
2325 if (dump_file)
2326 {
2327 fprintf (dump_file, "Streamed in alias set 0 type ");
2328 print_generic_expr (dump_file, base_tree);
2329 fprintf (dump_file, "\n");
2330 }
2331 base_tree = NULL;
2332 }
2333
71dbabcc 2334 if (nolto_ret)
d119f34c
JH
2335 nolto_base_node = (*nolto_ret)->insert_base (base_tree
2336 ? get_alias_set (base_tree)
2337 : 0);
71dbabcc 2338 if (lto_ret)
d119f34c
JH
2339 lto_base_node = (*lto_ret)->insert_base (base_tree);
2340 size_t every_ref = streamer_read_uhwi (ib);
2341 size_t nref = streamer_read_uhwi (ib);
2342
2343 gcc_assert (!every_ref || nref == 0);
2344 if (every_ref)
2345 {
2346 if (nolto_base_node)
2347 nolto_base_node->collapse ();
2348 if (lto_base_node)
2349 lto_base_node->collapse ();
2350 }
2351 for (size_t j = 0; j < nref; j++)
2352 {
2353 tree ref_tree = stream_read_tree (ib, data_in);
2354
2355 if (ref_tree && !get_alias_set (ref_tree))
2356 {
2357 if (dump_file)
2358 {
2359 fprintf (dump_file, "Streamed in alias set 0 type ");
2360 print_generic_expr (dump_file, ref_tree);
2361 fprintf (dump_file, "\n");
2362 }
c33f4742 2363 ref_tree = NULL;
d119f34c
JH
2364 }
2365
c33f4742
JH
2366 modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
2367 modref_ref_node <tree> *lto_ref_node = NULL;
2368
d119f34c 2369 if (nolto_base_node)
c33f4742
JH
2370 nolto_ref_node
2371 = nolto_base_node->insert_ref (ref_tree
2372 ? get_alias_set (ref_tree) : 0,
2373 max_refs);
d119f34c 2374 if (lto_base_node)
c33f4742
JH
2375 lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);
2376
2377 size_t every_access = streamer_read_uhwi (ib);
2378 size_t naccesses = streamer_read_uhwi (ib);
2379
2380 if (nolto_ref_node)
2381 nolto_ref_node->every_access = every_access;
2382 if (lto_ref_node)
2383 lto_ref_node->every_access = every_access;
2384
2385 for (size_t k = 0; k < naccesses; k++)
2386 {
56cb815b 2387 int parm_index = streamer_read_hwi (ib);
c34db4b6
JH
2388 bool parm_offset_known = false;
2389 poly_int64 parm_offset = 0;
2390 poly_int64 offset = 0;
2391 poly_int64 size = -1;
2392 poly_int64 max_size = -1;
2393
2394 if (parm_index != -1)
2395 {
2396 parm_offset_known = streamer_read_uhwi (ib);
2397 if (parm_offset_known)
2398 {
2399 parm_offset = streamer_read_poly_int64 (ib);
2400 offset = streamer_read_poly_int64 (ib);
2401 size = streamer_read_poly_int64 (ib);
2402 max_size = streamer_read_poly_int64 (ib);
2403 }
2404 }
2405 modref_access_node a = {offset, size, max_size, parm_offset,
2406 parm_index, parm_offset_known};
c33f4742
JH
2407 if (nolto_ref_node)
2408 nolto_ref_node->insert_access (a, max_accesses);
2409 if (lto_ref_node)
2410 lto_ref_node->insert_access (a, max_accesses);
2411 }
d119f34c
JH
2412 }
2413 }
71dbabcc 2414 if (lto_ret)
c33f4742 2415 (*lto_ret)->cleanup ();
71dbabcc 2416 if (nolto_ret)
c33f4742 2417 (*nolto_ret)->cleanup ();
d119f34c
JH
2418}
2419
85ebbabd
JH
2420/* Write ESUM to BP. */
2421
2422static void
2423modref_write_escape_summary (struct bitpack_d *bp, escape_summary *esum)
2424{
2425 if (!esum)
2426 {
2427 bp_pack_var_len_unsigned (bp, 0);
2428 return;
2429 }
2430 bp_pack_var_len_unsigned (bp, esum->esc.length ());
2431 unsigned int i;
2432 escape_entry *ee;
2433 FOR_EACH_VEC_ELT (esum->esc, i, ee)
2434 {
2435 bp_pack_var_len_unsigned (bp, ee->parm_index);
2436 bp_pack_var_len_unsigned (bp, ee->arg);
2437 bp_pack_var_len_unsigned (bp, ee->min_flags);
2438 bp_pack_value (bp, ee->direct, 1);
2439 }
2440}
2441
2442/* Read escape summary for E from BP. */
2443
2444static void
2445modref_read_escape_summary (struct bitpack_d *bp, cgraph_edge *e)
2446{
2447 unsigned int n = bp_unpack_var_len_unsigned (bp);
2448 if (!n)
2449 return;
2450 escape_summary *esum = escape_summaries->get_create (e);
2451 esum->esc.reserve_exact (n);
2452 for (unsigned int i = 0; i < n; i++)
2453 {
2454 escape_entry ee;
2455 ee.parm_index = bp_unpack_var_len_unsigned (bp);
2456 ee.arg = bp_unpack_var_len_unsigned (bp);
2457 ee.min_flags = bp_unpack_var_len_unsigned (bp);
2458 ee.direct = bp_unpack_value (bp, 1);
2459 esum->esc.quick_push (ee);
2460 }
2461}
2462
d119f34c
JH
2463/* Callback for write_summary. */
2464
2465static void
2466modref_write ()
2467{
2468 struct output_block *ob = create_output_block (LTO_section_ipa_modref);
2469 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2470 unsigned int count = 0;
2471 int i;
2472
71dbabcc 2473 if (!summaries_lto)
d119f34c
JH
2474 {
2475 streamer_write_uhwi (ob, 0);
2476 streamer_write_char_stream (ob->main_stream, 0);
2477 produce_asm (ob, NULL);
2478 destroy_output_block (ob);
2479 return;
2480 }
2481
2482 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
2483 {
2484 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2485 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
71dbabcc 2486 modref_summary_lto *r;
d119f34c
JH
2487
2488 if (cnode && cnode->definition && !cnode->alias
71dbabcc
JH
2489 && (r = summaries_lto->get (cnode))
2490 && r->useful_p (flags_from_decl_or_type (cnode->decl)))
d119f34c
JH
2491 count++;
2492 }
2493 streamer_write_uhwi (ob, count);
2494
2495 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
2496 {
2497 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2498 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
2499
2500 if (cnode && cnode->definition && !cnode->alias)
2501 {
71dbabcc 2502 modref_summary_lto *r = summaries_lto->get (cnode);
d119f34c 2503
71dbabcc 2504 if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
d119f34c
JH
2505 continue;
2506
2507 streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));
2508
85ebbabd
JH
2509 streamer_write_uhwi (ob, r->arg_flags.length ());
2510 for (unsigned int i = 0; i < r->arg_flags.length (); i++)
2511 streamer_write_char_stream (ob->main_stream, r->arg_flags[i]);
2512
56cb815b
JH
2513 write_modref_records (r->loads, ob);
2514 write_modref_records (r->stores, ob);
6cef01c3
JH
2515
2516 struct bitpack_d bp = bitpack_create (ob->main_stream);
2517 bp_pack_value (&bp, r->writes_errno, 1);
2518 if (!flag_wpa)
2519 {
2520 for (cgraph_edge *e = cnode->indirect_calls;
2521 e; e = e->next_callee)
2522 {
2523 class fnspec_summary *sum = fnspec_summaries->get (e);
2524 bp_pack_value (&bp, sum != NULL, 1);
2525 if (sum)
2526 bp_pack_string (ob, &bp, sum->fnspec, true);
85ebbabd
JH
2527 class escape_summary *esum = escape_summaries->get (e);
2528 modref_write_escape_summary (&bp,esum);
6cef01c3
JH
2529 }
2530 for (cgraph_edge *e = cnode->callees; e; e = e->next_callee)
2531 {
2532 class fnspec_summary *sum = fnspec_summaries->get (e);
2533 bp_pack_value (&bp, sum != NULL, 1);
2534 if (sum)
2535 bp_pack_string (ob, &bp, sum->fnspec, true);
85ebbabd
JH
2536 class escape_summary *esum = escape_summaries->get (e);
2537 modref_write_escape_summary (&bp,esum);
6cef01c3
JH
2538 }
2539 }
2540 streamer_write_bitpack (&bp);
d119f34c
JH
2541 }
2542 }
2543 streamer_write_char_stream (ob->main_stream, 0);
2544 produce_asm (ob, NULL);
2545 destroy_output_block (ob);
2546}
2547
2548static void
2549read_section (struct lto_file_decl_data *file_data, const char *data,
2550 size_t len)
2551{
2552 const struct lto_function_header *header
2553 = (const struct lto_function_header *) data;
2554 const int cfg_offset = sizeof (struct lto_function_header);
2555 const int main_offset = cfg_offset + header->cfg_size;
2556 const int string_offset = main_offset + header->main_size;
2557 struct data_in *data_in;
2558 unsigned int i;
2559 unsigned int f_count;
2560
2561 lto_input_block ib ((const char *) data + main_offset, header->main_size,
2562 file_data->mode_table);
2563
2564 data_in
2565 = lto_data_in_create (file_data, (const char *) data + string_offset,
2566 header->string_size, vNULL);
2567 f_count = streamer_read_uhwi (&ib);
2568 for (i = 0; i < f_count; i++)
2569 {
2570 struct cgraph_node *node;
2571 lto_symtab_encoder_t encoder;
2572
2573 unsigned int index = streamer_read_uhwi (&ib);
2574 encoder = file_data->symtab_node_encoder;
2575 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
2576 index));
2577
71dbabcc
JH
2578 modref_summary *modref_sum = summaries
2579 ? summaries->get_create (node) : NULL;
2580 modref_summary_lto *modref_sum_lto = summaries_lto
2581 ? summaries_lto->get_create (node)
2582 : NULL;
71dbabcc
JH
2583 if (optimization_summaries)
2584 modref_sum = optimization_summaries->get_create (node);
2585
ea937e7d
JH
2586 if (modref_sum)
2587 modref_sum->writes_errno = false;
6cef01c3
JH
2588 if (modref_sum_lto)
2589 modref_sum_lto->writes_errno = false;
ea937e7d 2590
71dbabcc
JH
2591 gcc_assert (!modref_sum || (!modref_sum->loads
2592 && !modref_sum->stores));
2593 gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads
2594 && !modref_sum_lto->stores));
85ebbabd
JH
2595 unsigned int args = streamer_read_uhwi (&ib);
2596 if (args && modref_sum)
2597 modref_sum->arg_flags.reserve_exact (args);
2598 if (args && modref_sum_lto)
2599 modref_sum_lto->arg_flags.reserve_exact (args);
2600 for (unsigned int i = 0; i < args; i++)
2601 {
2602 unsigned char flags = streamer_read_uchar (&ib);
2603 if (modref_sum)
2604 modref_sum->arg_flags.quick_push (flags);
2605 if (modref_sum_lto)
2606 modref_sum_lto->arg_flags.quick_push (flags);
2607 }
56cb815b
JH
2608 read_modref_records (&ib, data_in,
2609 modref_sum ? &modref_sum->loads : NULL,
2610 modref_sum_lto ? &modref_sum_lto->loads : NULL);
2611 read_modref_records (&ib, data_in,
2612 modref_sum ? &modref_sum->stores : NULL,
2613 modref_sum_lto ? &modref_sum_lto->stores : NULL);
6cef01c3
JH
2614 struct bitpack_d bp = streamer_read_bitpack (&ib);
2615 if (bp_unpack_value (&bp, 1))
2616 {
2617 if (modref_sum)
2618 modref_sum->writes_errno = true;
2619 if (modref_sum_lto)
2620 modref_sum_lto->writes_errno = true;
2621 }
2622 if (!flag_ltrans)
2623 {
2624 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
2625 {
2626 if (bp_unpack_value (&bp, 1))
2627 {
2628 class fnspec_summary *sum = fnspec_summaries->get_create (e);
2629 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
2630 }
85ebbabd 2631 modref_read_escape_summary (&bp, e);
6cef01c3
JH
2632 }
2633 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
2634 {
2635 if (bp_unpack_value (&bp, 1))
2636 {
2637 class fnspec_summary *sum = fnspec_summaries->get_create (e);
2638 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
2639 }
85ebbabd 2640 modref_read_escape_summary (&bp, e);
6cef01c3
JH
2641 }
2642 }
d119f34c
JH
2643 if (dump_file)
2644 {
2645 fprintf (dump_file, "Read modref for %s\n",
2646 node->dump_name ());
71dbabcc
JH
2647 if (modref_sum)
2648 modref_sum->dump (dump_file);
2649 if (modref_sum_lto)
2650 modref_sum_lto->dump (dump_file);
85ebbabd 2651 dump_modref_edge_summaries (dump_file, node, 4);
d119f34c 2652 }
d119f34c
JH
2653 }
2654
2655 lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
2656 len);
2657 lto_data_in_delete (data_in);
2658}
2659
2660/* Callback for read_summary. */
2661
2662static void
2663modref_read (void)
2664{
2665 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2666 struct lto_file_decl_data *file_data;
2667 unsigned int j = 0;
2668
71dbabcc
JH
2669 gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto);
2670 if (flag_ltrans)
2671 optimization_summaries = modref_summaries::create_ggc (symtab);
2672 else
2673 {
2674 if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
2675 summaries_lto = modref_summaries_lto::create_ggc (symtab);
2676 if (!flag_wpa
2677 || (flag_incremental_link == INCREMENTAL_LINK_LTO
2678 && flag_fat_lto_objects))
2679 summaries = modref_summaries::create_ggc (symtab);
6cef01c3
JH
2680 if (!fnspec_summaries)
2681 fnspec_summaries = new fnspec_summaries_t (symtab);
85ebbabd
JH
2682 if (!escape_summaries)
2683 escape_summaries = new escape_summaries_t (symtab);
71dbabcc 2684 }
d119f34c
JH
2685
2686 while ((file_data = file_data_vec[j++]))
2687 {
2688 size_t len;
2689 const char *data = lto_get_summary_section_data (file_data,
2690 LTO_section_ipa_modref,
2691 &len);
2692 if (data)
2693 read_section (file_data, data, len);
2694 else
2695 /* Fatal error here. We do not want to support compiling ltrans units
2696 with different version of compiler or different flags than the WPA
2697 unit, so this should never happen. */
2698 fatal_error (input_location,
2699 "IPA modref summary is missing in input file");
2700 }
2701}
2702
85ebbabd
JH
2703/* Recompute arg_flags for param adjustments in INFO. */
2704
2705static void
2706remap_arg_flags (auto_vec <unsigned char> &arg_flags, clone_info *info)
2707{
2708 auto_vec<unsigned char> old = arg_flags.copy ();
2709 int max = -1;
2710 size_t i;
2711 ipa_adjusted_param *p;
2712
2713 arg_flags.release ();
2714
2715 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
2716 {
2717 int o = info->param_adjustments->get_original_index (i);
2718 if (o >= 0 && (int)old.length () > o && old[o])
2719 max = i;
2720 }
5962efe9 2721 if (max >= 0)
85ebbabd
JH
2722 arg_flags.safe_grow_cleared (max + 1, true);
2723 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
2724 {
2725 int o = info->param_adjustments->get_original_index (i);
2726 if (o >= 0 && (int)old.length () > o && old[o])
2727 arg_flags[i] = old[o];
2728 }
2729}
2730
c8fd2be1
JH
2731/* If signature changed, update the summary. */
2732
fe90c504
JH
2733static void
2734update_signature (struct cgraph_node *node)
c8fd2be1 2735{
ae7a23a3
JH
2736 clone_info *info = clone_info::get (node);
2737 if (!info || !info->param_adjustments)
fe90c504
JH
2738 return;
2739
2740 modref_summary *r = optimization_summaries
2741 ? optimization_summaries->get (node) : NULL;
2742 modref_summary_lto *r_lto = summaries_lto
2743 ? summaries_lto->get (node) : NULL;
2744 if (!r && !r_lto)
2745 return;
c8fd2be1
JH
2746 if (dump_file)
2747 {
2748 fprintf (dump_file, "Updating summary for %s from:\n",
2749 node->dump_name ());
85ebbabd
JH
2750 if (r)
2751 r->dump (dump_file);
2752 if (r_lto)
2753 r_lto->dump (dump_file);
c8fd2be1
JH
2754 }
2755
2756 size_t i, max = 0;
2757 ipa_adjusted_param *p;
2758
ae7a23a3 2759 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
c8fd2be1 2760 {
ae7a23a3 2761 int idx = info->param_adjustments->get_original_index (i);
c8fd2be1
JH
2762 if (idx > (int)max)
2763 max = idx;
2764 }
2765
2766 auto_vec <int, 32> map;
2767
5d2cedaa 2768 map.reserve (max + 1);
c8fd2be1
JH
2769 for (i = 0; i <= max; i++)
2770 map.quick_push (-1);
ae7a23a3 2771 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
c8fd2be1 2772 {
ae7a23a3 2773 int idx = info->param_adjustments->get_original_index (i);
c8fd2be1 2774 if (idx >= 0)
2f61125f 2775 map[idx] = i;
c8fd2be1 2776 }
fe90c504
JH
2777 if (r)
2778 {
2779 r->loads->remap_params (&map);
2780 r->stores->remap_params (&map);
85ebbabd
JH
2781 if (r->arg_flags.length ())
2782 remap_arg_flags (r->arg_flags, info);
fe90c504
JH
2783 }
2784 if (r_lto)
2785 {
2786 r_lto->loads->remap_params (&map);
2787 r_lto->stores->remap_params (&map);
85ebbabd
JH
2788 if (r_lto->arg_flags.length ())
2789 remap_arg_flags (r_lto->arg_flags, info);
fe90c504 2790 }
c8fd2be1
JH
2791 if (dump_file)
2792 {
2793 fprintf (dump_file, "to:\n");
fe90c504 2794 if (r)
6cef01c3 2795 r->dump (dump_file);
fe90c504 2796 if (r_lto)
6cef01c3 2797 r_lto->dump (dump_file);
c8fd2be1 2798 }
fe90c504 2799 return;
c8fd2be1
JH
2800}
2801
d119f34c
JH
2802/* Definition of the modref IPA pass. */
2803const pass_data pass_data_ipa_modref =
2804{
2805 IPA_PASS, /* type */
2806 "modref", /* name */
2807 OPTGROUP_IPA, /* optinfo_flags */
2808 TV_IPA_MODREF, /* tv_id */
2809 0, /* properties_required */
2810 0, /* properties_provided */
2811 0, /* properties_destroyed */
2812 0, /* todo_flags_start */
2813 ( TODO_dump_symtab ), /* todo_flags_finish */
2814};
2815
2816class pass_ipa_modref : public ipa_opt_pass_d
2817{
2818public:
2819 pass_ipa_modref (gcc::context *ctxt)
2820 : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
2821 modref_generate, /* generate_summary */
2822 modref_write, /* write_summary */
2823 modref_read, /* read_summary */
2824 modref_write, /* write_optimization_summary */
2825 modref_read, /* read_optimization_summary */
2826 NULL, /* stmt_fixup */
2827 0, /* function_transform_todo_flags_start */
fe90c504 2828 NULL, /* function_transform */
d119f34c
JH
2829 NULL) /* variable_transform */
2830 {}
2831
2832 /* opt_pass methods: */
2833 opt_pass *clone () { return new pass_ipa_modref (m_ctxt); }
2834 virtual bool gate (function *)
2835 {
2836 return true;
2837 }
2838 virtual unsigned int execute (function *);
2839
2840};
2841
2842}
2843
2844unsigned int pass_modref::execute (function *f)
2845{
d119f34c
JH
2846 analyze_function (f, false);
2847 return 0;
2848}
2849
2850gimple_opt_pass *
2851make_pass_modref (gcc::context *ctxt)
2852{
2853 return new pass_modref (ctxt);
2854}
2855
2856ipa_opt_pass_d *
2857make_pass_ipa_modref (gcc::context *ctxt)
2858{
2859 return new pass_ipa_modref (ctxt);
2860}
2861
2862/* Skip edges from and to nodes without ipa_pure_const enabled.
2863 Ignore not available symbols. */
2864
2865static bool
2866ignore_edge (struct cgraph_edge *e)
2867{
87d75a11
JH
2868 /* We merge summaries of inline clones into summaries of functions they
2869 are inlined to. For that reason the complete function bodies must
2870 act as unit. */
2871 if (!e->inline_failed)
2872 return false;
d119f34c
JH
2873 enum availability avail;
2874 cgraph_node *callee = e->callee->function_or_virtual_thunk_symbol
2875 (&avail, e->caller);
2876
2877 return (avail <= AVAIL_INTERPOSABLE
56cb815b 2878 || ((!optimization_summaries || !optimization_summaries->get (callee))
71dbabcc 2879 && (!summaries_lto || !summaries_lto->get (callee)))
d119f34c
JH
2880 || flags_from_decl_or_type (e->callee->decl)
2881 & (ECF_CONST | ECF_NOVOPS));
2882}
2883
8a2fd716 2884/* Compute parm_map for CALLEE_EDGE. */
d119f34c 2885
6cef01c3 2886static bool
c34db4b6 2887compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
ada353b8
JH
2888{
2889 class ipa_edge_args *args;
2890 if (ipa_node_params_sum
2891 && !callee_edge->call_stmt_cannot_inline_p
2892 && (args = IPA_EDGE_REF (callee_edge)) != NULL)
2893 {
2894 int i, count = ipa_get_cs_argument_count (args);
2895 class ipa_node_params *caller_parms_info, *callee_pi;
2896 class ipa_call_summary *es
2897 = ipa_call_summaries->get (callee_edge);
2898 cgraph_node *callee
2899 = callee_edge->callee->function_or_virtual_thunk_symbol
2900 (NULL, callee_edge->caller);
2901
2902 caller_parms_info = IPA_NODE_REF (callee_edge->caller->inlined_to
2903 ? callee_edge->caller->inlined_to
2904 : callee_edge->caller);
2905 callee_pi = IPA_NODE_REF (callee);
2906
520d5ad3 2907 (*parm_map).safe_grow_cleared (count, true);
ada353b8
JH
2908
2909 for (i = 0; i < count; i++)
2910 {
2911 if (es && es->param[i].points_to_local_or_readonly_memory)
2912 {
c34db4b6 2913 (*parm_map)[i].parm_index = -2;
ada353b8
JH
2914 continue;
2915 }
2916
2917 struct ipa_jump_func *jf
2918 = ipa_get_ith_jump_func (args, i);
899c10c9 2919 if (jf && callee_pi)
ada353b8
JH
2920 {
2921 tree cst = ipa_value_from_jfunc (caller_parms_info,
2922 jf,
2923 ipa_get_type
2924 (callee_pi, i));
2925 if (cst && points_to_local_or_readonly_memory_p (cst))
2926 {
c34db4b6 2927 (*parm_map)[i].parm_index = -2;
ada353b8
JH
2928 continue;
2929 }
2930 }
2931 if (jf && jf->type == IPA_JF_PASS_THROUGH)
2932 {
c34db4b6 2933 (*parm_map)[i].parm_index
56cb815b 2934 = ipa_get_jf_pass_through_formal_id (jf);
4d90edb9
JH
2935 if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
2936 {
2937 (*parm_map)[i].parm_offset_known = true;
2938 (*parm_map)[i].parm_offset = 0;
2939 }
2940 else if (ipa_get_jf_pass_through_operation (jf)
2941 == POINTER_PLUS_EXPR
2942 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
2943 &(*parm_map)[i].parm_offset))
2944 (*parm_map)[i].parm_offset_known = true;
2945 else
2946 (*parm_map)[i].parm_offset_known = false;
ada353b8
JH
2947 continue;
2948 }
2949 if (jf && jf->type == IPA_JF_ANCESTOR)
c34db4b6
JH
2950 {
2951 (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
2952 (*parm_map)[i].parm_offset_known = true;
c8fd2be1
JH
2953 gcc_checking_assert
2954 (!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)));
2955 (*parm_map)[i].parm_offset
2956 = ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT;
85ebbabd 2957 }
ada353b8 2958 else
c34db4b6 2959 (*parm_map)[i].parm_index = -1;
ada353b8
JH
2960 }
2961 if (dump_file)
2962 {
2963 fprintf (dump_file, " Parm map: ");
2964 for (i = 0; i < count; i++)
c34db4b6 2965 fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
ada353b8
JH
2966 fprintf (dump_file, "\n");
2967 }
6cef01c3 2968 return true;
ada353b8 2969 }
6cef01c3 2970 return false;
ada353b8
JH
2971}
2972
85ebbabd
JH
2973/* Map used to translate escape infos. */
2974
2975struct escape_map
2976{
2977 int parm_index;
2978 bool direct;
2979};
2980
2981/* Update escape map fo E. */
2982
2983static void
2984update_escape_summary_1 (cgraph_edge *e,
2985 vec <vec <escape_map>> &map)
2986{
2987 escape_summary *sum = escape_summaries->get (e);
2988 if (!sum)
2989 return;
2990 auto_vec <escape_entry> old = sum->esc.copy ();
2991 sum->esc.release ();
2992
2993 unsigned int i;
2994 escape_entry *ee;
2995 FOR_EACH_VEC_ELT (old, i, ee)
2996 {
2997 unsigned int j;
2998 struct escape_map *em;
2999 if (ee->parm_index >= map.length ())
3000 continue;
3001 FOR_EACH_VEC_ELT (map[ee->parm_index], j, em)
3002 {
3003 struct escape_entry entry = {em->parm_index, ee->arg,
3004 ee->min_flags,
3005 ee->direct & em->direct};
3006 sum->esc.safe_push (entry);
3007 }
3008 }
3009 if (!sum->esc.length ())
3010 escape_summaries->remove (e);
3011}
3012
3013/* Update escape map fo NODE. */
3014
3015static void
3016update_escape_summary (cgraph_node *node,
3017 vec <vec <escape_map>> &map)
3018{
3019 if (!escape_summaries)
3020 return;
3021 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
3022 update_escape_summary_1 (e, map);
3023 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
3024 {
3025 if (!e->inline_failed)
3026 update_escape_summary (e->callee, map);
3027 else
3028 update_escape_summary_1 (e, map);
3029 }
3030}
3031
ada353b8
JH
3032/* Call EDGE was inlined; merge summary from callee to the caller. */
3033
3034void
3035ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
d119f34c 3036{
71dbabcc 3037 if (!summaries && !summaries_lto)
ada353b8 3038 return;
d119f34c 3039
ada353b8
JH
3040 struct cgraph_node *to = (edge->caller->inlined_to
3041 ? edge->caller->inlined_to : edge->caller);
71dbabcc
JH
3042 class modref_summary *to_info = summaries ? summaries->get (to) : NULL;
3043 class modref_summary_lto *to_info_lto = summaries_lto
3044 ? summaries_lto->get (to) : NULL;
d119f34c 3045
71dbabcc
JH
3046 if (!to_info && !to_info_lto)
3047 {
3048 if (summaries)
3049 summaries->remove (edge->callee);
3050 if (summaries_lto)
3051 summaries_lto->remove (edge->callee);
85ebbabd 3052 remove_modref_edge_summaries (edge->callee);
71dbabcc
JH
3053 return;
3054 }
ada353b8 3055
71dbabcc
JH
3056 class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
3057 : NULL;
3058 class modref_summary_lto *callee_info_lto
3059 = summaries_lto ? summaries_lto->get (edge->callee) : NULL;
ada353b8 3060 int flags = flags_from_decl_or_type (edge->callee->decl);
85ebbabd 3061 bool ignore_stores = ignore_stores_p (edge->caller->decl, flags);
ada353b8 3062
71dbabcc 3063 if (!callee_info && to_info)
d119f34c 3064 {
85ebbabd 3065 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
71dbabcc 3066 to_info->loads->collapse ();
ccea1371 3067 if (!ignore_stores)
85ebbabd 3068 to_info->stores->collapse ();
71dbabcc
JH
3069 }
3070 if (!callee_info_lto && to_info_lto)
3071 {
85ebbabd 3072 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
71dbabcc 3073 to_info_lto->loads->collapse ();
ccea1371 3074 if (!ignore_stores)
85ebbabd 3075 to_info_lto->stores->collapse ();
ada353b8 3076 }
71dbabcc 3077 if (callee_info || callee_info_lto)
ada353b8 3078 {
c34db4b6 3079 auto_vec <modref_parm_map, 32> parm_map;
ada353b8
JH
3080
3081 compute_parm_map (edge, &parm_map);
3082
85ebbabd 3083 if (!ignore_stores)
c8fd2be1 3084 {
71dbabcc 3085 if (to_info && callee_info)
c8fd2be1 3086 to_info->stores->merge (callee_info->stores, &parm_map);
71dbabcc
JH
3087 if (to_info_lto && callee_info_lto)
3088 to_info_lto->stores->merge (callee_info_lto->stores, &parm_map);
c8fd2be1 3089 }
85ebbabd
JH
3090 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
3091 {
3092 if (to_info && callee_info)
3093 to_info->loads->merge (callee_info->loads, &parm_map);
3094 if (to_info_lto && callee_info_lto)
3095 to_info_lto->loads->merge (callee_info_lto->loads, &parm_map);
3096 }
71dbabcc 3097 }
85ebbabd
JH
3098
3099 /* Now merge escape summaries.
3100 For every escape to the callee we need to merge calle flags
3101 and remap calees escapes. */
3102 class escape_summary *sum = escape_summaries->get (edge);
3103 int max_escape = -1;
3104 escape_entry *ee;
3105 unsigned int i;
3106
3107 if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
3108 FOR_EACH_VEC_ELT (sum->esc, i, ee)
3109 if ((int)ee->arg > max_escape)
3110 max_escape = ee->arg;
3111
3112 auto_vec <vec <struct escape_map>, 32> emap (max_escape + 1);
3113 emap.safe_grow (max_escape + 1, true);
3114 for (i = 0; (int)i < max_escape + 1; i++)
3115 emap[i] = vNULL;
3116
3117 if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
3118 FOR_EACH_VEC_ELT (sum->esc, i, ee)
3119 {
3120 bool needed = false;
3121 if (to_info && to_info->arg_flags.length () > ee->parm_index)
3122 {
3123 int flags = callee_info
3124 && callee_info->arg_flags.length () > ee->arg
3125 ? callee_info->arg_flags[ee->arg] : 0;
3126 if (!ee->direct)
3127 flags = deref_flags (flags, ignore_stores);
3128 else if (ignore_stores)
bb07490a 3129 flags |= EAF_NOCLOBBER | EAF_NOESCAPE | EAF_NODIRECTESCAPE;
85ebbabd
JH
3130 flags |= ee->min_flags;
3131 to_info->arg_flags[ee->parm_index] &= flags;
3132 if (to_info->arg_flags[ee->parm_index])
3133 needed = true;
3134 }
3135 if (to_info_lto && to_info_lto->arg_flags.length () > ee->parm_index)
3136 {
3137 int flags = callee_info_lto
3138 && callee_info_lto->arg_flags.length () > ee->arg
3139 ? callee_info_lto->arg_flags[ee->arg] : 0;
3140 if (!ee->direct)
3141 flags = deref_flags (flags, ignore_stores);
3142 else if (ignore_stores)
bb07490a 3143 flags |= EAF_NOCLOBBER | EAF_NOESCAPE | EAF_NODIRECTESCAPE;
85ebbabd
JH
3144 flags |= ee->min_flags;
3145 to_info_lto->arg_flags[ee->parm_index] &= flags;
3146 if (to_info_lto->arg_flags[ee->parm_index])
3147 needed = true;
3148 }
3149 struct escape_map entry = {ee->parm_index, ee->direct};
3150 if (needed)
3151 emap[ee->arg].safe_push (entry);
3152 }
3153 update_escape_summary (edge->callee, emap);
3154 for (i = 0; (int)i < max_escape + 1; i++)
3155 emap[i].release ();
3156 if (sum)
3157 escape_summaries->remove (edge);
3158
71dbabcc
JH
3159 if (summaries)
3160 {
3161 if (to_info && !to_info->useful_p (flags))
87d75a11
JH
3162 {
3163 if (dump_file)
3164 fprintf (dump_file, "Removed mod-ref summary for %s\n",
3165 to->dump_name ());
3166 summaries->remove (to);
85ebbabd 3167 to_info = NULL;
87d75a11
JH
3168 }
3169 else if (to_info && dump_file)
3170 {
3171 if (dump_file)
3172 fprintf (dump_file, "Updated mod-ref summary for %s\n",
3173 to->dump_name ());
3174 to_info->dump (dump_file);
3175 }
71dbabcc
JH
3176 if (callee_info)
3177 summaries->remove (edge->callee);
3178 }
3179 if (summaries_lto)
3180 {
3181 if (to_info_lto && !to_info_lto->useful_p (flags))
87d75a11
JH
3182 {
3183 if (dump_file)
3184 fprintf (dump_file, "Removed mod-ref summary for %s\n",
3185 to->dump_name ());
3186 summaries_lto->remove (to);
3187 }
3188 else if (to_info_lto && dump_file)
3189 {
3190 if (dump_file)
3191 fprintf (dump_file, "Updated mod-ref summary for %s\n",
3192 to->dump_name ());
3193 to_info_lto->dump (dump_file);
85ebbabd 3194 to_info_lto = NULL;
87d75a11 3195 }
71dbabcc
JH
3196 if (callee_info_lto)
3197 summaries_lto->remove (edge->callee);
3198 }
85ebbabd
JH
3199 if (!to_info && !to_info_lto)
3200 remove_modref_edge_summaries (to);
ada353b8
JH
3201 return;
3202}
d119f34c 3203
6cef01c3
JH
3204/* Get parameter type from DECL. This is only safe for special cases
3205 like builtins we create fnspec for because the type match is checked
3206 at fnspec creation time. */
d119f34c 3207
6cef01c3
JH
3208static tree
3209get_parm_type (tree decl, unsigned int i)
ada353b8 3210{
6cef01c3 3211 tree t = TYPE_ARG_TYPES (TREE_TYPE (decl));
ada353b8 3212
6cef01c3
JH
3213 for (unsigned int p = 0; p < i; p++)
3214 t = TREE_CHAIN (t);
3215 return TREE_VALUE (t);
3216}
3217
3218/* Return access mode for argument I of call E with FNSPEC. */
3219
3220static modref_access_node
3221get_access_for_fnspec (cgraph_edge *e, attr_fnspec &fnspec,
3222 unsigned int i, modref_parm_map &map)
3223{
3224 tree size = NULL_TREE;
3225 unsigned int size_arg;
3226
3227 if (!fnspec.arg_specified_p (i))
3228 ;
3229 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
ada353b8 3230 {
6cef01c3
JH
3231 cgraph_node *node = e->caller->inlined_to
3232 ? e->caller->inlined_to : e->caller;
3233 class ipa_node_params *caller_parms_info = IPA_NODE_REF (node);
3234 class ipa_edge_args *args = IPA_EDGE_REF (e);
3235 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, size_arg);
3236
3237 if (jf)
3238 size = ipa_value_from_jfunc (caller_parms_info, jf,
3239 get_parm_type (e->callee->decl, size_arg));
ada353b8 3240 }
6cef01c3
JH
3241 else if (fnspec.arg_access_size_given_by_type_p (i))
3242 size = TYPE_SIZE_UNIT (get_parm_type (e->callee->decl, i));
3243 modref_access_node a = {0, -1, -1,
3244 map.parm_offset, map.parm_index,
3245 map.parm_offset_known};
3246 poly_int64 size_hwi;
3247 if (size
3248 && poly_int_tree_p (size, &size_hwi)
3249 && coeffs_in_range_p (size_hwi, 0,
3250 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
ada353b8 3251 {
6cef01c3
JH
3252 a.size = -1;
3253 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
ada353b8 3254 }
6cef01c3
JH
3255 return a;
3256}
3257
3258/* Call E in NODE with ECF_FLAGS has no summary; update MODREF_SUMMARY and
3259 CUR_SUMMARY_LTO accordingly. Return true if something changed. */
3260
3261static bool
3262propagate_unknown_call (cgraph_node *node,
3263 cgraph_edge *e, int ecf_flags,
85ebbabd
JH
3264 modref_summary *cur_summary,
3265 modref_summary_lto *cur_summary_lto)
6cef01c3
JH
3266{
3267 bool changed = false;
6cef01c3
JH
3268 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
3269 auto_vec <modref_parm_map, 32> parm_map;
3270 if (fnspec_sum
3271 && compute_parm_map (e, &parm_map))
3272 {
3273 attr_fnspec fnspec (fnspec_sum->fnspec);
3274
3275 gcc_checking_assert (fnspec.known_p ());
3276 if (fnspec.global_memory_read_p ())
3277 collapse_loads (cur_summary, cur_summary_lto);
3278 else
3279 {
3280 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
3281 for (unsigned i = 0; i < parm_map.length () && t;
3282 i++, t = TREE_CHAIN (t))
3283 if (!POINTER_TYPE_P (TREE_VALUE (t)))
3284 ;
3285 else if (!fnspec.arg_specified_p (i)
3286 || fnspec.arg_maybe_read_p (i))
3287 {
3288 modref_parm_map map = parm_map[i];
3289 if (map.parm_index == -2)
3290 continue;
3291 if (map.parm_index == -1)
3292 {
3293 collapse_loads (cur_summary, cur_summary_lto);
3294 break;
3295 }
3296 if (cur_summary)
3297 changed |= cur_summary->loads->insert
3298 (0, 0, get_access_for_fnspec (e, fnspec, i, map));
3299 if (cur_summary_lto)
3300 changed |= cur_summary_lto->loads->insert
3301 (0, 0, get_access_for_fnspec (e, fnspec, i, map));
3302 }
3303 }
3304 if (ignore_stores_p (node->decl, ecf_flags))
3305 ;
3306 else if (fnspec.global_memory_written_p ())
3307 collapse_stores (cur_summary, cur_summary_lto);
3308 else
3309 {
3310 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
3311 for (unsigned i = 0; i < parm_map.length () && t;
3312 i++, t = TREE_CHAIN (t))
3313 if (!POINTER_TYPE_P (TREE_VALUE (t)))
3314 ;
3315 else if (!fnspec.arg_specified_p (i)
3316 || fnspec.arg_maybe_written_p (i))
3317 {
3318 modref_parm_map map = parm_map[i];
3319 if (map.parm_index == -2)
3320 continue;
3321 if (map.parm_index == -1)
3322 {
3323 collapse_stores (cur_summary, cur_summary_lto);
3324 break;
3325 }
3326 if (cur_summary)
3327 changed |= cur_summary->stores->insert
3328 (0, 0, get_access_for_fnspec (e, fnspec, i, map));
3329 if (cur_summary_lto)
3330 changed |= cur_summary_lto->stores->insert
3331 (0, 0, get_access_for_fnspec (e, fnspec, i, map));
3332 }
3333 }
3334 if (fnspec.errno_maybe_written_p () && flag_errno_math)
3335 {
3336 if (cur_summary && !cur_summary->writes_errno)
3337 {
3338 cur_summary->writes_errno = true;
3339 changed = true;
3340 }
3341 if (cur_summary_lto && !cur_summary_lto->writes_errno)
3342 {
3343 cur_summary_lto->writes_errno = true;
3344 changed = true;
3345 }
3346 }
3347 return changed;
3348 }
85ebbabd
JH
3349 if (dump_file)
3350 fprintf (dump_file, " collapsing loads\n");
3351 changed |= collapse_loads (cur_summary, cur_summary_lto);
3352 if (!ignore_stores_p (node->decl, ecf_flags))
6cef01c3
JH
3353 {
3354 if (dump_file)
85ebbabd
JH
3355 fprintf (dump_file, " collapsing stores\n");
3356 changed |= collapse_stores (cur_summary, cur_summary_lto);
6cef01c3 3357 }
85ebbabd 3358 return changed;
ada353b8 3359}
d119f34c 3360
85ebbabd
JH
3361/* Maybe remove summaies of NODE pointed to by CUR_SUMMARY_PTR
3362 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
3363
3364static void
3365remove_useless_summaries (cgraph_node *node,
3366 modref_summary **cur_summary_ptr,
3367 modref_summary_lto **cur_summary_lto_ptr,
3368 int ecf_flags)
3369{
3370 if (*cur_summary_ptr && !(*cur_summary_ptr)->useful_p (ecf_flags, false))
3371 {
3372 optimization_summaries->remove (node);
3373 *cur_summary_ptr = NULL;
3374 }
3375 if (*cur_summary_lto_ptr
3376 && !(*cur_summary_lto_ptr)->useful_p (ecf_flags, false))
3377 {
3378 summaries_lto->remove (node);
3379 *cur_summary_lto_ptr = NULL;
3380 }
3381}
3382
3383/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
3384 and propagate loads/stores. */
ada353b8
JH
3385
3386static void
3387modref_propagate_in_scc (cgraph_node *component_node)
3388{
3389 bool changed = true;
3390 int iteration = 0;
3391
3392 while (changed)
3393 {
3394 changed = false;
3395 for (struct cgraph_node *cur = component_node; cur;
d119f34c
JH
3396 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
3397 {
ada353b8 3398 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
71dbabcc
JH
3399 modref_summary *cur_summary = optimization_summaries
3400 ? optimization_summaries->get (node)
3401 : NULL;
3402 modref_summary_lto *cur_summary_lto = summaries_lto
3403 ? summaries_lto->get (node)
3404 : NULL;
3405
3406 if (!cur_summary && !cur_summary_lto)
ada353b8
JH
3407 continue;
3408
85ebbabd
JH
3409 int cur_ecf_flags = flags_from_decl_or_type (node->decl);
3410
ada353b8
JH
3411 if (dump_file)
3412 fprintf (dump_file, " Processing %s%s%s\n",
3413 cur->dump_name (),
3414 TREE_READONLY (cur->decl) ? " (const)" : "",
3415 DECL_PURE_P (cur->decl) ? " (pure)" : "");
d119f34c 3416
d119f34c
JH
3417 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
3418 {
3419 if (e->indirect_info->ecf_flags & (ECF_CONST | ECF_NOVOPS))
3420 continue;
6cef01c3
JH
3421 if (dump_file)
3422 fprintf (dump_file, " Indirect call"
3423 "collapsing loads\n");
85ebbabd 3424 if (propagate_unknown_call
6cef01c3 3425 (node, e, e->indirect_info->ecf_flags,
85ebbabd
JH
3426 cur_summary, cur_summary_lto))
3427 {
3428 changed = true;
3429 remove_useless_summaries (node, &cur_summary,
3430 &cur_summary_lto,
3431 cur_ecf_flags);
3432 if (!cur_summary && !cur_summary_lto)
3433 break;
3434 }
d119f34c
JH
3435 }
3436
71dbabcc 3437 if (!cur_summary && !cur_summary_lto)
ada353b8
JH
3438 continue;
3439
d119f34c
JH
3440 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
3441 callee_edge = callee_edge->next_callee)
3442 {
3443 int flags = flags_from_decl_or_type (callee_edge->callee->decl);
71dbabcc
JH
3444 modref_summary *callee_summary = NULL;
3445 modref_summary_lto *callee_summary_lto = NULL;
d119f34c
JH
3446 struct cgraph_node *callee;
3447
ada353b8
JH
3448 if (flags & (ECF_CONST | ECF_NOVOPS)
3449 || !callee_edge->inline_failed)
d119f34c
JH
3450 continue;
3451
d119f34c
JH
3452 /* Get the callee and its summary. */
3453 enum availability avail;
3454 callee = callee_edge->callee->function_or_virtual_thunk_symbol
3455 (&avail, cur);
3456
ada353b8
JH
3457 /* It is not necessary to re-process calls outside of the
3458 SCC component. */
3459 if (iteration > 0
3460 && (!callee->aux
3461 || ((struct ipa_dfs_info *)cur->aux)->scc_no
3462 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
3463 continue;
3464
3465 if (dump_file)
3466 fprintf (dump_file, " Call to %s\n",
3467 callee_edge->callee->dump_name ());
d119f34c
JH
3468
3469 bool ignore_stores = ignore_stores_p (cur->decl, flags);
3470
71dbabcc 3471 if (avail <= AVAIL_INTERPOSABLE)
d119f34c 3472 {
6cef01c3
JH
3473 if (dump_file)
3474 fprintf (dump_file, " Call target interposable"
3475 " or not available\n");
3476 changed |= propagate_unknown_call
3477 (node, callee_edge, flags,
85ebbabd 3478 cur_summary, cur_summary_lto);
6cef01c3
JH
3479 if (!cur_summary && !cur_summary_lto)
3480 break;
3481 continue;
71dbabcc
JH
3482 }
3483
3484 /* We don't know anything about CALLEE, hence we cannot tell
3485 anything about the entire component. */
3486
3487 if (cur_summary
3488 && !(callee_summary = optimization_summaries->get (callee)))
3489 {
6cef01c3
JH
3490 if (dump_file)
3491 fprintf (dump_file, " No call target summary\n");
3492 changed |= propagate_unknown_call
3493 (node, callee_edge, flags,
85ebbabd 3494 cur_summary, NULL);
71dbabcc
JH
3495 }
3496 if (cur_summary_lto
3497 && !(callee_summary_lto = summaries_lto->get (callee)))
3498 {
6cef01c3
JH
3499 if (dump_file)
3500 fprintf (dump_file, " No call target summary\n");
3501 changed |= propagate_unknown_call
3502 (node, callee_edge, flags,
85ebbabd 3503 NULL, cur_summary_lto);
d119f34c
JH
3504 }
3505
ada353b8
JH
3506 /* We can not safely optimize based on summary of callee if it
3507 does not always bind to current def: it is possible that
3508 memory load was optimized out earlier which may not happen in
3509 the interposed variant. */
3510 if (!callee_edge->binds_to_current_def_p ())
3511 {
71dbabcc 3512 changed |= collapse_loads (cur_summary, cur_summary_lto);
ada353b8
JH
3513 if (dump_file)
3514 fprintf (dump_file, " May not bind local;"
3515 " collapsing loads\n");
3516 }
3517
3518
c34db4b6 3519 auto_vec <modref_parm_map, 32> parm_map;
ada353b8
JH
3520
3521 compute_parm_map (callee_edge, &parm_map);
c33f4742 3522
d119f34c 3523 /* Merge in callee's information. */
71dbabcc
JH
3524 if (callee_summary)
3525 {
56cb815b
JH
3526 changed |= cur_summary->loads->merge
3527 (callee_summary->loads, &parm_map);
3528 if (!ignore_stores)
6cef01c3
JH
3529 {
3530 changed |= cur_summary->stores->merge
3531 (callee_summary->stores, &parm_map);
3532 if (!cur_summary->writes_errno
3533 && callee_summary->writes_errno)
3534 {
3535 cur_summary->writes_errno = true;
3536 changed = true;
3537 }
3538 }
71dbabcc
JH
3539 }
3540 if (callee_summary_lto)
3541 {
56cb815b
JH
3542 changed |= cur_summary_lto->loads->merge
3543 (callee_summary_lto->loads, &parm_map);
3544 if (!ignore_stores)
6cef01c3
JH
3545 {
3546 changed |= cur_summary_lto->stores->merge
3547 (callee_summary_lto->stores, &parm_map);
3548 if (!cur_summary_lto->writes_errno
3549 && callee_summary_lto->writes_errno)
3550 {
3551 cur_summary_lto->writes_errno = true;
3552 changed = true;
3553 }
3554 }
71dbabcc 3555 }
85ebbabd
JH
3556 if (changed)
3557 remove_useless_summaries (node, &cur_summary,
3558 &cur_summary_lto,
3559 cur_ecf_flags);
3560 if (!cur_summary && !cur_summary_lto)
3561 break;
ada353b8 3562 if (dump_file && changed)
71dbabcc
JH
3563 {
3564 if (cur_summary)
3565 cur_summary->dump (dump_file);
3566 if (cur_summary_lto)
3567 cur_summary_lto->dump (dump_file);
85ebbabd 3568 dump_modref_edge_summaries (dump_file, node, 4);
71dbabcc 3569 }
d119f34c
JH
3570 }
3571 }
ada353b8
JH
3572 iteration++;
3573 }
ada353b8 3574 if (dump_file)
85ebbabd
JH
3575 fprintf (dump_file,
3576 "Propagation finished in %i iterations\n", iteration);
3577}
3578
3579/* Dump results of propagation in SCC rooted in COMPONENT_NODE. */
3580
3581static void
3582modref_propagate_dump_scc (cgraph_node *component_node)
3583{
3584 for (struct cgraph_node *cur = component_node; cur;
3585 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
3586 if (!cur->inlined_to)
3587 {
3588 modref_summary *cur_summary = optimization_summaries
3589 ? optimization_summaries->get (cur)
3590 : NULL;
3591 modref_summary_lto *cur_summary_lto = summaries_lto
3592 ? summaries_lto->get (cur)
3593 : NULL;
3594
3595 fprintf (dump_file, "Propagated modref for %s%s%s\n",
3596 cur->dump_name (),
3597 TREE_READONLY (cur->decl) ? " (const)" : "",
3598 DECL_PURE_P (cur->decl) ? " (pure)" : "");
3599 if (optimization_summaries)
3600 {
3601 if (cur_summary)
3602 cur_summary->dump (dump_file);
3603 else
3604 fprintf (dump_file, " Not tracked\n");
3605 }
3606 if (summaries_lto)
3607 {
3608 if (cur_summary_lto)
3609 cur_summary_lto->dump (dump_file);
3610 else
3611 fprintf (dump_file, " Not tracked (lto)\n");
3612 }
3613 }
3614}
3615
3616/* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
3617 and SUMMARY_LTO to CUR_SUMMARY_LTO.
3618 Return true if something changed. */
3619
3620static bool
3621modref_merge_call_site_flags (escape_summary *sum,
3622 modref_summary *cur_summary,
3623 modref_summary_lto *cur_summary_lto,
3624 modref_summary *summary,
3625 modref_summary_lto *summary_lto,
3626 bool ignore_stores)
3627{
3628 escape_entry *ee;
3629 unsigned int i;
3630 bool changed = false;
3631
3632 /* If we have no useful info to propagate. */
3633 if ((!cur_summary || !cur_summary->arg_flags.length ())
3634 && (!cur_summary_lto || !cur_summary_lto->arg_flags.length ()))
3635 return false;
3636
3637 FOR_EACH_VEC_ELT (sum->esc, i, ee)
ada353b8 3638 {
85ebbabd
JH
3639 int flags = 0;
3640 int flags_lto = 0;
3641
3642 if (summary && ee->arg < summary->arg_flags.length ())
3643 flags = summary->arg_flags[ee->arg];
3644 if (summary_lto
3645 && ee->arg < summary_lto->arg_flags.length ())
3646 flags_lto = summary_lto->arg_flags[ee->arg];
3647 if (!ee->direct)
3648 {
3649 flags = deref_flags (flags, ignore_stores);
3650 flags_lto = deref_flags (flags_lto, ignore_stores);
3651 }
3652 else if (ignore_stores)
3653 {
bb07490a
JH
3654 flags |= EAF_NOESCAPE | EAF_NOCLOBBER | EAF_NODIRECTESCAPE;
3655 flags_lto |= EAF_NOESCAPE | EAF_NOCLOBBER | EAF_NODIRECTESCAPE;
85ebbabd
JH
3656 }
3657 flags |= ee->min_flags;
3658 flags_lto |= ee->min_flags;
3350e59f
JH
3659 if (!(flags & EAF_UNUSED)
3660 && cur_summary && ee->parm_index < cur_summary->arg_flags.length ())
85ebbabd
JH
3661 {
3662 int f = cur_summary->arg_flags[ee->parm_index];
3663 if ((f & flags) != f)
3664 {
3665 f = f & flags;
3666 if ((f & ~(EAF_DIRECT | EAF_NOCLOBBER)) == 0)
3667 f = 0;
3668 cur_summary->arg_flags[ee->parm_index] = f;
3669 changed = true;
3670 }
3671 }
3350e59f
JH
3672 if (!(flags_lto & EAF_UNUSED)
3673 && cur_summary_lto
85ebbabd
JH
3674 && ee->parm_index < cur_summary_lto->arg_flags.length ())
3675 {
3676 int f = cur_summary_lto->arg_flags[ee->parm_index];
3677 if ((f & flags_lto) != f)
3678 {
3679 f = f & flags;
3680 if ((f & ~(EAF_DIRECT | EAF_NOCLOBBER)) == 0)
3681 f = 0;
3682 cur_summary_lto->arg_flags[ee->parm_index] = f;
3683 changed = true;
3684 }
3685 }
3686 }
3687 return changed;
3688}
3689
3690/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
3691 and propagate arg flags. */
3692
3693static void
3694modref_propagate_flags_in_scc (cgraph_node *component_node)
3695{
3696 bool changed = true;
3697 int iteration = 0;
3698
3699 while (changed)
3700 {
3701 changed = false;
ada353b8 3702 for (struct cgraph_node *cur = component_node; cur;
d119f34c 3703 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
85ebbabd
JH
3704 {
3705 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
3706 modref_summary *cur_summary = optimization_summaries
3707 ? optimization_summaries->get (node)
3708 : NULL;
3709 modref_summary_lto *cur_summary_lto = summaries_lto
3710 ? summaries_lto->get (node)
3711 : NULL;
3712
3713 if (!cur_summary && !cur_summary_lto)
3714 continue;
3715
3716 if (dump_file)
3717 fprintf (dump_file, " Processing %s%s%s\n",
ada353b8
JH
3718 cur->dump_name (),
3719 TREE_READONLY (cur->decl) ? " (const)" : "",
3720 DECL_PURE_P (cur->decl) ? " (pure)" : "");
85ebbabd
JH
3721
3722 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
3723 {
3724 escape_summary *sum = escape_summaries->get (e);
3725
3726 if (!sum || (e->indirect_info->ecf_flags
3727 & (ECF_CONST | ECF_NOVOPS)))
3728 continue;
3729
3730 changed |= modref_merge_call_site_flags
3731 (sum, cur_summary, cur_summary_lto,
3732 NULL, NULL, ignore_stores_p (node->decl,
3733 e->indirect_info->ecf_flags));
3734 }
3735
3736 if (!cur_summary && !cur_summary_lto)
3737 continue;
3738
3739 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
3740 callee_edge = callee_edge->next_callee)
3741 {
3742 int flags = flags_from_decl_or_type (callee_edge->callee->decl);
3743 modref_summary *callee_summary = NULL;
3744 modref_summary_lto *callee_summary_lto = NULL;
3745 struct cgraph_node *callee;
3746
3747 if (flags & (ECF_CONST | ECF_NOVOPS)
3748 || !callee_edge->inline_failed)
3749 continue;
3750 /* Get the callee and its summary. */
3751 enum availability avail;
3752 callee = callee_edge->callee->function_or_virtual_thunk_symbol
3753 (&avail, cur);
3754
3755 /* It is not necessary to re-process calls outside of the
3756 SCC component. */
3757 if (iteration > 0
3758 && (!callee->aux
3759 || ((struct ipa_dfs_info *)cur->aux)->scc_no
3760 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
3761 continue;
3762
3763 escape_summary *sum = escape_summaries->get (callee_edge);
3764 if (!sum)
3765 continue;
3766
3767 if (dump_file)
3768 fprintf (dump_file, " Call to %s\n",
3769 callee_edge->callee->dump_name ());
3770
3771 if (avail <= AVAIL_INTERPOSABLE
3772 || callee_edge->call_stmt_cannot_inline_p)
3773 ;
3774 else
3775 {
3776 if (cur_summary)
3777 callee_summary = optimization_summaries->get (callee);
3778 if (cur_summary_lto)
3779 callee_summary_lto = summaries_lto->get (callee);
3780 }
3781 changed |= modref_merge_call_site_flags
3782 (sum, cur_summary, cur_summary_lto,
3783 callee_summary, callee_summary_lto,
3784 ignore_stores_p (node->decl, flags));
3785 if (dump_file && changed)
3786 {
3787 if (cur_summary)
3788 cur_summary->dump (dump_file);
3789 if (cur_summary_lto)
3790 cur_summary_lto->dump (dump_file);
3791 }
3792 }
3793 }
3794 iteration++;
3795 }
3796 if (dump_file)
3797 fprintf (dump_file,
3798 "Propagation of flags finished in %i iterations\n", iteration);
ada353b8
JH
3799}
3800
3801/* Run the IPA pass. This will take a function's summaries and calls and
3802 construct new summaries which represent a transitive closure. So that
3803 summary of an analyzed function contains information about the loads and
3804 stores that the function or any function that it calls does. */
3805
3806unsigned int
3807pass_ipa_modref::execute (function *)
3808{
71dbabcc 3809 if (!summaries && !summaries_lto)
ada353b8
JH
3810 return 0;
3811
71dbabcc
JH
3812 if (optimization_summaries)
3813 ggc_delete (optimization_summaries);
3814 optimization_summaries = summaries;
3815 summaries = NULL;
3816
ada353b8
JH
3817 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
3818 symtab->cgraph_count);
3819 int order_pos;
3820 order_pos = ipa_reduced_postorder (order, true, ignore_edge);
3821 int i;
3822
3823 /* Iterate over all strongly connected components in post-order. */
3824 for (i = 0; i < order_pos; i++)
3825 {
3826 /* Get the component's representative. That's just any node in the
3827 component from which we can traverse the entire component. */
3828 struct cgraph_node *component_node = order[i];
3829
3830 if (dump_file)
3831 fprintf (dump_file, "\n\nStart of SCC component\n");
3832
3833 modref_propagate_in_scc (component_node);
85ebbabd
JH
3834 modref_propagate_flags_in_scc (component_node);
3835 if (dump_file)
3836 modref_propagate_dump_scc (component_node);
d119f34c 3837 }
fe90c504
JH
3838 cgraph_node *node;
3839 FOR_EACH_FUNCTION (node)
3840 update_signature (node);
71dbabcc
JH
3841 if (summaries_lto)
3842 ((modref_summaries_lto *)summaries_lto)->propagated = true;
d119f34c 3843 ipa_free_postorder_info ();
a0e6e49d 3844 free (order);
6cef01c3
JH
3845 delete fnspec_summaries;
3846 fnspec_summaries = NULL;
85ebbabd
JH
3847 delete escape_summaries;
3848 escape_summaries = NULL;
d119f34c
JH
3849 return 0;
3850}
3851
39b3b1bd
JH
3852/* Summaries must stay alive until end of compilation. */
3853
3854void
3855ipa_modref_c_finalize ()
3856{
71dbabcc
JH
3857 if (optimization_summaries)
3858 ggc_delete (optimization_summaries);
3859 optimization_summaries = NULL;
3860 gcc_checking_assert (!summaries);
3861 if (summaries_lto)
85ebbabd
JH
3862 ggc_delete (summaries_lto);
3863 summaries_lto = NULL;
6cef01c3
JH
3864 if (fnspec_summaries)
3865 delete fnspec_summaries;
3866 fnspec_summaries = NULL;
85ebbabd
JH
3867 if (escape_summaries)
3868 delete escape_summaries;
3869 escape_summaries = NULL;
39b3b1bd
JH
3870}
3871
d119f34c 3872#include "gt-ipa-modref.h"