]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-modref.cc
libbacktrace: replace fgrep with grep in configure script
[thirdparty/gcc.git] / gcc / ipa-modref.cc
CommitLineData
d119f34c 1/* Search for references that a functions loads or stores.
7adcbafe 2 Copyright (C) 2020-2022 Free Software Foundation, Inc.
d119f34c
JH
3 Contributed by David Cepelik and Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21/* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
85ebbabd 23 accesses across function calls.
d119f34c
JH
24
25 This file contains a tree pass and an IPA pass. Both performs the same
8a2fd716 26 analysis however tree pass is executed during early and late optimization
d119f34c
JH
27 passes to propagate info downwards in the compilation order. IPA pass
28 propagates across the callgraph and is able to handle recursion and works on
29 whole program during link-time analysis.
30
46a27415 31 LTO mode differs from the local mode by not recording alias sets but types
d119f34c 32 that are translated to alias sets later. This is necessary in order stream
46a27415 33 the information because the alias sets are rebuild at stream-in time and may
85ebbabd
JH
34 not correspond to ones seen during analysis. For this reason part of
35 analysis is duplicated.
36
37 The following information is computed
38 1) load/store access tree described in ipa-modref-tree.h
11056ab7 39 This is used by tree-ssa-alias to disambiguate load/stores
02c80893 40 2) EAF flags used by points-to analysis (in tree-ssa-structalias).
85ebbabd
JH
41 and defined in tree-core.h.
42 and stored to optimization_summaries.
43
44 There are multiple summaries computed and used during the propagation:
45 - summaries holds summaries from analysis to IPA propagation
46 time.
47 - summaries_lto is same as summaries but holds them in a format
48 that can be streamed (as described above).
49 - fnspec_summary holds fnspec strings for call. This is
50 necessary because gimple_call_fnspec performs additional
51 analysis except for looking callee fndecl.
52 - escape_summary holds escape points for given call edge.
02c80893 53 That is a vector recording what function parameters
85ebbabd 54 may escape to a function call (and with what parameter index). */
d119f34c
JH
55
56#include "config.h"
57#include "system.h"
58#include "coretypes.h"
59#include "backend.h"
60#include "tree.h"
61#include "gimple.h"
62#include "alloc-pool.h"
63#include "tree-pass.h"
64#include "gimple-iterator.h"
65#include "tree-dfa.h"
66#include "cgraph.h"
67#include "ipa-utils.h"
68#include "symbol-summary.h"
69#include "gimple-pretty-print.h"
70#include "gimple-walk.h"
71#include "print-tree.h"
72#include "tree-streamer.h"
73#include "alias.h"
74#include "calls.h"
75#include "ipa-modref-tree.h"
76#include "ipa-modref.h"
e977dd5e
JH
77#include "value-range.h"
78#include "ipa-prop.h"
79#include "ipa-fnsummary.h"
617695cd 80#include "attr-fnspec.h"
ae7a23a3 81#include "symtab-clones.h"
520d5ad3
JH
82#include "gimple-ssa.h"
83#include "tree-phinodes.h"
84#include "tree-ssa-operands.h"
85#include "ssa-iterators.h"
86#include "stringpool.h"
87#include "tree-ssanames.h"
008e7397 88#include "attribs.h"
b8ef019a 89#include "tree-cfg.h"
992644c3 90#include "tree-eh.h"
520d5ad3 91
8da8ed43 92
85ebbabd 93namespace {
d119f34c 94
6cef01c3
JH
/* We record fnspec specifiers for call edges since they depends on actual
   gimple statements.  */

class fnspec_summary
{
public:
  /* Malloc'ed fnspec string describing side effects of the call;
     owned by this summary and released in the destructor.  */
  char *fnspec;

  fnspec_summary ()
  : fnspec (NULL)
  {
  }

  ~fnspec_summary ()
  {
    /* free (NULL) is a no-op, so an unset fnspec is fine here.  */
    free (fnspec);
  }
};
113
114/* Summary holding fnspec string for a given call. */
115
116class fnspec_summaries_t : public call_summary <fnspec_summary *>
117{
118public:
119 fnspec_summaries_t (symbol_table *symtab)
120 : call_summary <fnspec_summary *> (symtab) {}
121 /* Hook that is called by summary when an edge is duplicated. */
f31ba116
DM
122 void duplicate (cgraph_edge *,
123 cgraph_edge *,
124 fnspec_summary *src,
125 fnspec_summary *dst) final override
6cef01c3
JH
126 {
127 dst->fnspec = xstrdup (src->fnspec);
128 }
129};
130
131static fnspec_summaries_t *fnspec_summaries = NULL;
132
85ebbabd
JH
/* Escape summary holds a vector of param indexes that escape to
   a given call.  */
struct escape_entry
{
  /* Parameter that escapes at a given call.  */
  int parm_index;
  /* Argument it escapes to.  */
  unsigned int arg;
  /* Minimal flags known about the argument.  */
  eaf_flags_t min_flags;
  /* Does it escape directly or indirectly?  */
  bool direct;
};
146
147/* Dump EAF flags. */
148
149static void
150dump_eaf_flags (FILE *out, int flags, bool newline = true)
151{
85ebbabd
JH
152 if (flags & EAF_UNUSED)
153 fprintf (out, " unused");
d70ef656
JH
154 if (flags & EAF_NO_DIRECT_CLOBBER)
155 fprintf (out, " no_direct_clobber");
156 if (flags & EAF_NO_INDIRECT_CLOBBER)
157 fprintf (out, " no_indirect_clobber");
158 if (flags & EAF_NO_DIRECT_ESCAPE)
159 fprintf (out, " no_direct_escape");
160 if (flags & EAF_NO_INDIRECT_ESCAPE)
161 fprintf (out, " no_indirect_escape");
f1979156
JH
162 if (flags & EAF_NOT_RETURNED_DIRECTLY)
163 fprintf (out, " not_returned_directly");
d70ef656
JH
164 if (flags & EAF_NOT_RETURNED_INDIRECTLY)
165 fprintf (out, " not_returned_indirectly");
166 if (flags & EAF_NO_DIRECT_READ)
167 fprintf (out, " no_direct_read");
168 if (flags & EAF_NO_INDIRECT_READ)
169 fprintf (out, " no_indirect_read");
85ebbabd
JH
170 if (newline)
171 fprintf (out, "\n");
172}
173
/* Per-call-edge record of escape points (see escape_entry).  */
struct escape_summary
{
  /* Escape points recorded for the call edge.  */
  auto_vec <escape_entry> esc;
  /* Dump the recorded entries to OUT, one per line.  */
  void dump (FILE *out)
  {
    for (unsigned int i = 0; i < esc.length (); i++)
      {
	fprintf (out, " parm %i arg %i %s min:",
		 esc[i].parm_index,
		 esc[i].arg,
		 esc[i].direct ? "(direct)" : "(indirect)");
	dump_eaf_flags (out, esc[i].min_flags, false);
      }
    fprintf (out, "\n");
  }
};
190
/* Summary map attaching escape_summary records to call edges.  */
class escape_summaries_t : public call_summary <escape_summary *>
{
public:
  escape_summaries_t (symbol_table *symtab)
  : call_summary <escape_summary *> (symtab) {}
  /* Hook that is called by summary when an edge is duplicated.  */
  void duplicate (cgraph_edge *,
		  cgraph_edge *,
		  escape_summary *src,
		  escape_summary *dst) final override
  {
    /* Deep copy: src and dst edges must not share vector storage.  */
    dst->esc = src->esc.copy ();
  }
};

/* Map from call edges to escape summaries; allocated lazily.  */
static escape_summaries_t *escape_summaries = NULL;
207
208} /* ANON namespace: GTY annotated summaries can not be anonymous. */
209
210
d119f34c
JH
/* Class (from which there is one global instance) that holds modref summaries
   for all analyzed functions.  */

class GTY((user)) modref_summaries
  : public fast_function_summary <modref_summary *, va_gc>
{
public:
  modref_summaries (symbol_table *symtab)
  : fast_function_summary <modref_summary *, va_gc> (symtab) {}
  /* Called by the summary machinery when a new summary is attached.  */
  void insert (cgraph_node *, modref_summary *state) final override;
  /* Called when SRC_NODE is duplicated (clone/versioning); copies the
     summary data to the new node.  */
  void duplicate (cgraph_node *src_node,
		  cgraph_node *dst_node,
		  modref_summary *src_data,
		  modref_summary *dst_data) final override;
  /* Allocate the map in GGC memory; ggc_alloc_no_dtor means GGC will
     not run the destructor on collection.  */
  static modref_summaries *create_ggc (symbol_table *symtab)
  {
    return new (ggc_alloc_no_dtor<modref_summaries> ())
	     modref_summaries (symtab);
  }
};
231
71dbabcc
JH
class modref_summary_lto;

/* Class (from which there is one global instance) that holds modref summaries
   for all analyzed functions.  */

class GTY((user)) modref_summaries_lto
  : public fast_function_summary <modref_summary_lto *, va_gc>
{
public:
  modref_summaries_lto (symbol_table *symtab)
  : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
    propagated (false) {}
  /* Called by the summary machinery when a new summary is attached.  */
  void insert (cgraph_node *, modref_summary_lto *state) final override;
  /* Called when SRC_NODE is duplicated; copies the summary data.  */
  void duplicate (cgraph_node *src_node,
		  cgraph_node *dst_node,
		  modref_summary_lto *src_data,
		  modref_summary_lto *dst_data) final override;
  /* Allocate the map in GGC memory; destructor is not registered.  */
  static modref_summaries_lto *create_ggc (symbol_table *symtab)
  {
    return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
	     modref_summaries_lto (symtab);
  }
  /* Set once IPA propagation has been performed on these summaries.  */
  bool propagated;
};
256
/* Global variable holding all modref summaries
   (from analysis to IPA propagation time).  */

static GTY(()) fast_function_summary <modref_summary *, va_gc>
	 *summaries;

/* Global variable holding all modref optimization summaries
   (from IPA propagation time or used by local optimization pass).
   NOTE(review): which of summaries/optimization_summaries is live
   depends on the compilation phase — confirm against the pass hooks.  */

static GTY(()) fast_function_summary <modref_summary *, va_gc>
	 *optimization_summaries;

/* LTO summaries hold info from analysis to LTO streaming or from LTO
   stream-in through propagation to LTO stream-out.  */

static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
	 *summaries_lto;
d119f34c
JH
274
/* Summary for a single function which this pass produces.  */

/* Construct an empty summary: no load/store trees yet, no flags known,
   and all boolean properties conservatively cleared.  */
modref_summary::modref_summary ()
  : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
    writes_errno (false), side_effects (false), nondeterministic (false),
    calls_interposable (false), global_memory_read (false),
    global_memory_written (false), try_dse (false)
{
}
284
/* Release the GGC-allocated load/store trees owned by the summary.  */
modref_summary::~modref_summary ()
{
  if (loads)
    ggc_delete (loads);
  if (stores)
    ggc_delete (stores);
}
292
4341b1b1
JH
293/* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
294 useful to track. If returns_void is true moreover clear
295 EAF_NOT_RETURNED. */
296static int
297remove_useless_eaf_flags (int eaf_flags, int ecf_flags, bool returns_void)
298{
f6f704fd 299 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
4341b1b1
JH
300 eaf_flags &= ~implicit_const_eaf_flags;
301 else if (ecf_flags & ECF_PURE)
302 eaf_flags &= ~implicit_pure_eaf_flags;
303 else if ((ecf_flags & ECF_NORETURN) || returns_void)
d70ef656 304 eaf_flags &= ~(EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY);
4341b1b1
JH
305 return eaf_flags;
306}
307
85ebbabd
JH
308/* Return true if FLAGS holds some useful information. */
309
310static bool
8da8ed43 311eaf_flags_useful_p (vec <eaf_flags_t> &flags, int ecf_flags)
85ebbabd
JH
312{
313 for (unsigned i = 0; i < flags.length (); i++)
4341b1b1
JH
314 if (remove_useless_eaf_flags (flags[i], ecf_flags, false))
315 return true;
85ebbabd
JH
316 return false;
317}
318
/* Return true if summary is potentially useful for optimization.
   If CHECK_FLAGS is false assume that arg_flags are useful.  */

bool
modref_summary::useful_p (int ecf_flags, bool check_flags)
{
  if (arg_flags.length () && !check_flags)
    return true;
  if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
    return true;
  /* Flags carry no information at this point; drop them.  Note that
     this mutates the summary as a side effect of the query.  */
  arg_flags.release ();
  if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
    return true;
  if (check_flags
      && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
    return true;
  if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
    return ((!side_effects || !nondeterministic)
	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
  if (loads && !loads->every_base)
    return true;
  else
    /* Kill info is only consumed together with useful load info.  */
    kills.release ();
  if (ecf_flags & ECF_PURE)
    return ((!side_effects || !nondeterministic)
	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
  return stores && !stores->every_base;
}
347
71dbabcc
JH
/* Single function summary used for LTO.  */

typedef modref_tree <tree> modref_records_lto;
struct GTY(()) modref_summary_lto
{
  /* Load and stores in functions using types rather then alias sets.

     This is necessary to make the information streamable for LTO but is also
     more verbose and thus more likely to hit the limits.  */
  modref_records_lto *loads;
  modref_records_lto *stores;
  /* Accesses known to be overwritten on every normal return.  */
  auto_vec<modref_access_node> GTY((skip)) kills;
  /* Per-parameter EAF flags.  */
  auto_vec<eaf_flags_t> GTY((skip)) arg_flags;
  /* EAF flags of the return slot and the static chain, if any.  */
  eaf_flags_t retslot_flags;
  eaf_flags_t static_chain_flags;
  unsigned writes_errno : 1;
  unsigned side_effects : 1;
  unsigned nondeterministic : 1;
  unsigned calls_interposable : 1;

  modref_summary_lto ();
  ~modref_summary_lto ();
  void dump (FILE *);
  bool useful_p (int ecf_flags, bool check_flags = true);
};
373
/* Summary for a single function which this pass produces.  */

/* Construct an empty LTO summary with all information cleared.  */
modref_summary_lto::modref_summary_lto ()
  : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
    writes_errno (false), side_effects (false), nondeterministic (false),
    calls_interposable (false)
{
}

/* Release the GGC-allocated load/store trees owned by the summary.  */
modref_summary_lto::~modref_summary_lto ()
{
  if (loads)
    ggc_delete (loads);
  if (stores)
    ggc_delete (stores);
}
390
391
85ebbabd
JH
/* Return true if lto summary is potentially useful for optimization.
   If CHECK_FLAGS is false assume that arg_flags are useful.  */

bool
modref_summary_lto::useful_p (int ecf_flags, bool check_flags)
{
  if (arg_flags.length () && !check_flags)
    return true;
  if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
    return true;
  /* Flags carry no information; drop them (mutates the summary).  */
  arg_flags.release ();
  if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
    return true;
  if (check_flags
      && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
    return true;
  if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
    return ((!side_effects || !nondeterministic)
	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
  if (loads && !loads->every_base)
    return true;
  else
    /* Kill info is only consumed together with useful load info.  */
    kills.release ();
  if (ecf_flags & ECF_PURE)
    return ((!side_effects || !nondeterministic)
	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
  return stores && !stores->every_base;
}
420
d119f34c
JH
421/* Dump records TT to OUT. */
422
423static void
424dump_records (modref_records *tt, FILE *out)
425{
d119f34c
JH
426 if (tt->every_base)
427 {
428 fprintf (out, " Every base\n");
429 return;
430 }
431 size_t i;
432 modref_base_node <alias_set_type> *n;
433 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
434 {
435 fprintf (out, " Base %i: alias set %i\n", (int)i, n->base);
436 if (n->every_ref)
437 {
438 fprintf (out, " Every ref\n");
439 continue;
440 }
441 size_t j;
442 modref_ref_node <alias_set_type> *r;
443 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
444 {
445 fprintf (out, " Ref %i: alias set %i\n", (int)j, r->ref);
c33f4742
JH
446 if (r->every_access)
447 {
ada353b8 448 fprintf (out, " Every access\n");
c33f4742
JH
449 continue;
450 }
451 size_t k;
452 modref_access_node *a;
453 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
e30bf330
JH
454 {
455 fprintf (out, " access:");
456 a->dump (out);
457 }
d119f34c
JH
458 }
459 }
460}
461
462/* Dump records TT to OUT. */
463
464static void
465dump_lto_records (modref_records_lto *tt, FILE *out)
466{
d119f34c
JH
467 if (tt->every_base)
468 {
469 fprintf (out, " Every base\n");
470 return;
471 }
472 size_t i;
473 modref_base_node <tree> *n;
474 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
475 {
476 fprintf (out, " Base %i:", (int)i);
477 print_generic_expr (dump_file, n->base);
478 fprintf (out, " (alias set %i)\n",
9044db88 479 n->base ? get_alias_set (n->base) : 0);
d119f34c
JH
480 if (n->every_ref)
481 {
482 fprintf (out, " Every ref\n");
483 continue;
484 }
485 size_t j;
486 modref_ref_node <tree> *r;
487 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
488 {
489 fprintf (out, " Ref %i:", (int)j);
490 print_generic_expr (dump_file, r->ref);
491 fprintf (out, " (alias set %i)\n",
9044db88 492 r->ref ? get_alias_set (r->ref) : 0);
c33f4742
JH
493 if (r->every_access)
494 {
56cb815b 495 fprintf (out, " Every access\n");
c33f4742
JH
496 continue;
497 }
498 size_t k;
499 modref_access_node *a;
500 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
e30bf330
JH
501 {
502 fprintf (out, " access:");
503 a->dump (out);
504 }
d119f34c
JH
505 }
506 }
507}
508
85ebbabd 509/* Dump all escape points of NODE to OUT. */
520d5ad3
JH
510
511static void
85ebbabd 512dump_modref_edge_summaries (FILE *out, cgraph_node *node, int depth)
520d5ad3 513{
85ebbabd
JH
514 int i = 0;
515 if (!escape_summaries)
516 return;
517 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
518 {
519 class escape_summary *sum = escape_summaries->get (e);
520 if (sum)
521 {
522 fprintf (out, "%*sIndirect call %i in %s escapes:",
523 depth, "", i, node->dump_name ());
524 sum->dump (out);
525 }
526 i++;
527 }
528 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
529 {
530 if (!e->inline_failed)
531 dump_modref_edge_summaries (out, e->callee, depth + 1);
532 class escape_summary *sum = escape_summaries->get (e);
533 if (sum)
534 {
535 fprintf (out, "%*sCall %s->%s escapes:", depth, "",
536 node->dump_name (), e->callee->dump_name ());
537 sum->dump (out);
538 }
539 class fnspec_summary *fsum = fnspec_summaries->get (e);
540 if (fsum)
541 {
542 fprintf (out, "%*sCall %s->%s fnspec: %s\n", depth, "",
543 node->dump_name (), e->callee->dump_name (),
544 fsum->fnspec);
545 }
546 }
547}
548
549/* Remove all call edge summaries associated with NODE. */
550
551static void
552remove_modref_edge_summaries (cgraph_node *node)
553{
554 if (!escape_summaries)
555 return;
556 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
557 escape_summaries->remove (e);
558 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
559 {
560 if (!e->inline_failed)
561 remove_modref_edge_summaries (e->callee);
562 escape_summaries->remove (e);
563 fnspec_summaries->remove (e);
564 }
520d5ad3
JH
565}
566
d119f34c
JH
/* Dump summary.  */

void
modref_summary::dump (FILE *out)
{
  /* Access trees.  */
  if (loads)
    {
      fprintf (out, " loads:\n");
      dump_records (loads, out);
    }
  if (stores)
    {
      fprintf (out, " stores:\n");
      dump_records (stores, out);
    }
  if (kills.length ())
    {
      fprintf (out, " kills:\n");
      for (auto kill : kills)
	{
	  fprintf (out, " ");
	  kill.dump (out);
	}
    }
  /* Boolean properties.  */
  if (writes_errno)
    fprintf (out, " Writes errno\n");
  if (side_effects)
    fprintf (out, " Side effects\n");
  if (nondeterministic)
    fprintf (out, " Nondeterministic\n");
  if (calls_interposable)
    fprintf (out, " Calls interposable\n");
  if (global_memory_read)
    fprintf (out, " Global memory read\n");
  if (global_memory_written)
    fprintf (out, " Global memory written\n");
  if (try_dse)
    fprintf (out, " Try dse\n");
  /* EAF flags of parameters, return slot and static chain.  */
  if (arg_flags.length ())
    {
      for (unsigned int i = 0; i < arg_flags.length (); i++)
	if (arg_flags[i])
	  {
	    fprintf (out, " parm %i flags:", i);
	    dump_eaf_flags (out, arg_flags[i]);
	  }
    }
  if (retslot_flags)
    {
      fprintf (out, " Retslot flags:");
      dump_eaf_flags (out, retslot_flags);
    }
  if (static_chain_flags)
    {
      fprintf (out, " Static chain flags:");
      dump_eaf_flags (out, static_chain_flags);
    }
}
627
628void
629modref_summary_lto::dump (FILE *out)
630{
56cb815b
JH
631 fprintf (out, " loads:\n");
632 dump_lto_records (loads, out);
633 fprintf (out, " stores:\n");
634 dump_lto_records (stores, out);
74509b96
JH
635 if (kills.length ())
636 {
637 fprintf (out, " kills:\n");
638 for (auto kill : kills)
639 {
640 fprintf (out, " ");
641 kill.dump (out);
642 }
643 }
6cef01c3
JH
644 if (writes_errno)
645 fprintf (out, " Writes errno\n");
992644c3
JH
646 if (side_effects)
647 fprintf (out, " Side effects\n");
a34edf9a
JH
648 if (nondeterministic)
649 fprintf (out, " Nondeterministic\n");
650 if (calls_interposable)
651 fprintf (out, " Calls interposable\n");
85ebbabd
JH
652 if (arg_flags.length ())
653 {
654 for (unsigned int i = 0; i < arg_flags.length (); i++)
655 if (arg_flags[i])
656 {
657 fprintf (out, " parm %i flags:", i);
658 dump_eaf_flags (out, arg_flags[i]);
659 }
660 }
b8ef019a
JH
661 if (retslot_flags)
662 {
663 fprintf (out, " Retslot flags:");
664 dump_eaf_flags (out, retslot_flags);
665 }
a70c0512
JH
666 if (static_chain_flags)
667 {
668 fprintf (out, " Static chain flags:");
669 dump_eaf_flags (out, static_chain_flags);
670 }
d119f34c
JH
671}
672
e0040bc3 673/* Called after summary is produced and before it is used by local analysis.
5aa91072
JH
674 Can be called multiple times in case summary needs to update signature.
675 FUN is decl of function summary is attached to. */
e0040bc3 676void
5aa91072 677modref_summary::finalize (tree fun)
e0040bc3
JH
678{
679 global_memory_read = !loads || loads->global_access_p ();
680 global_memory_written = !stores || stores->global_access_p ();
5aa91072
JH
681
682 /* We can do DSE if we know function has no side effects and
02c80893 683 we can analyze all stores. Disable dse if there are too many
5aa91072
JH
684 stores to try. */
685 if (side_effects || global_memory_written || writes_errno)
686 try_dse = false;
687 else
688 {
689 try_dse = true;
690 size_t i, j, k;
691 int num_tests = 0, max_tests
8632f8c6 692 = opt_for_fn (fun, param_modref_max_tests);
5aa91072
JH
693 modref_base_node <alias_set_type> *base_node;
694 modref_ref_node <alias_set_type> *ref_node;
695 modref_access_node *access_node;
696 FOR_EACH_VEC_SAFE_ELT (stores->bases, i, base_node)
697 {
698 if (base_node->every_ref)
699 {
700 try_dse = false;
701 break;
702 }
703 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
704 {
705 if (base_node->every_ref)
706 {
707 try_dse = false;
708 break;
709 }
710 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
711 if (num_tests++ > max_tests
712 || !access_node->parm_offset_known)
713 {
714 try_dse = false;
715 break;
716 }
717 if (!try_dse)
718 break;
719 }
720 if (!try_dse)
721 break;
722 }
723 }
6180f5c8
RB
724 if (loads->every_base)
725 load_accesses = 1;
726 else
727 {
728 load_accesses = 0;
729 for (auto base_node : loads->bases)
730 {
731 if (base_node->every_ref)
732 load_accesses++;
733 else
734 for (auto ref_node : base_node->refs)
735 if (ref_node->every_access)
736 load_accesses++;
737 else
738 load_accesses += ref_node->accesses->length ();
739 }
740 }
e0040bc3
JH
741}
742
d119f34c
JH
743/* Get function summary for FUNC if it exists, return NULL otherwise. */
744
745modref_summary *
746get_modref_function_summary (cgraph_node *func)
747{
748 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
71dbabcc 749 if (!optimization_summaries)
d119f34c
JH
750 return NULL;
751
752 /* A single function body may be represented by multiple symbols with
753 different visibility. For example, if FUNC is an interposable alias,
754 we don't want to return anything, even if we have summary for the target
755 function. */
756 enum availability avail;
c87ff875 757 func = func->ultimate_alias_target
520d5ad3
JH
758 (&avail, current_function_decl ?
759 cgraph_node::get (current_function_decl) : NULL);
d119f34c
JH
760 if (avail <= AVAIL_INTERPOSABLE)
761 return NULL;
762
71dbabcc
JH
763 modref_summary *r = optimization_summaries->get (func);
764 return r;
d119f34c
JH
765}
766
6dc90c4d
JH
767/* Get function summary for CALL if it exists, return NULL otherwise.
768 If non-null set interposed to indicate whether function may not
769 bind to current def. In this case sometimes loads from function
770 needs to be ignored. */
771
772modref_summary *
773get_modref_function_summary (gcall *call, bool *interposed)
774{
775 tree callee = gimple_call_fndecl (call);
776 if (!callee)
777 return NULL;
778 struct cgraph_node *node = cgraph_node::get (callee);
779 if (!node)
780 return NULL;
781 modref_summary *r = get_modref_function_summary (node);
782 if (interposed && r)
783 *interposed = r->calls_interposable
8632f8c6 784 || !node->binds_to_current_def_p ();
6dc90c4d
JH
785 return r;
786}
787
788
18f0873d
JH
789namespace {
790
02c80893 791/* Return true if ECF flags says that nondeterminism can be ignored. */
09a4ffb7
JH
792
793static bool
794ignore_nondeterminism_p (tree caller, int flags)
795{
796 if (flags & (ECF_CONST | ECF_PURE))
797 return true;
798 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
799 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
800 return true;
801 return false;
802}
803
804/* Return true if ECF flags says that return value can be ignored. */
805
806static bool
807ignore_retval_p (tree caller, int flags)
808{
809 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
810 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
811 return true;
812 return false;
813}
814
815/* Return true if ECF flags says that stores can be ignored. */
816
817static bool
818ignore_stores_p (tree caller, int flags)
819{
820 if (flags & (ECF_PURE | ECF_CONST | ECF_NOVOPS))
821 return true;
822 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
823 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
824 return true;
825 return false;
826}
827
/* Determine parm_map for PTR which is supposed to be a pointer.  */

modref_parm_map
parm_map_for_ptr (tree op)
{
  bool offset_known;
  poly_int64 offset;
  struct modref_parm_map parm_map;
  gcall *call;

  parm_map.parm_offset_known = false;
  parm_map.parm_offset = 0;

  /* Strip constant pointer adjustments, accumulating them in OFFSET.  */
  offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
  if (TREE_CODE (op) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (op)
      && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
    {
      int index = 0;

      /* The static chain is not a PARM_DECL position; use its own index.  */
      if (cfun->static_chain_decl
	  && op == ssa_default_def (cfun, cfun->static_chain_decl))
	index = MODREF_STATIC_CHAIN_PARM;
      else
	/* Count the position of the parameter in DECL_ARGUMENTS.  */
	for (tree t = DECL_ARGUMENTS (current_function_decl);
	     t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
	  index++;
      parm_map.parm_index = index;
      parm_map.parm_offset_known = offset_known;
      parm_map.parm_offset = offset;
    }
  else if (points_to_local_or_readonly_memory_p (op))
    parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
  /* Memory allocated in the function is not visible to caller before the
     call and thus we do not need to record it as load/stores/kills.  */
  else if (TREE_CODE (op) == SSA_NAME
	   && (call = dyn_cast<gcall *>(SSA_NAME_DEF_STMT (op))) != NULL
	   && gimple_call_flags (call) & ECF_MALLOC)
    parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
  else
    parm_map.parm_index = MODREF_UNKNOWN_PARM;
  return parm_map;
}
871
3305135c
JH
872/* Return true if ARG with EAF flags FLAGS can not make any caller's parameter
873 used (if LOAD is true we check loads, otherwise stores). */
874
875static bool
876verify_arg (tree arg, int flags, bool load)
877{
878 if (flags & EAF_UNUSED)
879 return true;
880 if (load && (flags & EAF_NO_DIRECT_READ))
881 return true;
882 if (!load
883 && (flags & (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
884 == (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
885 return true;
886 if (is_gimple_constant (arg))
887 return true;
888 if (DECL_P (arg) && TREE_READONLY (arg))
889 return true;
890 if (TREE_CODE (arg) == ADDR_EXPR)
891 {
892 tree t = get_base_address (TREE_OPERAND (arg, 0));
893 if (is_gimple_constant (t))
894 return true;
895 if (DECL_P (t)
896 && (TREE_READONLY (t) || TREE_CODE (t) == FUNCTION_DECL))
897 return true;
898 }
899 return false;
900}
901
/* Return true if STMT may access memory that is pointed to by parameters
   of caller and which is not seen as an escape by PTA.
   CALLEE_ECF_FLAGS are ECF flags of callee.  If LOAD is true then by access
   we mean load, otherwise we mean store.  */

static bool
may_access_nonescaping_parm_p (gcall *call, int callee_ecf_flags, bool load)
{
  int implicit_flags = 0;

  /* Derive EAF flags implied by the callee's ECF flags so each argument
     is judged with everything we already know.  */
  if (ignore_stores_p (current_function_decl, callee_ecf_flags))
    implicit_flags |= ignore_stores_eaf_flags;
  if (callee_ecf_flags & ECF_PURE)
    implicit_flags |= implicit_pure_eaf_flags;
  if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
    implicit_flags |= implicit_const_eaf_flags;
  /* Check the static chain and every argument; one failing verify_arg
     means the call may touch non-escaping caller memory.  */
  if (gimple_call_chain (call)
      && !verify_arg (gimple_call_chain (call),
		      gimple_call_static_chain_flags (call) | implicit_flags,
		      load))
    return true;
  for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
    if (!verify_arg (gimple_call_arg (call, i),
		     gimple_call_arg_flags (call, i) | implicit_flags,
		     load))
      return true;
  return false;
}
930
931
09a4ffb7
JH
/* Analyze memory accesses (loads, stores and kills) performed
   by the function.  Set also side_effects, calls_interposable
   and nondeterminism flags.  */

class modref_access_analysis
{
public:
  modref_access_analysis (bool ipa, modref_summary *summary,
			  modref_summary_lto *summary_lto)
  : m_summary (summary), m_summary_lto (summary_lto), m_ipa (ipa)
  {
  }
  /* Entry point: walk the current function and fill in the summary.  */
  void analyze ();
private:
  bool set_side_effects ();
  bool set_nondeterministic ();
  static modref_access_node get_access (ao_ref *ref);
  static void record_access (modref_records *, ao_ref *, modref_access_node &);
  static void record_access_lto (modref_records_lto *, ao_ref *,
				 modref_access_node &a);
  bool record_access_p (tree);
  bool record_unknown_load ();
  bool record_unknown_store ();
  bool record_global_memory_load ();
  bool record_global_memory_store ();
  bool merge_call_side_effects (gimple *, modref_summary *,
				cgraph_node *, bool);
  modref_access_node get_access_for_fnspec (gcall *, attr_fnspec &,
					    unsigned int, modref_parm_map &);
  void process_fnspec (gcall *);
  void analyze_call (gcall *);
  static bool analyze_load (gimple *, tree, tree, void *);
  static bool analyze_store (gimple *, tree, tree, void *);
  void analyze_stmt (gimple *, bool);
  void propagate ();

  /* Summary being computed.
     We work either with m_summary or m_summary_lto.  Never on both.  */
  modref_summary *m_summary;
  modref_summary_lto *m_summary_lto;
  /* Recursive calls needs simplistic dataflow after analysis finished.
     Collect all calls into this vector during analysis and later process
     them in propagate.  */
  auto_vec <gimple *, 32> m_recursive_calls;
  /* ECF flags of function being analyzed.  */
  int m_ecf_flags;
  /* True if IPA propagation will be done later.  */
  bool m_ipa;
  /* Set true if statement currently analyze is known to be
     executed each time function is called.  */
  bool m_always_executed;
};
984
02c80893 985/* Set side_effects flag and return if something changed. */
09a4ffb7
JH
986
987bool
988modref_access_analysis::set_side_effects ()
989{
990 bool changed = false;
991
992 if (m_summary && !m_summary->side_effects)
993 {
994 m_summary->side_effects = true;
995 changed = true;
996 }
997 if (m_summary_lto && !m_summary_lto->side_effects)
998 {
999 m_summary_lto->side_effects = true;
1000 changed = true;
1001 }
1002 return changed;
1003}
1004
02c80893 1005/* Set nondeterministic flag and return if something changed. */
09a4ffb7
JH
1006
1007bool
1008modref_access_analysis::set_nondeterministic ()
1009{
1010 bool changed = false;
1011
1012 if (m_summary && !m_summary->nondeterministic)
1013 {
1014 m_summary->side_effects = m_summary->nondeterministic = true;
1015 changed = true;
1016 }
1017 if (m_summary_lto && !m_summary_lto->nondeterministic)
1018 {
1019 m_summary_lto->side_effects = m_summary_lto->nondeterministic = true;
1020 changed = true;
1021 }
1022 return changed;
1023}
1024
/* Construct modref_access_node from REF.
   Fills in the range (offset/size/max_size) from REF and, when the base
   is a memory reference through a pointer, the index of the parameter
   the pointer maps to together with the combined constant offset.  */

modref_access_node
modref_access_analysis::get_access (ao_ref *ref)
{
  tree base;

  base = ao_ref_base (ref);
  /* Start with an access that is not tied to any parameter.  */
  modref_access_node a = {ref->offset, ref->size, ref->max_size,
			  0, MODREF_UNKNOWN_PARM, false, 0};
  if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree memref = base;
      /* Map the pointer SSA name to a parameter index (if any).  */
      modref_parm_map m = parm_map_for_ptr (TREE_OPERAND (base, 0));

      a.parm_index = m.parm_index;
      if (a.parm_index != MODREF_UNKNOWN_PARM && TREE_CODE (memref) == MEM_REF)
	{
	  /* The MEM_REF's second operand is a constant offset from the
	     pointer; combine it with the offset already known for the
	     parameter.  Both must be representable as shwi to keep the
	     offset known.  */
	  a.parm_offset_known
	     = wi::to_poly_wide (TREE_OPERAND
				 (memref, 1)).to_shwi (&a.parm_offset);
	  if (a.parm_offset_known && m.parm_offset_known)
	    a.parm_offset += m.parm_offset;
	  else
	    a.parm_offset_known = false;
	}
    }
  else
    a.parm_index = MODREF_UNKNOWN_PARM;
  return a;
}
1056
d119f34c
JH
/* Record access into the modref_records data structure.  */

void
modref_access_analysis::record_access (modref_records *tt,
				       ao_ref *ref,
				       modref_access_node &a)
{
  /* Alias sets are only meaningful under (IPA) strict aliasing; otherwise
     use alias set 0 which conflicts with everything.  */
  alias_set_type base_set = !flag_strict_aliasing
			    || !flag_ipa_strict_aliasing ? 0
			    : ao_ref_base_alias_set (ref);
  alias_set_type ref_set = !flag_strict_aliasing
			   || !flag_ipa_strict_aliasing ? 0
			   : (ao_ref_alias_set (ref));
  if (dump_file)
    {
      fprintf (dump_file, " - Recording base_set=%i ref_set=%i ",
	       base_set, ref_set);
      a.dump (dump_file);
    }
  tt->insert (current_function_decl, base_set, ref_set, a, false);
}
1078
/* IPA version of record_access_tree.
   Instead of alias sets this records the types from which the alias sets
   will be rebuilt at LTO stream-in time.  */

void
modref_access_analysis::record_access_lto (modref_records_lto *tt, ao_ref *ref,
					   modref_access_node &a)
{
  /* get_alias_set sometimes use different type to compute the alias set
     than TREE_TYPE (base).  Do same adjustments.  */
  tree base_type = NULL_TREE, ref_type = NULL_TREE;
  if (flag_strict_aliasing && flag_ipa_strict_aliasing)
    {
      tree base;

      base = ref->ref;
      /* Strip handled components to get at the base object.  */
      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);

      base_type = reference_alias_ptr_type_1 (&base);

      if (!base_type)
	base_type = TREE_TYPE (base);
      else
	/* ref-all pointers alias everything; represent that as no type.  */
	base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
		    ? NULL_TREE : TREE_TYPE (base_type);

      tree ref_expr = ref->ref;
      ref_type = reference_alias_ptr_type_1 (&ref_expr);

      if (!ref_type)
	ref_type = TREE_TYPE (ref_expr);
      else
	ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
		   ? NULL_TREE : TREE_TYPE (ref_type);

      /* Sanity check that we are in sync with what get_alias_set does.  */
      gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
			   || get_alias_set (base_type)
			      == ao_ref_base_alias_set (ref));
      gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
			   || get_alias_set (ref_type)
			      == ao_ref_alias_set (ref));

      /* Do not bother to record types that have no meaningful alias set.
	 Also skip variably modified types since these go to local streams.  */
      if (base_type && (!get_alias_set (base_type)
			|| variably_modified_type_p (base_type, NULL_TREE)))
	base_type = NULL_TREE;
      if (ref_type && (!get_alias_set (ref_type)
		       || variably_modified_type_p (ref_type, NULL_TREE)))
	ref_type = NULL_TREE;
    }
  if (dump_file)
    {
      fprintf (dump_file, " - Recording base type:");
      print_generic_expr (dump_file, base_type);
      fprintf (dump_file, " (alias set %i) ref type:",
	       base_type ? get_alias_set (base_type) : 0);
      print_generic_expr (dump_file, ref_type);
      fprintf (dump_file, " (alias set %i) ",
	       ref_type ? get_alias_set (ref_type) : 0);
      a.dump (dump_file);
    }

  tt->insert (current_function_decl, base_type, ref_type, a, false);
}
1144
/* Returns true if and only if we should store the access to EXPR.
   Some accesses, e.g. loads from automatic variables, are not interesting.
   As a side effect, volatile accesses make the summary nondeterministic
   and potentially throwing accesses set the side-effects flag.  */

bool
modref_access_analysis::record_access_p (tree expr)
{
  if (TREE_THIS_VOLATILE (expr))
    {
      if (dump_file)
	fprintf (dump_file, " (volatile; marking nondeterministic) ");
      set_nondeterministic ();
    }
  if (cfun->can_throw_non_call_exceptions
      && tree_could_throw_p (expr))
    {
      if (dump_file)
	fprintf (dump_file, " (can throw; marking side effects) ");
      set_side_effects ();
    }

  /* Accesses to function-local or read-only memory can never interfere
     with accesses in callers, so they need not be recorded.  */
  if (refs_local_or_readonly_memory_p (expr))
    {
      if (dump_file)
	fprintf (dump_file, " - Read-only or local, ignoring.\n");
      return false;
    }
  return true;
}
1173
09a4ffb7 1174/* Collapse loads and return true if something changed. */
85ebbabd 1175
09a4ffb7
JH
1176bool
1177modref_access_analysis::record_unknown_load ()
85ebbabd 1178{
09a4ffb7 1179 bool changed = false;
d119f34c 1180
09a4ffb7
JH
1181 if (m_summary && !m_summary->loads->every_base)
1182 {
1183 m_summary->loads->collapse ();
1184 changed = true;
1185 }
1186 if (m_summary_lto && !m_summary_lto->loads->every_base)
1187 {
1188 m_summary_lto->loads->collapse ();
1189 changed = true;
1190 }
1191 return changed;
d119f34c
JH
1192}
1193
09a4ffb7 1194/* Collapse loads and return true if something changed. */
617695cd 1195
09a4ffb7
JH
1196bool
1197modref_access_analysis::record_unknown_store ()
617695cd 1198{
09a4ffb7 1199 bool changed = false;
ea937e7d 1200
09a4ffb7 1201 if (m_summary && !m_summary->stores->every_base)
617695cd 1202 {
09a4ffb7
JH
1203 m_summary->stores->collapse ();
1204 changed = true;
617695cd 1205 }
09a4ffb7
JH
1206 if (m_summary_lto && !m_summary_lto->stores->every_base)
1207 {
1208 m_summary_lto->stores->collapse ();
1209 changed = true;
1210 }
1211 return changed;
617695cd
JH
1212}
1213
02c80893 1214/* Record unknown load from global memory. */
3305135c
JH
1215
1216bool
1217modref_access_analysis::record_global_memory_load ()
1218{
1219 bool changed = false;
1220 modref_access_node a = {0, -1, -1,
1221 0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
1222
1223 if (m_summary && !m_summary->loads->every_base)
1224 changed |= m_summary->loads->insert (current_function_decl, 0, 0, a, false);
1225 if (m_summary_lto && !m_summary_lto->loads->every_base)
1226 changed |= m_summary_lto->loads->insert (current_function_decl,
1227 0, 0, a, false);
1228 return changed;
1229}
1230
02c80893 1231/* Record unknown store from global memory. */
3305135c
JH
1232
1233bool
1234modref_access_analysis::record_global_memory_store ()
1235{
1236 bool changed = false;
1237 modref_access_node a = {0, -1, -1,
1238 0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
1239
1240 if (m_summary && !m_summary->stores->every_base)
1241 changed |= m_summary->stores->insert (current_function_decl,
1242 0, 0, a, false);
1243 if (m_summary_lto && !m_summary_lto->stores->every_base)
1244 changed |= m_summary_lto->stores->insert (current_function_decl,
1245 0, 0, a, false);
1246 return changed;
1247}
1248
09a4ffb7
JH
/* Merge side effects of call STMT to function with CALLEE_SUMMARY.
   Return true if something changed.
   If IGNORE_STORES is true, do not merge stores.
   If RECORD_ADJUSTMENTS is true cap number of adjustments to
   a given access to make dataflow finite.  */

bool
modref_access_analysis::merge_call_side_effects
	 (gimple *stmt, modref_summary *callee_summary,
	  cgraph_node *callee_node, bool record_adjustments)
{
  gcall *call = as_a <gcall *> (stmt);
  int flags = gimple_call_flags (call);

  /* Nothing to do for non-looping const functions.  */
  if ((flags & (ECF_CONST | ECF_NOVOPS))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return false;

  bool changed = false;

  if (dump_file)
    fprintf (dump_file, " - Merging side effects of %s\n",
	     callee_node->dump_name ());

  /* Merge side effects and non-determinism.
     PURE/CONST flags makes functions deterministic and if there is
     no LOOPING_CONST_OR_PURE they also have no side effects.  */
  if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
      || (flags & ECF_LOOPING_CONST_OR_PURE))
    {
      if (!m_summary->side_effects && callee_summary->side_effects)
	{
	  if (dump_file)
	    fprintf (dump_file, " - merging side effects.\n");
	  m_summary->side_effects = true;
	  changed = true;
	}
      if (!m_summary->nondeterministic && callee_summary->nondeterministic
	  && !ignore_nondeterminism_p (current_function_decl, flags))
	{
	  if (dump_file)
	    fprintf (dump_file, " - merging nondeterministic.\n");
	  m_summary->nondeterministic = true;
	  changed = true;
	}
    }

  /* For const functions we are done.  */
  if (flags & (ECF_CONST | ECF_NOVOPS))
    return changed;

  /* Merge calls_interposable flags.  */
  if (!m_summary->calls_interposable && callee_summary->calls_interposable)
    {
      if (dump_file)
	fprintf (dump_file, " - merging calls interposable.\n");
      m_summary->calls_interposable = true;
      changed = true;
    }

  /* A callee that does not bind to the current definition may be replaced
     at link time; record that fact.  */
  if (!callee_node->binds_to_current_def_p () && !m_summary->calls_interposable)
    {
      if (dump_file)
	fprintf (dump_file, " - May be interposed.\n");
      m_summary->calls_interposable = true;
      changed = true;
    }

  /* Now merge the actual load, store and kill vectors.  For this we need
     to compute map translating new parameters to old.  */
  if (dump_file)
    fprintf (dump_file, " Parm map:");

  auto_vec <modref_parm_map, 32> parm_map;
  parm_map.safe_grow_cleared (gimple_call_num_args (call), true);
  for (unsigned i = 0; i < gimple_call_num_args (call); i++)
    {
      parm_map[i] = parm_map_for_ptr (gimple_call_arg (call, i));
      if (dump_file)
	{
	  fprintf (dump_file, " %i", parm_map[i].parm_index);
	  if (parm_map[i].parm_offset_known)
	    {
	      fprintf (dump_file, " offset:");
	      print_dec ((poly_int64_pod)parm_map[i].parm_offset,
			 dump_file, SIGNED);
	    }
	}
    }

  /* The static chain (if any) is mapped separately.  */
  modref_parm_map chain_map;
  if (gimple_call_chain (call))
    {
      chain_map = parm_map_for_ptr (gimple_call_chain (call));
      if (dump_file)
	{
	  fprintf (dump_file, "static chain %i", chain_map.parm_index);
	  if (chain_map.parm_offset_known)
	    {
	      fprintf (dump_file, " offset:");
	      print_dec ((poly_int64_pod)chain_map.parm_offset,
			 dump_file, SIGNED);
	    }
	}
    }
  if (dump_file)
    fprintf (dump_file, "\n");

  /* Kills can be merged in only if we know the function is going to be
     always executed.  */
  if (m_always_executed
      && callee_summary->kills.length ()
      && (!cfun->can_throw_non_call_exceptions
	  || !stmt_could_throw_p (cfun, call)))
    {
      /* Watch for self recursive updates.  */
      auto_vec<modref_access_node, 32> saved_kills;

      saved_kills.reserve_exact (callee_summary->kills.length ());
      saved_kills.splice (callee_summary->kills);
      for (auto kill : saved_kills)
	{
	  if (kill.parm_index >= (int)parm_map.length ())
	    continue;
	  modref_parm_map &m
		 = kill.parm_index == MODREF_STATIC_CHAIN_PARM
		   ? chain_map
		   : parm_map[kill.parm_index];
	  /* Kills can only be translated when the argument maps to a
	     caller parameter with a known constant offset.  */
	  if (m.parm_index == MODREF_LOCAL_MEMORY_PARM
	      || m.parm_index == MODREF_UNKNOWN_PARM
	      || m.parm_index == MODREF_RETSLOT_PARM
	      || !m.parm_offset_known)
	    continue;
	  modref_access_node n = kill;
	  n.parm_index = m.parm_index;
	  n.parm_offset += m.parm_offset;
	  if (modref_access_node::insert_kill (m_summary->kills, n,
					       record_adjustments))
	    changed = true;
	}
    }

  /* Merge in loads.  */
  changed |= m_summary->loads->merge (current_function_decl,
				      callee_summary->loads,
				      &parm_map, &chain_map,
				      record_adjustments,
				      !may_access_nonescaping_parm_p
					 (call, flags, true));
  /* Merge in stores.  */
  if (!ignore_stores_p (current_function_decl, flags))
    {
      changed |= m_summary->stores->merge (current_function_decl,
					   callee_summary->stores,
					   &parm_map, &chain_map,
					   record_adjustments,
					   !may_access_nonescaping_parm_p
					      (call, flags, false));
      if (!m_summary->writes_errno
	  && callee_summary->writes_errno)
	{
	  m_summary->writes_errno = true;
	  changed = true;
	}
    }
  return changed;
}
1417
617695cd
JH
/* Return access mode for argument I of call CALL with FNSPEC.
   MAP gives the translation of the argument to a caller parameter.
   The access size is taken from a size argument or from the pointed-to
   type, whichever the fnspec specifies.  */

modref_access_node
modref_access_analysis::get_access_for_fnspec (gcall *call, attr_fnspec &fnspec,
					       unsigned int i,
					       modref_parm_map &map)
{
  tree size = NULL_TREE;
  unsigned int size_arg;

  if (!fnspec.arg_specified_p (i))
    ;
  else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
    /* Size is given by another argument of the call.  */
    size = gimple_call_arg (call, size_arg);
  else if (fnspec.arg_access_size_given_by_type_p (i))
    {
      /* Size is the size of the pointed-to type of parameter I.  */
      tree callee = gimple_call_fndecl (call);
      tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));

      for (unsigned int p = 0; p < i; p++)
	t = TREE_CHAIN (t);
      size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
    }
  modref_access_node a = {0, -1, -1,
			  map.parm_offset, map.parm_index,
			  map.parm_offset_known, 0};
  poly_int64 size_hwi;
  /* Only use the size when it is a constant that converts to bits
     without overflowing.  */
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0,
			    HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      a.size = -1;
      a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
    }
  return a;
}
617695cd
JH
/* Apply side effects of call CALL to the summaries being computed
   using its ECF flags and fnspec attribute (if any).  This is used for
   calls where no modref summary of the callee is available.  */

void
modref_access_analysis::process_fnspec (gcall *call)
{
  int flags = gimple_call_flags (call);

  /* PURE/CONST flags makes functions deterministic and if there is
     no LOOPING_CONST_OR_PURE they also have no side effects.  */
  if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
      || (flags & ECF_LOOPING_CONST_OR_PURE)
      || (cfun->can_throw_non_call_exceptions
	  && stmt_could_throw_p (cfun, call)))
    {
      set_side_effects ();
      if (!ignore_nondeterminism_p (current_function_decl, flags))
	set_nondeterministic ();
    }

  /* For const functions we are done.  */
  if (flags & (ECF_CONST | ECF_NOVOPS))
    return;

  attr_fnspec fnspec = gimple_call_fnspec (call);
  /* If there is no fnspec we know nothing about loads & stores.  */
  if (!fnspec.known_p ())
    {
      if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
	fprintf (dump_file, " Builtin with no fnspec: %s\n",
		 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call))));
      /* Record unknown accesses; when the call cannot access nonescaping
	 parameters we can restrict this to global memory.  */
      if (!ignore_stores_p (current_function_decl, flags))
	{
	  if (!may_access_nonescaping_parm_p (call, flags, false))
	    record_global_memory_store ();
	  else
	    record_unknown_store ();
	  if (!may_access_nonescaping_parm_p (call, flags, true))
	    record_global_memory_load ();
	  else
	    record_unknown_load ();
	}
      else
	{
	  if (!may_access_nonescaping_parm_p (call, flags, true))
	    record_global_memory_load ();
	  else
	    record_unknown_load ();
	}
      return;
    }
  /* Process fnspec.  */
  if (fnspec.global_memory_read_p ())
    {
      if (may_access_nonescaping_parm_p (call, flags, true))
	record_unknown_load ();
      else
	record_global_memory_load ();
    }
  else
    {
      /* Record loads through every pointer argument the fnspec allows
	 to be read.  */
      for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
	if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
	  ;
	else if (!fnspec.arg_specified_p (i)
		 || fnspec.arg_maybe_read_p (i))
	  {
	    modref_parm_map map = parm_map_for_ptr
					(gimple_call_arg (call, i));

	    if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (map.parm_index == MODREF_UNKNOWN_PARM)
	      {
		record_unknown_load ();
		break;
	      }
	    modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
	    if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (m_summary)
	      m_summary->loads->insert (current_function_decl, 0, 0, a, false);
	    if (m_summary_lto)
	      m_summary_lto->loads->insert (current_function_decl, 0, 0, a,
					    false);
	  }
    }
  if (ignore_stores_p (current_function_decl, flags))
    return;
  if (fnspec.global_memory_written_p ())
    {
      if (may_access_nonescaping_parm_p (call, flags, false))
	record_unknown_store ();
      else
	record_global_memory_store ();
    }
  else
    {
      /* Record stores through every pointer argument the fnspec allows
	 to be written.  */
      for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
	if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
	  ;
	else if (!fnspec.arg_specified_p (i)
		 || fnspec.arg_maybe_written_p (i))
	  {
	    modref_parm_map map = parm_map_for_ptr
					 (gimple_call_arg (call, i));

	    if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (map.parm_index == MODREF_UNKNOWN_PARM)
	      {
		record_unknown_store ();
		break;
	      }
	    modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
	    if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (m_summary)
	      m_summary->stores->insert (current_function_decl, 0, 0, a, false);
	    if (m_summary_lto)
	      m_summary_lto->stores->insert (current_function_decl,
					     0, 0, a, false);
	  }
      /* With -fmath-errno, math builtins may also store to errno.  */
      if (fnspec.errno_maybe_written_p () && flag_errno_math)
	{
	  if (m_summary)
	    m_summary->writes_errno = true;
	  if (m_summary_lto)
	    m_summary_lto->writes_errno = true;
	}
    }
}
1588
ada353b8
JH
/* Analyze call STMT and merge its memory effects into the summaries
   being computed.  Recursive calls are pushed to m_recursive_calls to be
   handled later by propagate.  */

void
modref_access_analysis::analyze_call (gcall *stmt)
{
  /* Check flags on the function call.  In certain cases, analysis can be
     simplified.  */
  int flags = gimple_call_flags (stmt);

  if (dump_file)
    {
      fprintf (dump_file, " - Analyzing call:");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  /* Non-looping const/novops calls touch no memory we track.  */
  if ((flags & (ECF_CONST | ECF_NOVOPS))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      if (dump_file)
	fprintf (dump_file,
		 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
		 "except for args.\n");
      return;
    }

  /* Next, we try to get the callee's function declaration.  The goal is to
     merge their summary with ours.  */
  tree callee = gimple_call_fndecl (stmt);

  /* Check if this is an indirect call.  */
  if (!callee)
    {
      if (dump_file)
	fprintf (dump_file, gimple_call_internal_p (stmt)
		 ? " - Internal call" : " - Indirect call.\n");
      process_fnspec (stmt);
      return;
    }
  /* We only need to handle internal calls in IPA mode.  */
  gcc_checking_assert (!m_summary_lto && !m_ipa);

  struct cgraph_node *callee_node = cgraph_node::get_create (callee);

  /* If this is a recursive call, the target summary is the same as ours, so
     there's nothing to do.  */
  if (recursive_call_p (current_function_decl, callee))
    {
      m_recursive_calls.safe_push (stmt);
      set_side_effects ();
      if (dump_file)
	fprintf (dump_file, " - Skipping recursive call.\n");
      return;
    }

  gcc_assert (callee_node != NULL);

  /* Get the function symbol and its availability.  */
  enum availability avail;
  callee_node = callee_node->function_symbol (&avail);
  bool looping;
  if (builtin_safe_for_const_function_p (&looping, callee))
    {
      if (looping)
	set_side_effects ();
      if (dump_file)
	fprintf (dump_file, " - Builtin is safe for const.\n");
      return;
    }
  if (avail <= AVAIL_INTERPOSABLE)
    {
      /* The body we see may not be the one that runs; fall back to the
	 fnspec/ECF information only.  */
      if (dump_file)
	fprintf (dump_file,
		 " - Function availability <= AVAIL_INTERPOSABLE.\n");
      process_fnspec (stmt);
      return;
    }

  /* Get callee's modref summary.  As above, if there's no summary, we either
     have to give up or, if stores are ignored, we can just purge loads.  */
  modref_summary *callee_summary = optimization_summaries->get (callee_node);
  if (!callee_summary)
    {
      if (dump_file)
	fprintf (dump_file, " - No modref summary available for callee.\n");
      process_fnspec (stmt);
      return;
    }

  merge_call_side_effects (stmt, callee_summary, callee_node, false);

  return;
}
1682
1683/* Helper for analyze_stmt. */
1684
09a4ffb7
JH
1685bool
1686modref_access_analysis::analyze_load (gimple *, tree, tree op, void *data)
d119f34c 1687{
09a4ffb7 1688 modref_access_analysis *t = (modref_access_analysis *)data;
d119f34c
JH
1689
1690 if (dump_file)
1691 {
1692 fprintf (dump_file, " - Analyzing load: ");
1693 print_generic_expr (dump_file, op);
1694 fprintf (dump_file, "\n");
1695 }
1696
09a4ffb7 1697 if (!t->record_access_p (op))
d119f34c
JH
1698 return false;
1699
1700 ao_ref r;
1701 ao_ref_init (&r, op);
64f3e71c 1702 modref_access_node a = get_access (&r);
0f5afb62
JH
1703 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1704 return false;
d119f34c 1705
09a4ffb7
JH
1706 if (t->m_summary)
1707 t->record_access (t->m_summary->loads, &r, a);
1708 if (t->m_summary_lto)
1709 t->record_access_lto (t->m_summary_lto->loads, &r, a);
d119f34c
JH
1710 return false;
1711}
1712
1713/* Helper for analyze_stmt. */
1714
09a4ffb7
JH
1715bool
1716modref_access_analysis::analyze_store (gimple *stmt, tree, tree op, void *data)
d119f34c 1717{
09a4ffb7 1718 modref_access_analysis *t = (modref_access_analysis *)data;
d119f34c
JH
1719
1720 if (dump_file)
1721 {
1722 fprintf (dump_file, " - Analyzing store: ");
1723 print_generic_expr (dump_file, op);
1724 fprintf (dump_file, "\n");
1725 }
1726
09a4ffb7 1727 if (!t->record_access_p (op))
d119f34c
JH
1728 return false;
1729
1730 ao_ref r;
1731 ao_ref_init (&r, op);
64f3e71c 1732 modref_access_node a = get_access (&r);
0f5afb62
JH
1733 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1734 return false;
d119f34c 1735
09a4ffb7
JH
1736 if (t->m_summary)
1737 t->record_access (t->m_summary->stores, &r, a);
1738 if (t->m_summary_lto)
1739 t->record_access_lto (t->m_summary_lto->stores, &r, a);
1740 if (t->m_always_executed
64f3e71c
JH
1741 && a.useful_for_kill_p ()
1742 && (!cfun->can_throw_non_call_exceptions
1743 || !stmt_could_throw_p (cfun, stmt)))
1744 {
1745 if (dump_file)
1746 fprintf (dump_file, " - Recording kill\n");
09a4ffb7
JH
1747 if (t->m_summary)
1748 modref_access_node::insert_kill (t->m_summary->kills, a, false);
1749 if (t->m_summary_lto)
1750 modref_access_node::insert_kill (t->m_summary_lto->kills, a, false);
64f3e71c 1751 }
d119f34c
JH
1752 return false;
1753}
1754
/* Analyze statement STMT of function F.
   ALWAYS_EXECUTED is true when the statement is known to execute on every
   invocation of the function (used for kill tracking).
   If IPA is true do not merge in side effects of calls.  */

void
modref_access_analysis::analyze_stmt (gimple *stmt, bool always_executed)
{
  m_always_executed = always_executed;
  /* In general we can not ignore clobbers because they are barriers for code
     motion, however after inlining it is safe to do because local optimization
     passes do not consider clobbers from other functions.
     Similar logic is in ipa-pure-const.cc.  */
  if ((m_ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    {
      /* An always-executed clobber still kills the previous contents.  */
      if (always_executed && record_access_p (gimple_assign_lhs (stmt)))
	{
	  ao_ref r;
	  ao_ref_init (&r, gimple_assign_lhs (stmt));
	  modref_access_node a = get_access (&r);
	  if (a.useful_for_kill_p ())
	    {
	      if (dump_file)
		fprintf (dump_file, " - Recording kill\n");
	      if (m_summary)
		modref_access_node::insert_kill (m_summary->kills, a, false);
	      if (m_summary_lto)
		modref_access_node::insert_kill (m_summary_lto->kills,
						 a, false);
	    }
	}
      return;
    }

  /* Analyze all loads and stores in STMT.  */
  walk_stmt_load_store_ops (stmt, this,
			    analyze_load, analyze_store);

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASM:
      /* Volatile asm is a source of nondeterminism.  */
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	set_nondeterministic ();
      if (cfun->can_throw_non_call_exceptions
	  && stmt_could_throw_p (cfun, stmt))
	set_side_effects ();
      /* If the ASM statement does not read nor write memory, there's nothing
	 to do.  Otherwise just give up.  */
      if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	return;
      if (dump_file)
	fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
		 "which clobbers memory.\n");
      record_unknown_load ();
      record_unknown_store ();
      return;
    case GIMPLE_CALL:
      /* In local mode (and for internal calls) merge the callee effects
	 directly; in IPA mode only remember the fnspec on the call edge so
	 the IPA propagation can use it.  */
      if (!m_ipa || gimple_call_internal_p (stmt))
	analyze_call (as_a <gcall *> (stmt));
      else
	{
	  attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));

	  if (fnspec.known_p ()
	      && (!fnspec.global_memory_read_p ()
		  || !fnspec.global_memory_written_p ()))
	    {
	      cgraph_edge *e = cgraph_node::get
				  (current_function_decl)->get_edge (stmt);
	      if (e->callee)
		{
		  fnspec_summaries->get_create (e)->fnspec
			  = xstrdup (fnspec.get_str ());
		  if (dump_file)
		    fprintf (dump_file, " Recorded fnspec %s\n",
			     fnspec.get_str ());
		}
	    }
	}
      return;
    default:
      if (cfun->can_throw_non_call_exceptions
	  && stmt_could_throw_p (cfun, stmt))
	set_side_effects ();
      return;
    }
}
1840
02c80893 1841/* Propagate load/stores across recursive calls. */
71dbabcc 1842
09a4ffb7
JH
1843void
1844modref_access_analysis::propagate ()
71dbabcc 1845{
09a4ffb7
JH
1846 if (m_ipa && m_summary)
1847 return;
1848
1849 bool changed = true;
1850 bool first = true;
71dbabcc 1851 cgraph_node *fnode = cgraph_node::get (current_function_decl);
09a4ffb7
JH
1852
1853 m_always_executed = false;
1854 while (changed && m_summary->useful_p (m_ecf_flags, false))
71dbabcc 1855 {
09a4ffb7
JH
1856 changed = false;
1857 for (unsigned i = 0; i < m_recursive_calls.length (); i++)
1858 {
1859 changed |= merge_call_side_effects (m_recursive_calls[i], m_summary,
1860 fnode, !first);
1861 }
1862 first = false;
1863 }
1864}
1865
/* Analyze function.
   Walks every statement of every basic block, records accesses into the
   summaries and finally runs the recursive-call dataflow.  */

void
modref_access_analysis::analyze ()
{
  m_ecf_flags = flags_from_decl_or_type (current_function_decl);
  bool summary_useful = true;

  /* Analyze each statement in each basic block of the function.  If the
     statement cannot be analyzed (for any reason), the entire function cannot
     be analyzed by modref.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator si;
      /* Only the block dominating everything (the entry successor) is
	 known to execute on every call; statements that can throw
	 externally reset this below.  */
      bool always_executed
	      = bb == single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;

      for (si = gsi_start_nondebug_after_labels_bb (bb);
	   !gsi_end_p (si); gsi_next_nondebug (&si))
	{
	  /* NULL memory accesses terminates BB.  These accesses are known
	     to trip undefined behavior.  gimple-ssa-isolate-paths turns them
	     to volatile accesses and adds builtin_trap call which would
	     confuse us otherwise.  */
	  if (infer_nonnull_range_by_dereference (gsi_stmt (si),
						  null_pointer_node))
	    {
	      if (dump_file)
		fprintf (dump_file, " - NULL memory access; terminating BB\n");
	      if (flag_non_call_exceptions)
		set_side_effects ();
	      break;
	    }
	  analyze_stmt (gsi_stmt (si), always_executed);

	  /* Avoid doing useless work.  */
	  if ((!m_summary || !m_summary->useful_p (m_ecf_flags, false))
	      && (!m_summary_lto
		  || !m_summary_lto->useful_p (m_ecf_flags, false)))
	    {
	      summary_useful = false;
	      break;
	    }
	  if (always_executed
	      && stmt_can_throw_external (cfun, gsi_stmt (si)))
	    always_executed = false;
	}
      if (!summary_useful)
	break;
    }
  /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
     This needs to be done after all other side effects are computed.  */
  if (summary_useful)
    {
      if (!m_ipa)
	propagate ();
      /* A function that may not terminate has a side effect (the
	 non-termination itself).  */
      if (m_summary && !m_summary->side_effects && !finite_function_p ())
	m_summary->side_effects = true;
      if (m_summary_lto && !m_summary_lto->side_effects
	  && !finite_function_p ())
	m_summary_lto->side_effects = true;
    }
}
1930
520d5ad3
JH
1931/* Return true if OP accesses memory pointed to by SSA_NAME. */
1932
1933bool
1934memory_access_to (tree op, tree ssa_name)
1935{
1936 tree base = get_base_address (op);
1937 if (!base)
1938 return false;
1939 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
1940 return false;
1941 return TREE_OPERAND (base, 0) == ssa_name;
1942}
1943
1944/* Consider statement val = *arg.
1945 return EAF flags of ARG that can be determined from EAF flags of VAL
1946 (which are known to be FLAGS). If IGNORE_STORES is true we can ignore
1947 all stores to VAL, i.e. when handling noreturn function. */
1948
1949static int
1950deref_flags (int flags, bool ignore_stores)
1951{
d70ef656
JH
1952 /* Dereference is also a direct read but dereferenced value does not
1953 yield any other direct use. */
1954 int ret = EAF_NO_DIRECT_CLOBBER | EAF_NO_DIRECT_ESCAPE
1955 | EAF_NOT_RETURNED_DIRECTLY;
4341b1b1
JH
1956 /* If argument is unused just account for
1957 the read involved in dereference. */
520d5ad3 1958 if (flags & EAF_UNUSED)
d70ef656
JH
1959 ret |= EAF_NO_INDIRECT_READ | EAF_NO_INDIRECT_CLOBBER
1960 | EAF_NO_INDIRECT_ESCAPE;
520d5ad3
JH
1961 else
1962 {
d70ef656
JH
1963 /* Direct or indirect accesses leads to indirect accesses. */
1964 if (((flags & EAF_NO_DIRECT_CLOBBER)
1965 && (flags & EAF_NO_INDIRECT_CLOBBER))
1966 || ignore_stores)
1967 ret |= EAF_NO_INDIRECT_CLOBBER;
1968 if (((flags & EAF_NO_DIRECT_ESCAPE)
1969 && (flags & EAF_NO_INDIRECT_ESCAPE))
1970 || ignore_stores)
1971 ret |= EAF_NO_INDIRECT_ESCAPE;
1972 if ((flags & EAF_NO_DIRECT_READ)
1973 && (flags & EAF_NO_INDIRECT_READ))
1974 ret |= EAF_NO_INDIRECT_READ;
1975 if ((flags & EAF_NOT_RETURNED_DIRECTLY)
1976 && (flags & EAF_NOT_RETURNED_INDIRECTLY))
1977 ret |= EAF_NOT_RETURNED_INDIRECTLY;
520d5ad3
JH
1978 }
1979 return ret;
1980}
1981
85ebbabd 1982
09a4ffb7
JH
/* Description of an escape point: a call which affects flags of a given
   SSA name.  Escape points are recorded in IPA mode and merged during
   IPA propagation.  */

struct escape_point
{
  /* Value escapes to this call.  */
  gcall *call;
  /* Index of the call argument the value escapes to.  */
  int arg;
  /* Flags already known about the argument (this can save us from recording
     escape points if local analysis did good job already).  */
  eaf_flags_t min_flags;
  /* True if the value itself escapes to the argument; false if only the
     memory it points to does (indirect escape).  */
  bool direct;
};
1998
02c80893 1999/* Lattice used during the eaf flags analysis dataflow. For a given SSA name
09a4ffb7
JH
2000 we aim to compute its flags and escape points. We also use the lattice
2001 to dynamically build dataflow graph to propagate on. */
2002
85ebbabd
JH
2003class modref_lattice
2004{
2005public:
2006 /* EAF flags of the SSA name. */
4341b1b1 2007 eaf_flags_t flags;
4898e958
JH
2008 /* Used during DFS walk to mark names where final value was determined
2009 without need for dataflow. */
85ebbabd 2010 bool known;
4898e958 2011 /* Used during DFS walk to mark open vertices (for cycle detection). */
85ebbabd 2012 bool open;
4898e958
JH
2013 /* Set during DFS walk for names that needs dataflow propagation. */
2014 bool do_dataflow;
2015 /* Used during the iterative dataflow. */
2016 bool changed;
85ebbabd
JH
2017
2018 /* When doing IPA analysis we can not merge in callee escape points;
2019 Only remember them and do the merging at IPA propagation time. */
2020 vec <escape_point, va_heap, vl_ptr> escape_points;
2021
02c80893 2022 /* Representation of a graph for dataflow. This graph is built on-demand
4898e958
JH
2023 using modref_eaf_analysis::analyze_ssa and later solved by
2024 modref_eaf_analysis::propagate.
2025 Each edge represents the fact that flags of current lattice should be
2026 propagated to lattice of SSA_NAME. */
2027 struct propagate_edge
2028 {
2029 int ssa_name;
2030 bool deref;
2031 };
2032 vec <propagate_edge, va_heap, vl_ptr> propagate_to;
2033
85ebbabd
JH
2034 void init ();
2035 void release ();
2036 bool merge (const modref_lattice &with);
2037 bool merge (int flags);
2038 bool merge_deref (const modref_lattice &with, bool ignore_stores);
2039 bool merge_direct_load ();
2040 bool merge_direct_store ();
2041 bool add_escape_point (gcall *call, int arg, int min_flags, bool diret);
2042 void dump (FILE *out, int indent = 0) const;
2043};
2044
2045/* Lattices are saved to vectors, so keep them PODs. */
2046void
2047modref_lattice::init ()
2048{
4341b1b1 2049 /* All flags we track. */
d70ef656
JH
2050 int f = EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER
2051 | EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE
2052 | EAF_NO_DIRECT_READ | EAF_NO_INDIRECT_READ
2053 | EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY
2054 | EAF_UNUSED;
4341b1b1
JH
2055 flags = f;
2056 /* Check that eaf_flags_t is wide enough to hold all flags. */
2057 gcc_checking_assert (f == flags);
85ebbabd
JH
2058 open = true;
2059 known = false;
2060}
2061
2062/* Release memory. */
2063void
2064modref_lattice::release ()
2065{
2066 escape_points.release ();
4898e958 2067 propagate_to.release ();
85ebbabd
JH
2068}
2069
2070/* Dump lattice to OUT; indent with INDENT spaces. */
2071
2072void
2073modref_lattice::dump (FILE *out, int indent) const
2074{
2075 dump_eaf_flags (out, flags);
2076 if (escape_points.length ())
2077 {
2078 fprintf (out, "%*sEscapes:\n", indent, "");
2079 for (unsigned int i = 0; i < escape_points.length (); i++)
2080 {
2081 fprintf (out, "%*s Arg %i (%s) min flags", indent, "",
2082 escape_points[i].arg,
2083 escape_points[i].direct ? "direct" : "indirect");
9851a163 2084 dump_eaf_flags (out, escape_points[i].min_flags, false);
85ebbabd
JH
2085 fprintf (out, " in call ");
2086 print_gimple_stmt (out, escape_points[i].call, 0);
2087 }
2088 }
2089}
2090
2091/* Add escape point CALL, ARG, MIN_FLAGS, DIRECT. Return false if such escape
2092 point exists. */
2093
2094bool
2095modref_lattice::add_escape_point (gcall *call, int arg, int min_flags,
2096 bool direct)
2097{
2098 escape_point *ep;
2099 unsigned int i;
2100
2101 /* If we already determined flags to be bad enough,
4341b1b1
JH
2102 we do not need to record. */
2103 if ((flags & min_flags) == flags || (min_flags & EAF_UNUSED))
85ebbabd
JH
2104 return false;
2105
2106 FOR_EACH_VEC_ELT (escape_points, i, ep)
2107 if (ep->call == call && ep->arg == arg && ep->direct == direct)
2108 {
2109 if ((ep->min_flags & min_flags) == min_flags)
2110 return false;
2111 ep->min_flags &= min_flags;
2112 return true;
2113 }
2114 /* Give up if max escape points is met. */
2115 if ((int)escape_points.length () > param_modref_max_escape_points)
2116 {
2117 if (dump_file)
2118 fprintf (dump_file, "--param modref-max-escape-points limit reached\n");
2119 merge (0);
2120 return true;
2121 }
2122 escape_point new_ep = {call, arg, min_flags, direct};
2123 escape_points.safe_push (new_ep);
2124 return true;
2125}
2126
2127/* Merge in flags from F. */
2128bool
2129modref_lattice::merge (int f)
2130{
3350e59f
JH
2131 if (f & EAF_UNUSED)
2132 return false;
4526ec20
JH
2133 /* Check that flags seems sane: if function does not read the parameter
2134 it can not access it indirectly. */
2135 gcc_checking_assert (!(f & EAF_NO_DIRECT_READ)
2136 || ((f & EAF_NO_INDIRECT_READ)
2137 && (f & EAF_NO_INDIRECT_CLOBBER)
2138 && (f & EAF_NO_INDIRECT_ESCAPE)
2139 && (f & EAF_NOT_RETURNED_INDIRECTLY)));
85ebbabd
JH
2140 if ((flags & f) != flags)
2141 {
2142 flags &= f;
02c80893 2143 /* Prune obviously useless flags;
4341b1b1
JH
2144 We do not have ECF_FLAGS handy which is not big problem since
2145 we will do final flags cleanup before producing summary.
2146 Merging should be fast so it can work well with dataflow. */
2147 flags = remove_useless_eaf_flags (flags, 0, false);
85ebbabd
JH
2148 if (!flags)
2149 escape_points.release ();
2150 return true;
2151 }
2152 return false;
2153}
2154
02c80893 2155/* Merge in WITH. Return true if anything changed. */
85ebbabd
JH
2156
2157bool
2158modref_lattice::merge (const modref_lattice &with)
2159{
2160 if (!with.known)
4898e958 2161 do_dataflow = true;
85ebbabd
JH
2162
2163 bool changed = merge (with.flags);
2164
2165 if (!flags)
2166 return changed;
2167 for (unsigned int i = 0; i < with.escape_points.length (); i++)
2168 changed |= add_escape_point (with.escape_points[i].call,
2169 with.escape_points[i].arg,
2170 with.escape_points[i].min_flags,
2171 with.escape_points[i].direct);
2172 return changed;
2173}
2174
2175/* Merge in deref of WITH. If IGNORE_STORES is true do not consider
02c80893 2176 stores. Return true if anything changed. */
85ebbabd
JH
2177
2178bool
2179modref_lattice::merge_deref (const modref_lattice &with, bool ignore_stores)
2180{
2181 if (!with.known)
4898e958 2182 do_dataflow = true;
85ebbabd
JH
2183
2184 bool changed = merge (deref_flags (with.flags, ignore_stores));
2185
2186 if (!flags)
2187 return changed;
2188 for (unsigned int i = 0; i < with.escape_points.length (); i++)
9851a163
JH
2189 {
2190 int min_flags = with.escape_points[i].min_flags;
2191
2192 if (with.escape_points[i].direct)
2193 min_flags = deref_flags (min_flags, ignore_stores);
2194 else if (ignore_stores)
4341b1b1 2195 min_flags |= ignore_stores_eaf_flags;
9851a163
JH
2196 changed |= add_escape_point (with.escape_points[i].call,
2197 with.escape_points[i].arg,
2198 min_flags,
2199 false);
2200 }
85ebbabd
JH
2201 return changed;
2202}
2203
2204/* Merge in flags for direct load. */
2205
2206bool
2207modref_lattice::merge_direct_load ()
2208{
d70ef656 2209 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_READ));
85ebbabd
JH
2210}
2211
2212/* Merge in flags for direct store. */
2213
2214bool
2215modref_lattice::merge_direct_store ()
2216{
d70ef656 2217 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_CLOBBER));
85ebbabd
JH
2218}
2219
/* Analyzer of EAF flags.
   This is generally a dataflow problem over the SSA graph, however we only
   care about flags of a few selected ssa names (arguments, return slot and
   static chain).  So we first call analyze_ssa_name on all relevant names
   and perform a DFS walk to discover SSA names where flags need to be
   determined.  For acyclic graphs we try to determine final flags during
   this walk.  Once cycles or recursion depth is met we enlist SSA names
   for dataflow which is done by the propagate call.

   After propagation the flags can be obtained using get_ssa_name_flags.  */

class modref_eaf_analysis
{
public:
  /* Mark NAME as relevant for analysis.  */
  void analyze_ssa_name (tree name, bool deferred = false);
  /* Dataflow solver.  */
  void propagate ();
  /* Return flags computed earlier for NAME.  Only valid after the DFS
     walk (and propagate, if dataflow was needed) finished.  */
  int get_ssa_name_flags (tree name)
  {
    int version = SSA_NAME_VERSION (name);
    gcc_checking_assert (m_lattice[version].known);
    return m_lattice[version].flags;
  }
  /* In IPA mode this will record all escape points
     determined for NAME to PARM_INDEX.  FLAGS are the minimal
     flags known.  */
  void record_escape_points (tree name, int parm_index, int flags);
  modref_eaf_analysis (bool ipa)
  {
    m_ipa = ipa;
    m_depth = 0;
    m_lattice.safe_grow_cleared (num_ssa_names, true);
  }
  ~modref_eaf_analysis ()
  {
    gcc_checking_assert (!m_depth);
    /* Lattices own heap vectors only when escape points were collected
       (IPA mode) or dataflow edges were built.  */
    if (m_ipa || m_names_to_propagate.length ())
      for (unsigned int i = 0; i < num_ssa_names; i++)
	m_lattice[i].release ();
  }
private:
  /* If true, we produce analysis for IPA mode.  In this case escape points
     are collected.  */
  bool m_ipa;
  /* Depth of recursion of analyze_ssa_name.  */
  int m_depth;
  /* Propagation lattice for individual ssa names.  */
  auto_vec<modref_lattice> m_lattice;
  /* Names whose analysis was postponed because the maximal recursion
     depth was reached; reprocessed by propagate.  */
  auto_vec<tree> m_deferred_names;
  /* SSA name versions that have outgoing dataflow edges and thus need
     the iterative solver.  */
  auto_vec<int> m_names_to_propagate;

  void merge_with_ssa_name (tree dest, tree src, bool deref);
  void merge_call_lhs_flags (gcall *call, int arg, tree name, bool direct,
			     bool deref);
};
85ebbabd 2277
85ebbabd 2278
/* Call statements may return their parameters.  Consider argument number
   ARG of CALL and determine the flags that need to be cleared in case the
   value (possibly indirectly referenced from ARG) is returned.
   If DIRECT is true consider direct returns and if INDIRECT consider
   indirect returns.
   ARG is set to -1 for the static chain.  */

void
modref_eaf_analysis::merge_call_lhs_flags (gcall *call, int arg,
					   tree name, bool direct,
					   bool indirect)
{
  int index = SSA_NAME_VERSION (name);
  bool returned_directly = false;

  /* If there is no return value, no flags are affected.  */
  if (!gimple_call_lhs (call))
    return;

  /* If we know that function returns given argument and it is not ARG
     we can still be happy.  */
  if (arg >= 0)
    {
      int flags = gimple_call_return_flags (call);
      if (flags & ERF_RETURNS_ARG)
	{
	  if ((flags & ERF_RETURN_ARG_MASK) == arg)
	    returned_directly = true;
	  else
	    /* A different argument is returned, so NAME can not leak
	       through the return value at all.  */
	    return;
	}
    }
  /* Make ERF_RETURNS_ARG overwrite EAF_UNUSED.  */
  if (returned_directly)
    {
      direct = true;
      indirect = false;
    }
  /* If value is not returned at all, do nothing.  */
  else if (!direct && !indirect)
    return;

  /* If return value is SSA name determine its flags.  */
  if (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME)
    {
      tree lhs = gimple_call_lhs (call);
      /* Direct return propagates flags as-is; indirect return behaves
	 like a dereference of the LHS.  */
      if (direct)
	merge_with_ssa_name (name, lhs, false);
      if (indirect)
	merge_with_ssa_name (name, lhs, true);
    }
  /* In the case of memory store we can do nothing.  */
  else if (!direct)
    m_lattice[index].merge (deref_flags (0, false));
  else
    m_lattice[index].merge (0);
}
2337
62af7d94
JH
/* CALL_FLAGS are the EAF flags of an argument as seen by the callee.
   Turn them into flags valid in the caller and update LATTICE of the
   corresponding argument if needed.  Returns the adjusted flags.  */

static int
callee_to_caller_flags (int call_flags, bool ignore_stores,
			modref_lattice &lattice)
{
  /* call_flags is about callee returning a value
     that is not the same as caller returning it.  */
  call_flags |= EAF_NOT_RETURNED_DIRECTLY
		| EAF_NOT_RETURNED_INDIRECTLY;
  if (!ignore_stores && !(call_flags & EAF_UNUSED))
    {
      /* If value escapes we are no longer able to track what happens
	 with it because we can read it from the escaped location
	 anytime.  */
      if (!(call_flags & EAF_NO_DIRECT_ESCAPE))
	lattice.merge (0);
      else if (!(call_flags & EAF_NO_INDIRECT_ESCAPE))
	/* Only the pointed-to memory escapes; the pointer itself is
	   still tracked, so clear just the indirect properties.  */
	lattice.merge (~(EAF_NOT_RETURNED_INDIRECTLY
			 | EAF_NO_DIRECT_READ
			 | EAF_NO_INDIRECT_READ
			 | EAF_NO_INDIRECT_CLOBBER
			 | EAF_UNUSED));
    }
  else
    /* Stores are irrelevant (noreturn callee or const-ish call), so all
       store-related flags can be assumed.  */
    call_flags |= ignore_stores_eaf_flags;
  return call_flags;
}
2368
85ebbabd
JH
/* Analyze EAF flags of SSA name NAME and store the result into M_LATTICE.
   When DEFERRED is true, NAME was postponed earlier because the maximal
   recursion depth was reached; its lattice is already initialized, so the
   entry checks are skipped.  */

void
modref_eaf_analysis::analyze_ssa_name (tree name, bool deferred)
{
  imm_use_iterator ui;
  gimple *use_stmt;
  int index = SSA_NAME_VERSION (name);

  if (!deferred)
    {
      /* See if value is already computed.  */
      if (m_lattice[index].known || m_lattice[index].do_dataflow)
	return;
      if (m_lattice[index].open)
	{
	  /* Cycle in the SSA graph: final flags will be computed by the
	     iterative dataflow (an edge is added by merge_with_ssa_name).  */
	  if (dump_file)
	    fprintf (dump_file,
		     "%*sCycle in SSA graph\n",
		     m_depth * 4, "");
	  return;
	}
      /* Recursion guard.  */
      m_lattice[index].init ();
      if (m_depth == param_modref_max_depth)
	{
	  /* Too deep; remember NAME and reprocess it from propagate.  */
	  if (dump_file)
	    fprintf (dump_file,
		     "%*sMax recursion depth reached; postponing\n",
		     m_depth * 4, "");
	  m_deferred_names.safe_push (name);
	  return;
	}
    }

  if (dump_file)
    {
      fprintf (dump_file,
	       "%*sAnalyzing flags of ssa name: ", m_depth * 4, "");
      print_generic_expr (dump_file, name);
      fprintf (dump_file, "\n");
    }

  /* Walk all immediate uses of NAME and drop flags contradicted by the
     individual uses.  */
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
    {
      /* Flags hit bottom; no use can lower them further.  */
      if (m_lattice[index].flags == 0)
	break;
      if (is_gimple_debug (use_stmt))
	continue;
      if (dump_file)
	{
	  fprintf (dump_file, "%*s Analyzing stmt: ", m_depth * 4, "");
	  print_gimple_stmt (dump_file, use_stmt, 0);
	}
      /* If we see a direct non-debug use, clear unused bit.
	 All dereferences should be accounted below using deref_flags.  */
      m_lattice[index].merge (~EAF_UNUSED);

      /* Gimple return may load the return value.
	 Returning name counts as an use by tree-ssa-structalias.cc  */
      if (greturn *ret = dyn_cast <greturn *> (use_stmt))
	{
	  /* Returning through return slot is seen as memory write earlier.  */
	  if (DECL_RESULT (current_function_decl)
	      && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	    ;
	  /* NOTE(review): EAF_NOT_RETURNED_DIRECTLY appears twice in this
	     mask; presumably the second occurrence was meant to be
	     EAF_NOT_RETURNED_INDIRECTLY — confirm against upstream before
	     changing, since only the direct flag is cleared as written.  */
	  else if (gimple_return_retval (ret) == name)
	    m_lattice[index].merge (~(EAF_UNUSED | EAF_NOT_RETURNED_DIRECTLY
				      | EAF_NOT_RETURNED_DIRECTLY));
	  else if (memory_access_to (gimple_return_retval (ret), name))
	    {
	      /* The pointed-to value is loaded and returned.  */
	      m_lattice[index].merge_direct_load ();
	      m_lattice[index].merge (~(EAF_UNUSED
					| EAF_NOT_RETURNED_INDIRECTLY));
	    }
	}
      /* Account for LHS store, arg loads and flags from callee function.  */
      else if (gcall *call = dyn_cast <gcall *> (use_stmt))
	{
	  tree callee = gimple_call_fndecl (call);

	  /* IPA PTA internally it treats calling a function as "writing" to
	     the argument space of all functions the function pointer points to
	     (PR101949).  We can not drop EAF_NOCLOBBER only when ipa-pta
	     is on since that would allow propagation of this from -fno-ipa-pta
	     to -fipa-pta functions.  */
	  if (gimple_call_fn (use_stmt) == name)
	    m_lattice[index].merge (~(EAF_NO_DIRECT_CLOBBER | EAF_UNUSED));

	  /* Recursion would require bit of propagation; give up for now.  */
	  if (callee && !m_ipa && recursive_call_p (current_function_decl,
						    callee))
	    m_lattice[index].merge (0);
	  else
	    {
	      int ecf_flags = gimple_call_flags (call);
	      bool ignore_stores = ignore_stores_p (current_function_decl,
						    ecf_flags);
	      bool ignore_retval = ignore_retval_p (current_function_decl,
						    ecf_flags);

	      /* Handle *name = func (...).  */
	      if (gimple_call_lhs (call)
		  && memory_access_to (gimple_call_lhs (call), name))
		{
		  m_lattice[index].merge_direct_store ();
		  /* Return slot optimization passes address of
		     LHS to callee via hidden parameter and this
		     may make LHS to escape.  See PR 98499.  */
		  if (gimple_call_return_slot_opt_p (call)
		      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call))))
		    {
		      int call_flags = gimple_call_retslot_flags (call);
		      bool isretslot = false;

		      /* Check whether NAME is itself this function's
			 return slot (hidden DECL_BY_REFERENCE result).  */
		      if (DECL_RESULT (current_function_decl)
			  && DECL_BY_REFERENCE
				(DECL_RESULT (current_function_decl)))
			isretslot = ssa_default_def
					 (cfun,
					  DECL_RESULT (current_function_decl))
				    == name;

		      /* Passing returnslot to return slot is special because
			 not_returned and escape has same meaning.
			 However passing arg to return slot is different.  If
			 the callee's return slot is returned it means that
			 arg is written to itself which is an escape.
			 Since we do not track the memory it is written to we
			 need to give up on analyzing it.  */
		      if (!isretslot)
			{
			  if (!(call_flags & (EAF_NOT_RETURNED_DIRECTLY
					      | EAF_UNUSED)))
			    m_lattice[index].merge (0);
			  else gcc_checking_assert
				(call_flags & (EAF_NOT_RETURNED_INDIRECTLY
					       | EAF_UNUSED));
			  call_flags = callee_to_caller_flags
					   (call_flags, false,
					    m_lattice[index]);
			}
		      m_lattice[index].merge (call_flags);
		    }
		}

	      /* NAME is passed as the static chain.  */
	      if (gimple_call_chain (call)
		  && (gimple_call_chain (call) == name))
		{
		  int call_flags = gimple_call_static_chain_flags (call);
		  if (!ignore_retval && !(call_flags & EAF_UNUSED))
		    merge_call_lhs_flags
			 (call, -1, name,
			  !(call_flags & EAF_NOT_RETURNED_DIRECTLY),
			  !(call_flags & EAF_NOT_RETURNED_INDIRECTLY));
		  call_flags = callee_to_caller_flags
				   (call_flags, ignore_stores,
				    m_lattice[index]);
		  if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
		    m_lattice[index].merge (call_flags);
		}

	      /* Escape points of internal calls can not be represented as
		 cgraph edges; merge their flags right away even in IPA
		 mode.  */
	      bool record_ipa = m_ipa && !gimple_call_internal_p (call);

	      /* Handle all function parameters.  */
	      for (unsigned i = 0;
		   i < gimple_call_num_args (call)
		   && m_lattice[index].flags; i++)
		/* Name is directly passed to the callee.  */
		if (gimple_call_arg (call, i) == name)
		  {
		    int call_flags = gimple_call_arg_flags (call, i);
		    if (!ignore_retval)
		      merge_call_lhs_flags
			      (call, i, name,
			       !(call_flags & (EAF_NOT_RETURNED_DIRECTLY
					       | EAF_UNUSED)),
			       !(call_flags & (EAF_NOT_RETURNED_INDIRECTLY
					       | EAF_UNUSED)));
		    if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
		      {
			call_flags = callee_to_caller_flags
					 (call_flags, ignore_stores,
					  m_lattice[index]);
			if (!record_ipa)
			  m_lattice[index].merge (call_flags);
			else
			  m_lattice[index].add_escape_point (call, i,
							     call_flags, true);
		      }
		  }
		/* Name is dereferenced and passed to a callee.  */
		else if (memory_access_to (gimple_call_arg (call, i), name))
		  {
		    int call_flags = deref_flags
			    (gimple_call_arg_flags (call, i), ignore_stores);
		    if (!ignore_retval && !(call_flags & EAF_UNUSED)
			&& !(call_flags & EAF_NOT_RETURNED_DIRECTLY)
			&& !(call_flags & EAF_NOT_RETURNED_INDIRECTLY))
		      merge_call_lhs_flags (call, i, name, false, true);
		    if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
		      m_lattice[index].merge_direct_load ();
		    else
		      {
			call_flags = callee_to_caller_flags
					 (call_flags, ignore_stores,
					  m_lattice[index]);
			if (!record_ipa)
			  m_lattice[index].merge (call_flags);
			else
			  m_lattice[index].add_escape_point (call, i,
							     call_flags, false);
		      }
		  }
	    }
	}
      else if (gimple_assign_load_p (use_stmt))
	{
	  gassign *assign = as_a <gassign *> (use_stmt);
	  /* Memory to memory copy.  */
	  if (gimple_store_p (assign))
	    {
	      /* Handle *lhs = *name.

		 We do not track memory locations, so assume that value
		 is used arbitrarily.  */
	      if (memory_access_to (gimple_assign_rhs1 (assign), name))
		m_lattice[index].merge (deref_flags (0, false));
	      /* Handle *name = *exp.  */
	      else if (memory_access_to (gimple_assign_lhs (assign), name))
		m_lattice[index].merge_direct_store ();
	    }
	  /* Handle lhs = *name.  */
	  else if (memory_access_to (gimple_assign_rhs1 (assign), name))
	    {
	      tree lhs = gimple_assign_lhs (assign);
	      merge_with_ssa_name (name, lhs, true);
	    }
	}
      else if (gimple_store_p (use_stmt))
	{
	  gassign *assign = dyn_cast <gassign *> (use_stmt);

	  /* Handle *lhs = name.  */
	  if (assign && gimple_assign_rhs1 (assign) == name)
	    {
	      /* The name escapes to memory we do not track.  */
	      if (dump_file)
		fprintf (dump_file, "%*s ssa name saved to memory\n",
			 m_depth * 4, "");
	      m_lattice[index].merge (0);
	    }
	  /* Handle *name = exp.  */
	  else if (assign
		   && memory_access_to (gimple_assign_lhs (assign), name))
	    {
	      /* In general we can not ignore clobbers because they are
		 barriers for code motion, however after inlining it is safe to
		 do because local optimization passes do not consider clobbers
		 from other functions.
		 Similar logic is in ipa-pure-const.cc.  */
	      if (!cfun->after_inlining || !gimple_clobber_p (assign))
		m_lattice[index].merge_direct_store ();
	    }
	  /* ASM statements etc.  */
	  else if (!assign)
	    {
	      if (dump_file)
		fprintf (dump_file, "%*s Unhandled store\n", m_depth * 4, "");
	      m_lattice[index].merge (0);
	    }
	}
      else if (gassign *assign = dyn_cast <gassign *> (use_stmt))
	{
	  enum tree_code code = gimple_assign_rhs_code (assign);

	  /* See if operation is a merge as considered by
	     tree-ssa-structalias.cc:find_func_aliases.  */
	  if (!truth_value_p (code)
	      && code != POINTER_DIFF_EXPR
	      && (code != POINTER_PLUS_EXPR
		  || gimple_assign_rhs1 (assign) == name))
	    {
	      tree lhs = gimple_assign_lhs (assign);
	      merge_with_ssa_name (name, lhs, false);
	    }
	}
      else if (gphi *phi = dyn_cast <gphi *> (use_stmt))
	{
	  /* PHI result inherits flags of all arguments.  */
	  tree result = gimple_phi_result (phi);
	  merge_with_ssa_name (name, result, false);
	}
      /* Conditions are not considered escape points
	 by tree-ssa-structalias.  */
      else if (gimple_code (use_stmt) == GIMPLE_COND)
	;
      else
	{
	  /* Conservative bottom for any statement kind not handled
	     above.  */
	  if (dump_file)
	    fprintf (dump_file, "%*s Unhandled stmt\n", m_depth * 4, "");
	  m_lattice[index].merge (0);
	}

      if (dump_file)
	{
	  fprintf (dump_file, "%*s current flags of ", m_depth * 4, "");
	  print_generic_expr (dump_file, name);
	  m_lattice[index].dump (dump_file, m_depth * 4 + 4);
	}
    }
  if (dump_file)
    {
      fprintf (dump_file, "%*sflags of ssa name ", m_depth * 4, "");
      print_generic_expr (dump_file, name);
      m_lattice[index].dump (dump_file, m_depth * 4 + 2);
    }
  /* Close the vertex; the value is final unless dataflow edges were
     added during the walk.  */
  m_lattice[index].open = false;
  if (!m_lattice[index].do_dataflow)
    m_lattice[index].known = true;
}
2693
2694/* Propagate info from SRC to DEST. If DEREF it true, assume that SRC
2695 is dereferenced. */
2696
2697void
2698modref_eaf_analysis::merge_with_ssa_name (tree dest, tree src, bool deref)
2699{
2700 int index = SSA_NAME_VERSION (dest);
2701 int src_index = SSA_NAME_VERSION (src);
2702
4898e958
JH
2703 /* Merging lattice with itself is a no-op. */
2704 if (!deref && src == dest)
2705 return;
2706
18f0873d
JH
2707 m_depth++;
2708 analyze_ssa_name (src);
2709 m_depth--;
2710 if (deref)
2711 m_lattice[index].merge_deref (m_lattice[src_index], false);
2712 else
2713 m_lattice[index].merge (m_lattice[src_index]);
4898e958
JH
2714
2715 /* If we failed to produce final solution add an edge to the dataflow
2716 graph. */
2717 if (!m_lattice[src_index].known)
2718 {
2719 modref_lattice::propagate_edge e = {index, deref};
2720
2721 if (!m_lattice[src_index].propagate_to.length ())
2722 m_names_to_propagate.safe_push (src_index);
2723 m_lattice[src_index].propagate_to.safe_push (e);
2724 m_lattice[src_index].changed = true;
2725 m_lattice[src_index].do_dataflow = true;
2726 if (dump_file)
2727 fprintf (dump_file,
2728 "%*sWill propgate from ssa_name %i to %i%s\n",
2729 m_depth * 4 + 4,
2730 "", src_index, index, deref ? " (deref)" : "");
2731 }
2732}
2733
/* In the case we deferred some SSA names, reprocess them.  In the case some
   dataflow edges were introduced, do the actual iterative dataflow.  */

void
modref_eaf_analysis::propagate ()
{
  int iterations = 0;
  size_t i;
  int index;
  bool changed = true;

  /* First finish the walks that were postponed due to the recursion
     depth limit; these may add further dataflow edges.  */
  while (m_deferred_names.length ())
    {
      tree name = m_deferred_names.pop ();
      if (dump_file)
	fprintf (dump_file, "Analyzing deferred SSA name\n");
      analyze_ssa_name (name, true);
    }

  if (!m_names_to_propagate.length ())
    return;
  if (dump_file)
    fprintf (dump_file, "Propagating EAF flags\n");

  /* Compute reverse postorder.  */
  auto_vec <int> rpo;
  struct stack_entry
  {
    /* SSA name version being visited.  */
    int name;
    /* Next outgoing edge to look at.  */
    unsigned pos;
  };
  auto_vec <struct stack_entry> stack;
  int pos = m_names_to_propagate.length () - 1;

  rpo.safe_grow (m_names_to_propagate.length (), true);
  stack.reserve_exact (m_names_to_propagate.length ());

  /* We reuse known flag for RPO DFS walk bookkeeping.  */
  if (flag_checking)
    FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
      gcc_assert (!m_lattice[index].known && m_lattice[index].changed);

  /* Iterative DFS over the dataflow graph filling RPO from the back.  */
  FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
    {
      if (!m_lattice[index].known)
	{
	  stack_entry e = {index, 0};

	  stack.quick_push (e);
	  m_lattice[index].known = true;
	}
      while (stack.length ())
	{
	  bool found = false;
	  int index1 = stack.last ().name;

	  while (stack.last ().pos < m_lattice[index1].propagate_to.length ())
	    {
	      int index2 = m_lattice[index1]
		      .propagate_to[stack.last ().pos].ssa_name;

	      stack.last ().pos++;
	      /* Descend only into names that themselves have outgoing
		 edges; sinks need no RPO slot of their own.  */
	      if (!m_lattice[index2].known
		  && m_lattice[index2].propagate_to.length ())
		{
		  stack_entry e = {index2, 0};

		  stack.quick_push (e);
		  m_lattice[index2].known = true;
		  found = true;
		  break;
		}
	    }
	  if (!found
	      && stack.last ().pos == m_lattice[index1].propagate_to.length ())
	    {
	      rpo[pos--] = index1;
	      stack.pop ();
	    }
	}
    }

  /* Perform iterative dataflow.  */
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file)
	fprintf (dump_file, " iteration %i\n", iterations);
      FOR_EACH_VEC_ELT (rpo, i, index)
	{
	  if (m_lattice[index].changed)
	    {
	      size_t j;

	      m_lattice[index].changed = false;
	      if (dump_file)
		fprintf (dump_file, " Visiting ssa name %i\n", index);
	      for (j = 0; j < m_lattice[index].propagate_to.length (); j++)
		{
		  bool ch;
		  int target = m_lattice[index].propagate_to[j].ssa_name;
		  bool deref = m_lattice[index].propagate_to[j].deref;

		  if (dump_file)
		    fprintf (dump_file, " Propagating flags of ssa name"
			     " %i to %i%s\n",
			     index, target, deref ? " (deref)" : "");
		  m_lattice[target].known = true;
		  if (!m_lattice[index].propagate_to[j].deref)
		    ch = m_lattice[target].merge (m_lattice[index]);
		  else
		    ch = m_lattice[target].merge_deref (m_lattice[index],
							false);
		  if (!ch)
		    continue;
		  if (dump_file)
		    {
		      fprintf (dump_file, " New lattice: ");
		      m_lattice[target].dump (dump_file);
		    }
		  changed = true;
		  m_lattice[target].changed = true;
		}
	    }
	}
    }
  if (dump_file)
    fprintf (dump_file, "EAF flags propagated in %i iterations\n", iterations);
}
2864
b8ef019a
JH
2865/* Record escape points of PARM_INDEX according to LATTICE. */
2866
18f0873d
JH
2867void
2868modref_eaf_analysis::record_escape_points (tree name, int parm_index, int flags)
b8ef019a 2869{
18f0873d
JH
2870 modref_lattice &lattice = m_lattice[SSA_NAME_VERSION (name)];
2871
b8ef019a
JH
2872 if (lattice.escape_points.length ())
2873 {
2874 escape_point *ep;
2875 unsigned int ip;
2876 cgraph_node *node = cgraph_node::get (current_function_decl);
2877
18f0873d 2878 gcc_assert (m_ipa);
b8ef019a
JH
2879 FOR_EACH_VEC_ELT (lattice.escape_points, ip, ep)
2880 if ((ep->min_flags & flags) != flags)
2881 {
2882 cgraph_edge *e = node->get_edge (ep->call);
2883 struct escape_entry ee = {parm_index, ep->arg,
2884 ep->min_flags, ep->direct};
2885
2886 escape_summaries->get_create (e)->esc.safe_push (ee);
2887 }
2888 }
2889}
2890
7798ae1a
JH
2891/* Determine EAF flags for function parameters
2892 and fill in SUMMARY/SUMMARY_LTO. If IPA is true work in IPA mode
02c80893 2893 where we also collect escape points.
7798ae1a 2894 PAST_FLAGS, PAST_RETSLOT_FLAGS, PAST_STATIC_CHAIN_FLAGS can be
02c80893 2895 used to preserve flags from previous (IPA) run for cases where
7798ae1a
JH
2896 late optimizations changed code in a way we can no longer analyze
2897 it easily. */
520d5ad3
JH
2898
2899static void
85ebbabd 2900analyze_parms (modref_summary *summary, modref_summary_lto *summary_lto,
7798ae1a
JH
2901 bool ipa, vec<eaf_flags_t> &past_flags,
2902 int past_retslot_flags, int past_static_chain_flags)
520d5ad3
JH
2903{
2904 unsigned int parm_index = 0;
2905 unsigned int count = 0;
85ebbabd 2906 int ecf_flags = flags_from_decl_or_type (current_function_decl);
b8ef019a 2907 tree retslot = NULL;
a70c0512 2908 tree static_chain = NULL;
85ebbabd 2909
b8ef019a
JH
2910 /* If there is return slot, look up its SSA name. */
2911 if (DECL_RESULT (current_function_decl)
2912 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
2913 retslot = ssa_default_def (cfun, DECL_RESULT (current_function_decl));
a70c0512
JH
2914 if (cfun->static_chain_decl)
2915 static_chain = ssa_default_def (cfun, cfun->static_chain_decl);
b8ef019a 2916
520d5ad3
JH
2917 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2918 parm = TREE_CHAIN (parm))
2919 count++;
2920
a70c0512 2921 if (!count && !retslot && !static_chain)
520d5ad3
JH
2922 return;
2923
18f0873d 2924 modref_eaf_analysis eaf_analysis (ipa);
520d5ad3 2925
4898e958
JH
2926 /* Determine all SSA names we need to know flags for. */
2927 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2928 parm = TREE_CHAIN (parm))
2929 {
2930 tree name = ssa_default_def (cfun, parm);
2931 if (name)
2932 eaf_analysis.analyze_ssa_name (name);
2933 }
2934 if (retslot)
2935 eaf_analysis.analyze_ssa_name (retslot);
2936 if (static_chain)
2937 eaf_analysis.analyze_ssa_name (static_chain);
2938
2939 /* Do the dataflow. */
2940 eaf_analysis.propagate ();
2941
e2dd12ab
JH
2942 tree attr = lookup_attribute ("fn spec",
2943 TYPE_ATTRIBUTES
2944 (TREE_TYPE (current_function_decl)));
2945 attr_fnspec fnspec (attr
2946 ? TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))
2947 : "");
2948
2949
4898e958 2950 /* Store results to summaries. */
520d5ad3
JH
2951 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm; parm_index++,
2952 parm = TREE_CHAIN (parm))
2953 {
2954 tree name = ssa_default_def (cfun, parm);
3350e59f
JH
2955 if (!name || has_zero_uses (name))
2956 {
2957 /* We do not track non-SSA parameters,
2958 but we want to track unused gimple_regs. */
2959 if (!is_gimple_reg (parm))
2960 continue;
2961 if (summary)
2962 {
2963 if (parm_index >= summary->arg_flags.length ())
2964 summary->arg_flags.safe_grow_cleared (count, true);
2965 summary->arg_flags[parm_index] = EAF_UNUSED;
2966 }
2967 else if (summary_lto)
2968 {
2969 if (parm_index >= summary_lto->arg_flags.length ())
2970 summary_lto->arg_flags.safe_grow_cleared (count, true);
2971 summary_lto->arg_flags[parm_index] = EAF_UNUSED;
2972 }
2973 continue;
2974 }
18f0873d 2975 int flags = eaf_analysis.get_ssa_name_flags (name);
e2dd12ab
JH
2976 int attr_flags = fnspec.arg_eaf_flags (parm_index);
2977
2978 if (dump_file && (flags | attr_flags) != flags && !(flags & EAF_UNUSED))
2979 {
2980 fprintf (dump_file,
2981 " Flags for param %i combined with fnspec flags:",
2982 (int)parm_index);
2983 dump_eaf_flags (dump_file, attr_flags, false);
2984 fprintf (dump_file, " determined: ");
2985 dump_eaf_flags (dump_file, flags, true);
2986 }
2987 flags |= attr_flags;
85ebbabd 2988
4341b1b1
JH
2989 /* Eliminate useless flags so we do not end up storing unnecessary
2990 summaries. */
2991
2992 flags = remove_useless_eaf_flags
2993 (flags, ecf_flags,
2994 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
7798ae1a
JH
2995 if (past_flags.length () > parm_index)
2996 {
2997 int past = past_flags[parm_index];
2998 past = remove_useless_eaf_flags
2999 (past, ecf_flags,
3000 VOID_TYPE_P (TREE_TYPE
3001 (TREE_TYPE (current_function_decl))));
3002 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3003 {
3004 fprintf (dump_file,
3005 " Flags for param %i combined with IPA pass:",
3006 (int)parm_index);
3007 dump_eaf_flags (dump_file, past, false);
e2dd12ab
JH
3008 fprintf (dump_file, " determined: ");
3009 dump_eaf_flags (dump_file, flags, true);
7798ae1a
JH
3010 }
3011 if (!(flags & EAF_UNUSED))
3012 flags |= past;
3013 }
520d5ad3
JH
3014
3015 if (flags)
3016 {
85ebbabd
JH
3017 if (summary)
3018 {
3019 if (parm_index >= summary->arg_flags.length ())
3020 summary->arg_flags.safe_grow_cleared (count, true);
3021 summary->arg_flags[parm_index] = flags;
3022 }
3023 else if (summary_lto)
3024 {
3025 if (parm_index >= summary_lto->arg_flags.length ())
3026 summary_lto->arg_flags.safe_grow_cleared (count, true);
3027 summary_lto->arg_flags[parm_index] = flags;
3028 }
18f0873d 3029 eaf_analysis.record_escape_points (name, parm_index, flags);
b8ef019a
JH
3030 }
3031 }
3032 if (retslot)
3033 {
18f0873d 3034 int flags = eaf_analysis.get_ssa_name_flags (retslot);
7798ae1a 3035 int past = past_retslot_flags;
85ebbabd 3036
b8ef019a 3037 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
7798ae1a
JH
3038 past = remove_useless_eaf_flags
3039 (past, ecf_flags,
3040 VOID_TYPE_P (TREE_TYPE
3041 (TREE_TYPE (current_function_decl))));
3042 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3043 {
3044 fprintf (dump_file,
3045 " Retslot flags combined with IPA pass:");
3046 dump_eaf_flags (dump_file, past, false);
e2dd12ab 3047 fprintf (dump_file, " determined: ");
7798ae1a
JH
3048 dump_eaf_flags (dump_file, flags, true);
3049 }
3050 if (!(flags & EAF_UNUSED))
3051 flags |= past;
b8ef019a
JH
3052 if (flags)
3053 {
3054 if (summary)
3055 summary->retslot_flags = flags;
3056 if (summary_lto)
3057 summary_lto->retslot_flags = flags;
18f0873d 3058 eaf_analysis.record_escape_points (retslot,
1f3a3363 3059 MODREF_RETSLOT_PARM, flags);
520d5ad3
JH
3060 }
3061 }
a70c0512
JH
3062 if (static_chain)
3063 {
18f0873d 3064 int flags = eaf_analysis.get_ssa_name_flags (static_chain);
7798ae1a 3065 int past = past_static_chain_flags;
a70c0512
JH
3066
3067 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
7798ae1a
JH
3068 past = remove_useless_eaf_flags
3069 (past, ecf_flags,
3070 VOID_TYPE_P (TREE_TYPE
3071 (TREE_TYPE (current_function_decl))));
3072 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3073 {
3074 fprintf (dump_file,
3075 " Static chain flags combined with IPA pass:");
3076 dump_eaf_flags (dump_file, past, false);
e2dd12ab 3077 fprintf (dump_file, " determined: ");
7798ae1a
JH
3078 dump_eaf_flags (dump_file, flags, true);
3079 }
3080 if (!(flags & EAF_UNUSED))
78dd0de9 3081 flags |= past;
a70c0512
JH
3082 if (flags)
3083 {
3084 if (summary)
3085 summary->static_chain_flags = flags;
3086 if (summary_lto)
3087 summary_lto->static_chain_flags = flags;
18f0873d 3088 eaf_analysis.record_escape_points (static_chain,
1f3a3363 3089 MODREF_STATIC_CHAIN_PARM,
18f0873d 3090 flags);
a70c0512
JH
3091 }
3092 }
520d5ad3
JH
3093}
3094
2cadaa1f 3095/* Analyze function. IPA indicates whether we're running in local mode
494bdadf
JH
3096 (false) or the IPA mode (true).
3097 Return true if fixup cfg is needed after the pass. */
d119f34c 3098
494bdadf 3099static bool
2cadaa1f 3100analyze_function (bool ipa)
d119f34c 3101{
494bdadf 3102 bool fixup_cfg = false;
d119f34c 3103 if (dump_file)
2cadaa1f
JH
3104 fprintf (dump_file, "\n\nmodref analyzing '%s' (ipa=%i)%s%s\n",
3105 cgraph_node::get (current_function_decl)->dump_name (), ipa,
67c935c8
JH
3106 TREE_READONLY (current_function_decl) ? " (const)" : "",
3107 DECL_PURE_P (current_function_decl) ? " (pure)" : "");
d119f34c
JH
3108
3109 /* Don't analyze this function if it's compiled with -fno-strict-aliasing. */
008e7397
JH
3110 if (!flag_ipa_modref
3111 || lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl)))
494bdadf 3112 return false;
d119f34c 3113
d119f34c
JH
3114 /* Compute no-LTO summaries when local optimization is going to happen. */
3115 bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
3116 || (in_lto_p && !flag_wpa
3117 && flag_incremental_link != INCREMENTAL_LINK_LTO));
d119f34c
JH
3118 /* Compute LTO when LTO streaming is going to happen. */
3119 bool lto = ipa && ((flag_lto && !in_lto_p)
3120 || flag_wpa
3121 || flag_incremental_link == INCREMENTAL_LINK_LTO);
71dbabcc
JH
3122 cgraph_node *fnode = cgraph_node::get (current_function_decl);
3123
3124 modref_summary *summary = NULL;
3125 modref_summary_lto *summary_lto = NULL;
3126
f6f704fd
JH
3127 bool past_flags_known = false;
3128 auto_vec <eaf_flags_t> past_flags;
3129 int past_retslot_flags = 0;
3130 int past_static_chain_flags = 0;
3131
71dbabcc
JH
3132 /* Initialize the summary.
3133 If we run in local mode there is possibly pre-existing summary from
3134 IPA pass. Dump it so it is easy to compare if mod-ref info has
3135 improved. */
3136 if (!ipa)
3137 {
3138 if (!optimization_summaries)
3139 optimization_summaries = modref_summaries::create_ggc (symtab);
3140 else /* Remove existing summary if we are re-running the pass. */
3141 {
22c24234
ML
3142 summary = optimization_summaries->get (fnode);
3143 if (summary != NULL
56cb815b 3144 && summary->loads)
71dbabcc 3145 {
22c24234
ML
3146 if (dump_file)
3147 {
3148 fprintf (dump_file, "Past summary:\n");
3149 optimization_summaries->get (fnode)->dump (dump_file);
3150 }
f6f704fd
JH
3151 past_flags.reserve_exact (summary->arg_flags.length ());
3152 past_flags.splice (summary->arg_flags);
3153 past_retslot_flags = summary->retslot_flags;
3154 past_static_chain_flags = summary->static_chain_flags;
3155 past_flags_known = true;
71dbabcc 3156 }
616ca102 3157 optimization_summaries->remove (fnode);
71dbabcc 3158 }
616ca102 3159 summary = optimization_summaries->get_create (fnode);
71dbabcc
JH
3160 gcc_checking_assert (nolto && !lto);
3161 }
8a2fd716 3162 /* In IPA mode we analyze every function precisely once. Assert that. */
71dbabcc
JH
3163 else
3164 {
3165 if (nolto)
3166 {
3167 if (!summaries)
3168 summaries = modref_summaries::create_ggc (symtab);
3169 else
616ca102
ML
3170 summaries->remove (fnode);
3171 summary = summaries->get_create (fnode);
71dbabcc
JH
3172 }
3173 if (lto)
3174 {
3175 if (!summaries_lto)
3176 summaries_lto = modref_summaries_lto::create_ggc (symtab);
3177 else
616ca102
ML
3178 summaries_lto->remove (fnode);
3179 summary_lto = summaries_lto->get_create (fnode);
71dbabcc 3180 }
6cef01c3
JH
3181 if (!fnspec_summaries)
3182 fnspec_summaries = new fnspec_summaries_t (symtab);
85ebbabd
JH
3183 if (!escape_summaries)
3184 escape_summaries = new escape_summaries_t (symtab);
71dbabcc
JH
3185 }
3186
d119f34c
JH
3187
3188 /* Create and initialize summary for F.
3189 Note that summaries may be already allocated from previous
3190 run of the pass. */
3191 if (nolto)
3192 {
3193 gcc_assert (!summary->loads);
8632f8c6 3194 summary->loads = modref_records::create_ggc ();
d119f34c 3195 gcc_assert (!summary->stores);
8632f8c6 3196 summary->stores = modref_records::create_ggc ();
617695cd 3197 summary->writes_errno = false;
992644c3 3198 summary->side_effects = false;
a34edf9a
JH
3199 summary->nondeterministic = false;
3200 summary->calls_interposable = false;
d119f34c
JH
3201 }
3202 if (lto)
3203 {
71dbabcc 3204 gcc_assert (!summary_lto->loads);
8632f8c6 3205 summary_lto->loads = modref_records_lto::create_ggc ();
71dbabcc 3206 gcc_assert (!summary_lto->stores);
8632f8c6 3207 summary_lto->stores = modref_records_lto::create_ggc ();
6cef01c3 3208 summary_lto->writes_errno = false;
992644c3 3209 summary_lto->side_effects = false;
a34edf9a
JH
3210 summary_lto->nondeterministic = false;
3211 summary_lto->calls_interposable = false;
d119f34c 3212 }
520d5ad3 3213
7798ae1a
JH
3214 analyze_parms (summary, summary_lto, ipa,
3215 past_flags, past_retslot_flags, past_static_chain_flags);
520d5ad3 3216
09a4ffb7
JH
3217 {
3218 modref_access_analysis analyzer (ipa, summary, summary_lto);
3219 analyzer.analyze ();
3220 }
494bdadf
JH
3221
3222 if (!ipa && flag_ipa_pure_const)
3223 {
1b62cddc 3224 if (!summary->stores->every_base && !summary->stores->bases
a34edf9a 3225 && !summary->nondeterministic)
494bdadf 3226 {
a34edf9a
JH
3227 if (!summary->loads->every_base && !summary->loads->bases
3228 && !summary->calls_interposable)
616ca102
ML
3229 fixup_cfg = ipa_make_function_const (fnode,
3230 summary->side_effects, true);
494bdadf 3231 else
616ca102
ML
3232 fixup_cfg = ipa_make_function_pure (fnode,
3233 summary->side_effects, true);
494bdadf
JH
3234 }
3235 }
09a4ffb7 3236 int ecf_flags = flags_from_decl_or_type (current_function_decl);
71dbabcc
JH
3237 if (summary && !summary->useful_p (ecf_flags))
3238 {
3239 if (!ipa)
3240 optimization_summaries->remove (fnode);
3241 else
3242 summaries->remove (fnode);
3243 summary = NULL;
3244 }
e0040bc3 3245 if (summary)
5aa91072 3246 summary->finalize (current_function_decl);
71dbabcc
JH
3247 if (summary_lto && !summary_lto->useful_p (ecf_flags))
3248 {
3249 summaries_lto->remove (fnode);
3250 summary_lto = NULL;
3251 }
992644c3 3252
85ebbabd
JH
3253 if (ipa && !summary && !summary_lto)
3254 remove_modref_edge_summaries (fnode);
d119f34c
JH
3255
3256 if (dump_file)
3257 {
3258 fprintf (dump_file, " - modref done with result: tracked.\n");
71dbabcc
JH
3259 if (summary)
3260 summary->dump (dump_file);
3261 if (summary_lto)
3262 summary_lto->dump (dump_file);
85ebbabd 3263 dump_modref_edge_summaries (dump_file, fnode, 2);
f6f704fd
JH
3264 /* To simplify debugging, compare IPA and local solutions. */
3265 if (past_flags_known && summary)
3266 {
3267 size_t len = summary->arg_flags.length ();
3268
3269 if (past_flags.length () > len)
3270 len = past_flags.length ();
3271 for (size_t i = 0; i < len; i++)
3272 {
3273 int old_flags = i < past_flags.length () ? past_flags[i] : 0;
3274 int new_flags = i < summary->arg_flags.length ()
3275 ? summary->arg_flags[i] : 0;
3276 old_flags = remove_useless_eaf_flags
3277 (old_flags, flags_from_decl_or_type (current_function_decl),
3278 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3279 if (old_flags != new_flags)
3280 {
7798ae1a
JH
3281 if ((old_flags & ~new_flags) == 0
3282 || (new_flags & EAF_UNUSED))
f6f704fd
JH
3283 fprintf (dump_file, " Flags for param %i improved:",
3284 (int)i);
f6f704fd 3285 else
7798ae1a 3286 gcc_unreachable ();
f6f704fd
JH
3287 dump_eaf_flags (dump_file, old_flags, false);
3288 fprintf (dump_file, " -> ");
3289 dump_eaf_flags (dump_file, new_flags, true);
3290 }
3291 }
3292 past_retslot_flags = remove_useless_eaf_flags
3293 (past_retslot_flags,
3294 flags_from_decl_or_type (current_function_decl),
3295 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3296 if (past_retslot_flags != summary->retslot_flags)
3297 {
7798ae1a
JH
3298 if ((past_retslot_flags & ~summary->retslot_flags) == 0
3299 || (summary->retslot_flags & EAF_UNUSED))
f6f704fd 3300 fprintf (dump_file, " Flags for retslot improved:");
f6f704fd 3301 else
7798ae1a 3302 gcc_unreachable ();
f6f704fd
JH
3303 dump_eaf_flags (dump_file, past_retslot_flags, false);
3304 fprintf (dump_file, " -> ");
3305 dump_eaf_flags (dump_file, summary->retslot_flags, true);
3306 }
3307 past_static_chain_flags = remove_useless_eaf_flags
3308 (past_static_chain_flags,
3309 flags_from_decl_or_type (current_function_decl),
3310 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3311 if (past_static_chain_flags != summary->static_chain_flags)
3312 {
7798ae1a
JH
3313 if ((past_static_chain_flags & ~summary->static_chain_flags) == 0
3314 || (summary->static_chain_flags & EAF_UNUSED))
f6f704fd 3315 fprintf (dump_file, " Flags for static chain improved:");
f6f704fd 3316 else
7798ae1a 3317 gcc_unreachable ();
f6f704fd
JH
3318 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3319 fprintf (dump_file, " -> ");
3320 dump_eaf_flags (dump_file, summary->static_chain_flags, true);
3321 }
3322 }
3323 else if (past_flags_known && !summary)
3324 {
3325 for (size_t i = 0; i < past_flags.length (); i++)
3326 {
3327 int old_flags = past_flags[i];
3328 old_flags = remove_useless_eaf_flags
3329 (old_flags, flags_from_decl_or_type (current_function_decl),
3330 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3331 if (old_flags)
3332 {
3333 fprintf (dump_file, " Flags for param %i worsened:",
3334 (int)i);
3335 dump_eaf_flags (dump_file, old_flags, false);
3336 fprintf (dump_file, " -> \n");
3337 }
3338 }
3339 past_retslot_flags = remove_useless_eaf_flags
3340 (past_retslot_flags,
3341 flags_from_decl_or_type (current_function_decl),
3342 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3343 if (past_retslot_flags)
3344 {
3345 fprintf (dump_file, " Flags for retslot worsened:");
3346 dump_eaf_flags (dump_file, past_retslot_flags, false);
3347 fprintf (dump_file, " ->\n");
3348 }
3349 past_static_chain_flags = remove_useless_eaf_flags
3350 (past_static_chain_flags,
3351 flags_from_decl_or_type (current_function_decl),
3352 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3353 if (past_static_chain_flags)
3354 {
3355 fprintf (dump_file, " Flags for static chain worsened:");
3356 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3357 fprintf (dump_file, " ->\n");
3358 }
3359 }
d119f34c 3360 }
494bdadf 3361 return fixup_cfg;
d119f34c
JH
3362}
3363
3364/* Callback for generate_summary. */
3365
3366static void
3367modref_generate (void)
3368{
3369 struct cgraph_node *node;
3370 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3371 {
3372 function *f = DECL_STRUCT_FUNCTION (node->decl);
3373 if (!f)
3374 continue;
3375 push_cfun (f);
2cadaa1f 3376 analyze_function (true);
d119f34c
JH
3377 pop_cfun ();
3378 }
3379}
3380
18f0873d
JH
3381} /* ANON namespace. */
3382
d70ef656
JH
3383/* Debugging helper. */
3384
3385void
3386debug_eaf_flags (int flags)
3387{
3388 dump_eaf_flags (stderr, flags, true);
3389}
3390
d119f34c
JH
3391/* Called when a new function is inserted to callgraph late. */
3392
3393void
3394modref_summaries::insert (struct cgraph_node *node, modref_summary *)
3395{
56cb815b
JH
3396 /* Local passes ought to be executed by the pass manager. */
3397 if (this == optimization_summaries)
71dbabcc
JH
3398 {
3399 optimization_summaries->remove (node);
56cb815b
JH
3400 return;
3401 }
1a90e99f
JH
3402 if (!DECL_STRUCT_FUNCTION (node->decl)
3403 || !opt_for_fn (node->decl, flag_ipa_modref))
56cb815b 3404 {
71dbabcc 3405 summaries->remove (node);
56cb815b 3406 return;
71dbabcc
JH
3407 }
3408 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2cadaa1f 3409 analyze_function (true);
71dbabcc
JH
3410 pop_cfun ();
3411}
3412
3413/* Called when a new function is inserted to callgraph late. */
3414
3415void
3416modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
3417{
3418 /* We do not support adding new function when IPA information is already
3419 propagated. This is done only by SIMD cloning that is not very
3420 critical. */
3421 if (!DECL_STRUCT_FUNCTION (node->decl)
1a90e99f 3422 || !opt_for_fn (node->decl, flag_ipa_modref)
71dbabcc
JH
3423 || propagated)
3424 {
3425 summaries_lto->remove (node);
3426 return;
3427 }
d119f34c 3428 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2cadaa1f 3429 analyze_function (true);
d119f34c
JH
3430 pop_cfun ();
3431}
3432
3433/* Called when new clone is inserted to callgraph late. */
3434
3435void
56cb815b 3436modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
d119f34c
JH
3437 modref_summary *src_data,
3438 modref_summary *dst_data)
3439{
8a2fd716 3440 /* Do not duplicate optimization summaries; we do not handle parameter
56cb815b
JH
3441 transforms on them. */
3442 if (this == optimization_summaries)
d119f34c 3443 {
56cb815b
JH
3444 optimization_summaries->remove (dst);
3445 return;
d119f34c 3446 }
8632f8c6 3447 dst_data->stores = modref_records::create_ggc ();
56cb815b 3448 dst_data->stores->copy_from (src_data->stores);
8632f8c6 3449 dst_data->loads = modref_records::create_ggc ();
56cb815b 3450 dst_data->loads->copy_from (src_data->loads);
64f3e71c
JH
3451 dst_data->kills.reserve_exact (src_data->kills.length ());
3452 dst_data->kills.splice (src_data->kills);
617695cd 3453 dst_data->writes_errno = src_data->writes_errno;
992644c3 3454 dst_data->side_effects = src_data->side_effects;
a34edf9a
JH
3455 dst_data->nondeterministic = src_data->nondeterministic;
3456 dst_data->calls_interposable = src_data->calls_interposable;
5962efe9
JH
3457 if (src_data->arg_flags.length ())
3458 dst_data->arg_flags = src_data->arg_flags.copy ();
b8ef019a 3459 dst_data->retslot_flags = src_data->retslot_flags;
a70c0512 3460 dst_data->static_chain_flags = src_data->static_chain_flags;
71dbabcc
JH
3461}
3462
3463/* Called when new clone is inserted to callgraph late. */
3464
3465void
3466modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
3467 modref_summary_lto *src_data,
3468 modref_summary_lto *dst_data)
3469{
8a2fd716 3470 /* Be sure that no further cloning happens after ipa-modref. If it does
fe90c504
JH
3471 we will need to update signatures for possible param changes. */
3472 gcc_checking_assert (!((modref_summaries_lto *)summaries_lto)->propagated);
8632f8c6 3473 dst_data->stores = modref_records_lto::create_ggc ();
56cb815b 3474 dst_data->stores->copy_from (src_data->stores);
8632f8c6 3475 dst_data->loads = modref_records_lto::create_ggc ();
56cb815b 3476 dst_data->loads->copy_from (src_data->loads);
74509b96
JH
3477 dst_data->kills.reserve_exact (src_data->kills.length ());
3478 dst_data->kills.splice (src_data->kills);
6cef01c3 3479 dst_data->writes_errno = src_data->writes_errno;
992644c3 3480 dst_data->side_effects = src_data->side_effects;
a34edf9a
JH
3481 dst_data->nondeterministic = src_data->nondeterministic;
3482 dst_data->calls_interposable = src_data->calls_interposable;
5962efe9
JH
3483 if (src_data->arg_flags.length ())
3484 dst_data->arg_flags = src_data->arg_flags.copy ();
b8ef019a 3485 dst_data->retslot_flags = src_data->retslot_flags;
a70c0512 3486 dst_data->static_chain_flags = src_data->static_chain_flags;
d119f34c
JH
3487}
3488
3489namespace
3490{
3491/* Definition of the modref pass on GIMPLE. */
3492const pass_data pass_data_modref = {
3493 GIMPLE_PASS,
3494 "modref",
3495 OPTGROUP_IPA,
3496 TV_TREE_MODREF,
3497 (PROP_cfg | PROP_ssa),
3498 0,
3499 0,
3500 0,
3501 0,
3502};
3503
3504class pass_modref : public gimple_opt_pass
3505{
3506 public:
3507 pass_modref (gcc::context *ctxt)
3508 : gimple_opt_pass (pass_data_modref, ctxt) {}
3509
d119f34c
JH
3510 /* opt_pass methods: */
3511 opt_pass *clone ()
3512 {
3513 return new pass_modref (m_ctxt);
3514 }
3515 virtual bool gate (function *)
3516 {
3517 return flag_ipa_modref;
3518 }
3519 virtual unsigned int execute (function *);
3520};
3521
3522/* Encode TT to the output block OB using the summary streaming API. */
3523
3524static void
3525write_modref_records (modref_records_lto *tt, struct output_block *ob)
3526{
d119f34c
JH
3527 streamer_write_uhwi (ob, tt->every_base);
3528 streamer_write_uhwi (ob, vec_safe_length (tt->bases));
74509b96 3529 for (auto base_node : tt->bases)
d119f34c
JH
3530 {
3531 stream_write_tree (ob, base_node->base, true);
3532
3533 streamer_write_uhwi (ob, base_node->every_ref);
3534 streamer_write_uhwi (ob, vec_safe_length (base_node->refs));
c33f4742 3535
74509b96 3536 for (auto ref_node : base_node->refs)
d119f34c
JH
3537 {
3538 stream_write_tree (ob, ref_node->ref, true);
c33f4742
JH
3539 streamer_write_uhwi (ob, ref_node->every_access);
3540 streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));
3541
74509b96
JH
3542 for (auto access_node : ref_node->accesses)
3543 access_node.stream_out (ob);
d119f34c
JH
3544 }
3545 }
3546}
3547
3548/* Read a modref_tree from the input block IB using the data from DATA_IN.
3549 This assumes that the tree was encoded using write_modref_tree.
3550 Either nolto_ret or lto_ret is initialized by the tree depending whether
46a27415 3551 LTO streaming is expected or not. */
d119f34c 3552
18f0873d 3553static void
8632f8c6
JH
3554read_modref_records (tree decl,
3555 lto_input_block *ib, struct data_in *data_in,
d119f34c
JH
3556 modref_records **nolto_ret,
3557 modref_records_lto **lto_ret)
3558{
8632f8c6
JH
3559 size_t max_bases = opt_for_fn (decl, param_modref_max_bases);
3560 size_t max_refs = opt_for_fn (decl, param_modref_max_refs);
3561 size_t max_accesses = opt_for_fn (decl, param_modref_max_accesses);
d119f34c 3562
71dbabcc 3563 if (lto_ret)
8632f8c6 3564 *lto_ret = modref_records_lto::create_ggc ();
71dbabcc 3565 if (nolto_ret)
8632f8c6 3566 *nolto_ret = modref_records::create_ggc ();
71dbabcc 3567 gcc_checking_assert (lto_ret || nolto_ret);
d119f34c
JH
3568
3569 size_t every_base = streamer_read_uhwi (ib);
3570 size_t nbase = streamer_read_uhwi (ib);
3571
3572 gcc_assert (!every_base || nbase == 0);
3573 if (every_base)
3574 {
71dbabcc 3575 if (nolto_ret)
d119f34c 3576 (*nolto_ret)->collapse ();
71dbabcc 3577 if (lto_ret)
d119f34c
JH
3578 (*lto_ret)->collapse ();
3579 }
3580 for (size_t i = 0; i < nbase; i++)
3581 {
3582 tree base_tree = stream_read_tree (ib, data_in);
3583 modref_base_node <alias_set_type> *nolto_base_node = NULL;
3584 modref_base_node <tree> *lto_base_node = NULL;
3585
3586 /* At stream in time we have LTO alias info. Check if we streamed in
3587 something obviously unnecessary. Do not glob types by alias sets;
3588 it is not 100% clear that ltrans types will get merged same way.
3589 Types may get refined based on ODR type conflicts. */
3590 if (base_tree && !get_alias_set (base_tree))
3591 {
3592 if (dump_file)
3593 {
3594 fprintf (dump_file, "Streamed in alias set 0 type ");
3595 print_generic_expr (dump_file, base_tree);
3596 fprintf (dump_file, "\n");
3597 }
3598 base_tree = NULL;
3599 }
3600
71dbabcc 3601 if (nolto_ret)
d119f34c
JH
3602 nolto_base_node = (*nolto_ret)->insert_base (base_tree
3603 ? get_alias_set (base_tree)
8632f8c6 3604 : 0, 0, INT_MAX);
71dbabcc 3605 if (lto_ret)
8632f8c6 3606 lto_base_node = (*lto_ret)->insert_base (base_tree, 0, max_bases);
d119f34c
JH
3607 size_t every_ref = streamer_read_uhwi (ib);
3608 size_t nref = streamer_read_uhwi (ib);
3609
3610 gcc_assert (!every_ref || nref == 0);
3611 if (every_ref)
3612 {
3613 if (nolto_base_node)
3614 nolto_base_node->collapse ();
3615 if (lto_base_node)
3616 lto_base_node->collapse ();
3617 }
3618 for (size_t j = 0; j < nref; j++)
3619 {
3620 tree ref_tree = stream_read_tree (ib, data_in);
3621
3622 if (ref_tree && !get_alias_set (ref_tree))
3623 {
3624 if (dump_file)
3625 {
3626 fprintf (dump_file, "Streamed in alias set 0 type ");
3627 print_generic_expr (dump_file, ref_tree);
3628 fprintf (dump_file, "\n");
3629 }
c33f4742 3630 ref_tree = NULL;
d119f34c
JH
3631 }
3632
c33f4742
JH
3633 modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
3634 modref_ref_node <tree> *lto_ref_node = NULL;
3635
d119f34c 3636 if (nolto_base_node)
c33f4742
JH
3637 nolto_ref_node
3638 = nolto_base_node->insert_ref (ref_tree
3639 ? get_alias_set (ref_tree) : 0,
3640 max_refs);
d119f34c 3641 if (lto_base_node)
c33f4742
JH
3642 lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);
3643
3644 size_t every_access = streamer_read_uhwi (ib);
3645 size_t naccesses = streamer_read_uhwi (ib);
3646
425369bf
JH
3647 if (nolto_ref_node && every_access)
3648 nolto_ref_node->collapse ();
3649 if (lto_ref_node && every_access)
3650 lto_ref_node->collapse ();
c33f4742
JH
3651
3652 for (size_t k = 0; k < naccesses; k++)
3653 {
74509b96 3654 modref_access_node a = modref_access_node::stream_in (ib);
c33f4742 3655 if (nolto_ref_node)
5c85f295 3656 nolto_ref_node->insert_access (a, max_accesses, false);
c33f4742 3657 if (lto_ref_node)
5c85f295 3658 lto_ref_node->insert_access (a, max_accesses, false);
c33f4742 3659 }
d119f34c
JH
3660 }
3661 }
71dbabcc 3662 if (lto_ret)
c33f4742 3663 (*lto_ret)->cleanup ();
71dbabcc 3664 if (nolto_ret)
c33f4742 3665 (*nolto_ret)->cleanup ();
d119f34c
JH
3666}
3667
85ebbabd
JH
3668/* Write ESUM to BP. */
3669
3670static void
3671modref_write_escape_summary (struct bitpack_d *bp, escape_summary *esum)
3672{
3673 if (!esum)
3674 {
3675 bp_pack_var_len_unsigned (bp, 0);
3676 return;
3677 }
3678 bp_pack_var_len_unsigned (bp, esum->esc.length ());
3679 unsigned int i;
3680 escape_entry *ee;
3681 FOR_EACH_VEC_ELT (esum->esc, i, ee)
3682 {
b8ef019a 3683 bp_pack_var_len_int (bp, ee->parm_index);
85ebbabd
JH
3684 bp_pack_var_len_unsigned (bp, ee->arg);
3685 bp_pack_var_len_unsigned (bp, ee->min_flags);
3686 bp_pack_value (bp, ee->direct, 1);
3687 }
3688}
3689
3690/* Read escape summary for E from BP. */
3691
3692static void
3693modref_read_escape_summary (struct bitpack_d *bp, cgraph_edge *e)
3694{
3695 unsigned int n = bp_unpack_var_len_unsigned (bp);
3696 if (!n)
3697 return;
3698 escape_summary *esum = escape_summaries->get_create (e);
3699 esum->esc.reserve_exact (n);
3700 for (unsigned int i = 0; i < n; i++)
3701 {
3702 escape_entry ee;
b8ef019a 3703 ee.parm_index = bp_unpack_var_len_int (bp);
85ebbabd
JH
3704 ee.arg = bp_unpack_var_len_unsigned (bp);
3705 ee.min_flags = bp_unpack_var_len_unsigned (bp);
3706 ee.direct = bp_unpack_value (bp, 1);
3707 esum->esc.quick_push (ee);
3708 }
3709}
3710
d119f34c
JH
3711/* Callback for write_summary. */
3712
3713static void
3714modref_write ()
3715{
3716 struct output_block *ob = create_output_block (LTO_section_ipa_modref);
3717 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3718 unsigned int count = 0;
3719 int i;
3720
71dbabcc 3721 if (!summaries_lto)
d119f34c
JH
3722 {
3723 streamer_write_uhwi (ob, 0);
3724 streamer_write_char_stream (ob->main_stream, 0);
3725 produce_asm (ob, NULL);
3726 destroy_output_block (ob);
3727 return;
3728 }
3729
3730 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3731 {
3732 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3733 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
71dbabcc 3734 modref_summary_lto *r;
d119f34c
JH
3735
3736 if (cnode && cnode->definition && !cnode->alias
71dbabcc
JH
3737 && (r = summaries_lto->get (cnode))
3738 && r->useful_p (flags_from_decl_or_type (cnode->decl)))
d119f34c
JH
3739 count++;
3740 }
3741 streamer_write_uhwi (ob, count);
3742
3743 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3744 {
3745 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3746 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
3747
3748 if (cnode && cnode->definition && !cnode->alias)
3749 {
71dbabcc 3750 modref_summary_lto *r = summaries_lto->get (cnode);
d119f34c 3751
71dbabcc 3752 if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
d119f34c
JH
3753 continue;
3754
3755 streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));
3756
85ebbabd
JH
3757 streamer_write_uhwi (ob, r->arg_flags.length ());
3758 for (unsigned int i = 0; i < r->arg_flags.length (); i++)
8da8ed43 3759 streamer_write_uhwi (ob, r->arg_flags[i]);
b8ef019a 3760 streamer_write_uhwi (ob, r->retslot_flags);
a70c0512 3761 streamer_write_uhwi (ob, r->static_chain_flags);
85ebbabd 3762
56cb815b
JH
3763 write_modref_records (r->loads, ob);
3764 write_modref_records (r->stores, ob);
74509b96
JH
3765 streamer_write_uhwi (ob, r->kills.length ());
3766 for (auto kill : r->kills)
3767 kill.stream_out (ob);
6cef01c3
JH
3768
3769 struct bitpack_d bp = bitpack_create (ob->main_stream);
3770 bp_pack_value (&bp, r->writes_errno, 1);
992644c3 3771 bp_pack_value (&bp, r->side_effects, 1);
a34edf9a
JH
3772 bp_pack_value (&bp, r->nondeterministic, 1);
3773 bp_pack_value (&bp, r->calls_interposable, 1);
6cef01c3
JH
3774 if (!flag_wpa)
3775 {
3776 for (cgraph_edge *e = cnode->indirect_calls;
3777 e; e = e->next_callee)
3778 {
3779 class fnspec_summary *sum = fnspec_summaries->get (e);
3780 bp_pack_value (&bp, sum != NULL, 1);
3781 if (sum)
3782 bp_pack_string (ob, &bp, sum->fnspec, true);
85ebbabd
JH
3783 class escape_summary *esum = escape_summaries->get (e);
3784 modref_write_escape_summary (&bp,esum);
6cef01c3
JH
3785 }
3786 for (cgraph_edge *e = cnode->callees; e; e = e->next_callee)
3787 {
3788 class fnspec_summary *sum = fnspec_summaries->get (e);
3789 bp_pack_value (&bp, sum != NULL, 1);
3790 if (sum)
3791 bp_pack_string (ob, &bp, sum->fnspec, true);
85ebbabd
JH
3792 class escape_summary *esum = escape_summaries->get (e);
3793 modref_write_escape_summary (&bp,esum);
6cef01c3
JH
3794 }
3795 }
3796 streamer_write_bitpack (&bp);
d119f34c
JH
3797 }
3798 }
3799 streamer_write_char_stream (ob->main_stream, 0);
3800 produce_asm (ob, NULL);
3801 destroy_output_block (ob);
3802}
3803
3804static void
3805read_section (struct lto_file_decl_data *file_data, const char *data,
3806 size_t len)
3807{
3808 const struct lto_function_header *header
3809 = (const struct lto_function_header *) data;
3810 const int cfg_offset = sizeof (struct lto_function_header);
3811 const int main_offset = cfg_offset + header->cfg_size;
3812 const int string_offset = main_offset + header->main_size;
3813 struct data_in *data_in;
3814 unsigned int i;
3815 unsigned int f_count;
3816
3817 lto_input_block ib ((const char *) data + main_offset, header->main_size,
3818 file_data->mode_table);
3819
3820 data_in
3821 = lto_data_in_create (file_data, (const char *) data + string_offset,
3822 header->string_size, vNULL);
3823 f_count = streamer_read_uhwi (&ib);
3824 for (i = 0; i < f_count; i++)
3825 {
3826 struct cgraph_node *node;
3827 lto_symtab_encoder_t encoder;
3828
3829 unsigned int index = streamer_read_uhwi (&ib);
3830 encoder = file_data->symtab_node_encoder;
3831 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
3832 index));
3833
71dbabcc
JH
3834 modref_summary *modref_sum = summaries
3835 ? summaries->get_create (node) : NULL;
3836 modref_summary_lto *modref_sum_lto = summaries_lto
3837 ? summaries_lto->get_create (node)
3838 : NULL;
71dbabcc
JH
3839 if (optimization_summaries)
3840 modref_sum = optimization_summaries->get_create (node);
3841
ea937e7d 3842 if (modref_sum)
992644c3
JH
3843 {
3844 modref_sum->writes_errno = false;
3845 modref_sum->side_effects = false;
a34edf9a
JH
3846 modref_sum->nondeterministic = false;
3847 modref_sum->calls_interposable = false;
992644c3 3848 }
6cef01c3 3849 if (modref_sum_lto)
992644c3
JH
3850 {
3851 modref_sum_lto->writes_errno = false;
3852 modref_sum_lto->side_effects = false;
a34edf9a
JH
3853 modref_sum_lto->nondeterministic = false;
3854 modref_sum_lto->calls_interposable = false;
992644c3 3855 }
ea937e7d 3856
71dbabcc
JH
3857 gcc_assert (!modref_sum || (!modref_sum->loads
3858 && !modref_sum->stores));
3859 gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads
3860 && !modref_sum_lto->stores));
85ebbabd
JH
3861 unsigned int args = streamer_read_uhwi (&ib);
3862 if (args && modref_sum)
3863 modref_sum->arg_flags.reserve_exact (args);
3864 if (args && modref_sum_lto)
3865 modref_sum_lto->arg_flags.reserve_exact (args);
3866 for (unsigned int i = 0; i < args; i++)
3867 {
8da8ed43 3868 eaf_flags_t flags = streamer_read_uhwi (&ib);
85ebbabd
JH
3869 if (modref_sum)
3870 modref_sum->arg_flags.quick_push (flags);
3871 if (modref_sum_lto)
3872 modref_sum_lto->arg_flags.quick_push (flags);
3873 }
b8ef019a
JH
3874 eaf_flags_t flags = streamer_read_uhwi (&ib);
3875 if (modref_sum)
3876 modref_sum->retslot_flags = flags;
3877 if (modref_sum_lto)
3878 modref_sum_lto->retslot_flags = flags;
a70c0512
JH
3879
3880 flags = streamer_read_uhwi (&ib);
3881 if (modref_sum)
3882 modref_sum->static_chain_flags = flags;
3883 if (modref_sum_lto)
3884 modref_sum_lto->static_chain_flags = flags;
3885
8632f8c6 3886 read_modref_records (node->decl, &ib, data_in,
56cb815b
JH
3887 modref_sum ? &modref_sum->loads : NULL,
3888 modref_sum_lto ? &modref_sum_lto->loads : NULL);
8632f8c6 3889 read_modref_records (node->decl, &ib, data_in,
56cb815b
JH
3890 modref_sum ? &modref_sum->stores : NULL,
3891 modref_sum_lto ? &modref_sum_lto->stores : NULL);
74509b96
JH
3892 int j = streamer_read_uhwi (&ib);
3893 if (j && modref_sum)
3894 modref_sum->kills.reserve_exact (j);
3895 if (j && modref_sum_lto)
3896 modref_sum_lto->kills.reserve_exact (j);
3897 for (int k = 0; k < j; k++)
3898 {
3899 modref_access_node a = modref_access_node::stream_in (&ib);
3900
3901 if (modref_sum)
3902 modref_sum->kills.quick_push (a);
3903 if (modref_sum_lto)
3904 modref_sum_lto->kills.quick_push (a);
3905 }
6cef01c3
JH
3906 struct bitpack_d bp = streamer_read_bitpack (&ib);
3907 if (bp_unpack_value (&bp, 1))
3908 {
3909 if (modref_sum)
3910 modref_sum->writes_errno = true;
3911 if (modref_sum_lto)
3912 modref_sum_lto->writes_errno = true;
3913 }
992644c3
JH
3914 if (bp_unpack_value (&bp, 1))
3915 {
3916 if (modref_sum)
3917 modref_sum->side_effects = true;
3918 if (modref_sum_lto)
3919 modref_sum_lto->side_effects = true;
3920 }
a34edf9a
JH
3921 if (bp_unpack_value (&bp, 1))
3922 {
3923 if (modref_sum)
3924 modref_sum->nondeterministic = true;
3925 if (modref_sum_lto)
3926 modref_sum_lto->nondeterministic = true;
3927 }
3928 if (bp_unpack_value (&bp, 1))
3929 {
3930 if (modref_sum)
3931 modref_sum->calls_interposable = true;
3932 if (modref_sum_lto)
3933 modref_sum_lto->calls_interposable = true;
3934 }
6cef01c3
JH
3935 if (!flag_ltrans)
3936 {
3937 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
3938 {
3939 if (bp_unpack_value (&bp, 1))
3940 {
3941 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3942 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3943 }
85ebbabd 3944 modref_read_escape_summary (&bp, e);
6cef01c3
JH
3945 }
3946 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
3947 {
3948 if (bp_unpack_value (&bp, 1))
3949 {
3950 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3951 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3952 }
85ebbabd 3953 modref_read_escape_summary (&bp, e);
6cef01c3
JH
3954 }
3955 }
e0040bc3 3956 if (flag_ltrans)
5aa91072 3957 modref_sum->finalize (node->decl);
d119f34c
JH
3958 if (dump_file)
3959 {
3960 fprintf (dump_file, "Read modref for %s\n",
3961 node->dump_name ());
71dbabcc
JH
3962 if (modref_sum)
3963 modref_sum->dump (dump_file);
3964 if (modref_sum_lto)
3965 modref_sum_lto->dump (dump_file);
85ebbabd 3966 dump_modref_edge_summaries (dump_file, node, 4);
d119f34c 3967 }
d119f34c
JH
3968 }
3969
3970 lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
3971 len);
3972 lto_data_in_delete (data_in);
3973}
3974
3975/* Callback for read_summary. */
3976
3977static void
3978modref_read (void)
3979{
3980 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3981 struct lto_file_decl_data *file_data;
3982 unsigned int j = 0;
3983
71dbabcc
JH
3984 gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto);
3985 if (flag_ltrans)
3986 optimization_summaries = modref_summaries::create_ggc (symtab);
3987 else
3988 {
3989 if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
3990 summaries_lto = modref_summaries_lto::create_ggc (symtab);
3991 if (!flag_wpa
3992 || (flag_incremental_link == INCREMENTAL_LINK_LTO
3993 && flag_fat_lto_objects))
3994 summaries = modref_summaries::create_ggc (symtab);
6cef01c3
JH
3995 if (!fnspec_summaries)
3996 fnspec_summaries = new fnspec_summaries_t (symtab);
85ebbabd
JH
3997 if (!escape_summaries)
3998 escape_summaries = new escape_summaries_t (symtab);
71dbabcc 3999 }
d119f34c
JH
4000
4001 while ((file_data = file_data_vec[j++]))
4002 {
4003 size_t len;
4004 const char *data = lto_get_summary_section_data (file_data,
4005 LTO_section_ipa_modref,
4006 &len);
4007 if (data)
4008 read_section (file_data, data, len);
4009 else
4010 /* Fatal error here. We do not want to support compiling ltrans units
4011 with different version of compiler or different flags than the WPA
4012 unit, so this should never happen. */
4013 fatal_error (input_location,
4014 "IPA modref summary is missing in input file");
4015 }
4016}
4017
85ebbabd
JH
4018/* Recompute arg_flags for param adjustments in INFO. */
4019
4020static void
8da8ed43 4021remap_arg_flags (auto_vec <eaf_flags_t> &arg_flags, clone_info *info)
85ebbabd 4022{
8da8ed43 4023 auto_vec<eaf_flags_t> old = arg_flags.copy ();
85ebbabd
JH
4024 int max = -1;
4025 size_t i;
4026 ipa_adjusted_param *p;
4027
4028 arg_flags.release ();
4029
4030 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
4031 {
4032 int o = info->param_adjustments->get_original_index (i);
4033 if (o >= 0 && (int)old.length () > o && old[o])
4034 max = i;
4035 }
5962efe9 4036 if (max >= 0)
85ebbabd
JH
4037 arg_flags.safe_grow_cleared (max + 1, true);
4038 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
4039 {
4040 int o = info->param_adjustments->get_original_index (i);
4041 if (o >= 0 && (int)old.length () > o && old[o])
4042 arg_flags[i] = old[o];
4043 }
4044}
4045
02c80893 4046/* Update kills according to the parm map MAP. */
74509b96
JH
4047
4048static void
4049remap_kills (vec <modref_access_node> &kills, const vec <int> &map)
4050{
4051 for (size_t i = 0; i < kills.length ();)
4052 if (kills[i].parm_index >= 0)
4053 {
4054 if (kills[i].parm_index < (int)map.length ()
4055 && map[kills[i].parm_index] != MODREF_UNKNOWN_PARM)
4056 {
4057 kills[i].parm_index = map[kills[i].parm_index];
4058 i++;
4059 }
4060 else
4061 kills.unordered_remove (i);
4062 }
4063 else
4064 i++;
4065}
4066
c8fd2be1
JH
4067/* If signature changed, update the summary. */
4068
fe90c504
JH
4069static void
4070update_signature (struct cgraph_node *node)
c8fd2be1 4071{
ae7a23a3
JH
4072 clone_info *info = clone_info::get (node);
4073 if (!info || !info->param_adjustments)
fe90c504
JH
4074 return;
4075
4076 modref_summary *r = optimization_summaries
4077 ? optimization_summaries->get (node) : NULL;
4078 modref_summary_lto *r_lto = summaries_lto
4079 ? summaries_lto->get (node) : NULL;
4080 if (!r && !r_lto)
4081 return;
c8fd2be1
JH
4082 if (dump_file)
4083 {
4084 fprintf (dump_file, "Updating summary for %s from:\n",
4085 node->dump_name ());
85ebbabd
JH
4086 if (r)
4087 r->dump (dump_file);
4088 if (r_lto)
4089 r_lto->dump (dump_file);
c8fd2be1
JH
4090 }
4091
4092 size_t i, max = 0;
4093 ipa_adjusted_param *p;
4094
ae7a23a3 4095 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
c8fd2be1 4096 {
ae7a23a3 4097 int idx = info->param_adjustments->get_original_index (i);
c8fd2be1
JH
4098 if (idx > (int)max)
4099 max = idx;
4100 }
4101
4102 auto_vec <int, 32> map;
4103
5d2cedaa 4104 map.reserve (max + 1);
c8fd2be1 4105 for (i = 0; i <= max; i++)
992644c3 4106 map.quick_push (MODREF_UNKNOWN_PARM);
ae7a23a3 4107 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
c8fd2be1 4108 {
ae7a23a3 4109 int idx = info->param_adjustments->get_original_index (i);
c8fd2be1 4110 if (idx >= 0)
2f61125f 4111 map[idx] = i;
c8fd2be1 4112 }
fe90c504
JH
4113 if (r)
4114 {
4115 r->loads->remap_params (&map);
4116 r->stores->remap_params (&map);
74509b96 4117 remap_kills (r->kills, map);
85ebbabd
JH
4118 if (r->arg_flags.length ())
4119 remap_arg_flags (r->arg_flags, info);
fe90c504
JH
4120 }
4121 if (r_lto)
4122 {
4123 r_lto->loads->remap_params (&map);
4124 r_lto->stores->remap_params (&map);
74509b96 4125 remap_kills (r_lto->kills, map);
85ebbabd
JH
4126 if (r_lto->arg_flags.length ())
4127 remap_arg_flags (r_lto->arg_flags, info);
fe90c504 4128 }
c8fd2be1
JH
4129 if (dump_file)
4130 {
4131 fprintf (dump_file, "to:\n");
fe90c504 4132 if (r)
6cef01c3 4133 r->dump (dump_file);
fe90c504 4134 if (r_lto)
6cef01c3 4135 r_lto->dump (dump_file);
c8fd2be1 4136 }
e0040bc3 4137 if (r)
5aa91072 4138 r->finalize (node->decl);
fe90c504 4139 return;
c8fd2be1
JH
4140}
4141
d119f34c
JH
4142/* Definition of the modref IPA pass. */
4143const pass_data pass_data_ipa_modref =
4144{
4145 IPA_PASS, /* type */
4146 "modref", /* name */
4147 OPTGROUP_IPA, /* optinfo_flags */
4148 TV_IPA_MODREF, /* tv_id */
4149 0, /* properties_required */
4150 0, /* properties_provided */
4151 0, /* properties_destroyed */
4152 0, /* todo_flags_start */
4153 ( TODO_dump_symtab ), /* todo_flags_finish */
4154};
4155
4156class pass_ipa_modref : public ipa_opt_pass_d
4157{
4158public:
4159 pass_ipa_modref (gcc::context *ctxt)
4160 : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
4161 modref_generate, /* generate_summary */
4162 modref_write, /* write_summary */
4163 modref_read, /* read_summary */
4164 modref_write, /* write_optimization_summary */
4165 modref_read, /* read_optimization_summary */
4166 NULL, /* stmt_fixup */
4167 0, /* function_transform_todo_flags_start */
fe90c504 4168 NULL, /* function_transform */
d119f34c
JH
4169 NULL) /* variable_transform */
4170 {}
4171
4172 /* opt_pass methods: */
4173 opt_pass *clone () { return new pass_ipa_modref (m_ctxt); }
4174 virtual bool gate (function *)
4175 {
4176 return true;
4177 }
4178 virtual unsigned int execute (function *);
4179
4180};
4181
4182}
4183
2cadaa1f 4184unsigned int pass_modref::execute (function *)
d119f34c 4185{
2cadaa1f 4186 if (analyze_function (false))
494bdadf 4187 return execute_fixup_cfg ();
d119f34c
JH
4188 return 0;
4189}
4190
4191gimple_opt_pass *
4192make_pass_modref (gcc::context *ctxt)
4193{
4194 return new pass_modref (ctxt);
4195}
4196
4197ipa_opt_pass_d *
4198make_pass_ipa_modref (gcc::context *ctxt)
4199{
4200 return new pass_ipa_modref (ctxt);
4201}
4202
18f0873d
JH
4203namespace {
4204
d119f34c
JH
4205/* Skip edges from and to nodes without ipa_pure_const enabled.
4206 Ignore not available symbols. */
4207
4208static bool
4209ignore_edge (struct cgraph_edge *e)
4210{
87d75a11
JH
4211 /* We merge summaries of inline clones into summaries of functions they
4212 are inlined to. For that reason the complete function bodies must
4213 act as unit. */
4214 if (!e->inline_failed)
4215 return false;
d119f34c 4216 enum availability avail;
c87ff875 4217 cgraph_node *callee = e->callee->ultimate_alias_target
d119f34c
JH
4218 (&avail, e->caller);
4219
4220 return (avail <= AVAIL_INTERPOSABLE
56cb815b 4221 || ((!optimization_summaries || !optimization_summaries->get (callee))
494bdadf 4222 && (!summaries_lto || !summaries_lto->get (callee))));
d119f34c
JH
4223}
4224
8a2fd716 4225/* Compute parm_map for CALLEE_EDGE. */
d119f34c 4226
6cef01c3 4227static bool
c34db4b6 4228compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
ada353b8
JH
4229{
4230 class ipa_edge_args *args;
4231 if (ipa_node_params_sum
4232 && !callee_edge->call_stmt_cannot_inline_p
a4a3cdd0 4233 && (args = ipa_edge_args_sum->get (callee_edge)) != NULL)
ada353b8
JH
4234 {
4235 int i, count = ipa_get_cs_argument_count (args);
4236 class ipa_node_params *caller_parms_info, *callee_pi;
4237 class ipa_call_summary *es
4238 = ipa_call_summaries->get (callee_edge);
4239 cgraph_node *callee
c87ff875 4240 = callee_edge->callee->ultimate_alias_target
ada353b8
JH
4241 (NULL, callee_edge->caller);
4242
a4a3cdd0
MJ
4243 caller_parms_info
4244 = ipa_node_params_sum->get (callee_edge->caller->inlined_to
4245 ? callee_edge->caller->inlined_to
4246 : callee_edge->caller);
4247 callee_pi = ipa_node_params_sum->get (callee);
ada353b8 4248
520d5ad3 4249 (*parm_map).safe_grow_cleared (count, true);
ada353b8
JH
4250
4251 for (i = 0; i < count; i++)
4252 {
4253 if (es && es->param[i].points_to_local_or_readonly_memory)
4254 {
1f3a3363 4255 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
ada353b8
JH
4256 continue;
4257 }
4258
4259 struct ipa_jump_func *jf
4260 = ipa_get_ith_jump_func (args, i);
899c10c9 4261 if (jf && callee_pi)
ada353b8
JH
4262 {
4263 tree cst = ipa_value_from_jfunc (caller_parms_info,
4264 jf,
4265 ipa_get_type
4266 (callee_pi, i));
4267 if (cst && points_to_local_or_readonly_memory_p (cst))
4268 {
1f3a3363 4269 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
ada353b8
JH
4270 continue;
4271 }
4272 }
4273 if (jf && jf->type == IPA_JF_PASS_THROUGH)
4274 {
c34db4b6 4275 (*parm_map)[i].parm_index
56cb815b 4276 = ipa_get_jf_pass_through_formal_id (jf);
4d90edb9
JH
4277 if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
4278 {
4279 (*parm_map)[i].parm_offset_known = true;
4280 (*parm_map)[i].parm_offset = 0;
4281 }
4282 else if (ipa_get_jf_pass_through_operation (jf)
4283 == POINTER_PLUS_EXPR
4284 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
4285 &(*parm_map)[i].parm_offset))
4286 (*parm_map)[i].parm_offset_known = true;
4287 else
4288 (*parm_map)[i].parm_offset_known = false;
ada353b8
JH
4289 continue;
4290 }
4291 if (jf && jf->type == IPA_JF_ANCESTOR)
c34db4b6
JH
4292 {
4293 (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
4294 (*parm_map)[i].parm_offset_known = true;
c8fd2be1
JH
4295 gcc_checking_assert
4296 (!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)));
4297 (*parm_map)[i].parm_offset
4298 = ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT;
85ebbabd 4299 }
ada353b8 4300 else
c34db4b6 4301 (*parm_map)[i].parm_index = -1;
ada353b8
JH
4302 }
4303 if (dump_file)
4304 {
4305 fprintf (dump_file, " Parm map: ");
4306 for (i = 0; i < count; i++)
c34db4b6 4307 fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
ada353b8
JH
4308 fprintf (dump_file, "\n");
4309 }
6cef01c3 4310 return true;
ada353b8 4311 }
6cef01c3 4312 return false;
ada353b8
JH
4313}
4314
85ebbabd
JH
4315/* Map used to translate escape infos. */
4316
4317struct escape_map
4318{
4319 int parm_index;
4320 bool direct;
4321};
4322
b8ef019a 4323/* Update escape map for E. */
85ebbabd
JH
4324
4325static void
4326update_escape_summary_1 (cgraph_edge *e,
9851a163
JH
4327 vec <vec <escape_map>> &map,
4328 bool ignore_stores)
85ebbabd
JH
4329{
4330 escape_summary *sum = escape_summaries->get (e);
4331 if (!sum)
4332 return;
4333 auto_vec <escape_entry> old = sum->esc.copy ();
4334 sum->esc.release ();
4335
4336 unsigned int i;
4337 escape_entry *ee;
4338 FOR_EACH_VEC_ELT (old, i, ee)
4339 {
4340 unsigned int j;
4341 struct escape_map *em;
b8ef019a
JH
4342 /* TODO: We do not have jump functions for return slots, so we
4343 never propagate them to outer function. */
4344 if (ee->parm_index >= (int)map.length ()
4345 || ee->parm_index < 0)
85ebbabd
JH
4346 continue;
4347 FOR_EACH_VEC_ELT (map[ee->parm_index], j, em)
4348 {
9851a163
JH
4349 int min_flags = ee->min_flags;
4350 if (ee->direct && !em->direct)
4351 min_flags = deref_flags (min_flags, ignore_stores);
85ebbabd 4352 struct escape_entry entry = {em->parm_index, ee->arg,
a70faf6e 4353 min_flags,
85ebbabd
JH
4354 ee->direct & em->direct};
4355 sum->esc.safe_push (entry);
4356 }
4357 }
4358 if (!sum->esc.length ())
4359 escape_summaries->remove (e);
4360}
4361
02c80893 4362/* Update escape map for NODE. */
85ebbabd
JH
4363
4364static void
4365update_escape_summary (cgraph_node *node,
9851a163
JH
4366 vec <vec <escape_map>> &map,
4367 bool ignore_stores)
85ebbabd
JH
4368{
4369 if (!escape_summaries)
4370 return;
4371 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
9851a163 4372 update_escape_summary_1 (e, map, ignore_stores);
85ebbabd
JH
4373 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
4374 {
4375 if (!e->inline_failed)
9851a163 4376 update_escape_summary (e->callee, map, ignore_stores);
85ebbabd 4377 else
9851a163 4378 update_escape_summary_1 (e, map, ignore_stores);
85ebbabd
JH
4379 }
4380}
4381
6cef01c3
JH
4382/* Get parameter type from DECL. This is only safe for special cases
4383 like builtins we create fnspec for because the type match is checked
4384 at fnspec creation time. */
d119f34c 4385
6cef01c3
JH
4386static tree
4387get_parm_type (tree decl, unsigned int i)
ada353b8 4388{
6cef01c3 4389 tree t = TYPE_ARG_TYPES (TREE_TYPE (decl));
ada353b8 4390
6cef01c3
JH
4391 for (unsigned int p = 0; p < i; p++)
4392 t = TREE_CHAIN (t);
4393 return TREE_VALUE (t);
4394}
4395
4396/* Return access mode for argument I of call E with FNSPEC. */
4397
4398static modref_access_node
4399get_access_for_fnspec (cgraph_edge *e, attr_fnspec &fnspec,
4400 unsigned int i, modref_parm_map &map)
4401{
4402 tree size = NULL_TREE;
4403 unsigned int size_arg;
4404
4405 if (!fnspec.arg_specified_p (i))
4406 ;
4407 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
ada353b8 4408 {
6cef01c3
JH
4409 cgraph_node *node = e->caller->inlined_to
4410 ? e->caller->inlined_to : e->caller;
a4a3cdd0
MJ
4411 ipa_node_params *caller_parms_info = ipa_node_params_sum->get (node);
4412 ipa_edge_args *args = ipa_edge_args_sum->get (e);
6cef01c3
JH
4413 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, size_arg);
4414
4415 if (jf)
4416 size = ipa_value_from_jfunc (caller_parms_info, jf,
4417 get_parm_type (e->callee->decl, size_arg));
ada353b8 4418 }
6cef01c3
JH
4419 else if (fnspec.arg_access_size_given_by_type_p (i))
4420 size = TYPE_SIZE_UNIT (get_parm_type (e->callee->decl, i));
4421 modref_access_node a = {0, -1, -1,
4422 map.parm_offset, map.parm_index,
5c85f295 4423 map.parm_offset_known, 0};
6cef01c3
JH
4424 poly_int64 size_hwi;
4425 if (size
4426 && poly_int_tree_p (size, &size_hwi)
4427 && coeffs_in_range_p (size_hwi, 0,
4428 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
ada353b8 4429 {
6cef01c3
JH
4430 a.size = -1;
4431 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
ada353b8 4432 }
6cef01c3
JH
4433 return a;
4434}
4435
09a4ffb7
JH
4436 /* Collapse loads and return true if something changed. */
4437static bool
4438collapse_loads (modref_summary *cur_summary,
4439 modref_summary_lto *cur_summary_lto)
4440{
4441 bool changed = false;
4442
4443 if (cur_summary && !cur_summary->loads->every_base)
4444 {
4445 cur_summary->loads->collapse ();
4446 changed = true;
4447 }
4448 if (cur_summary_lto
4449 && !cur_summary_lto->loads->every_base)
4450 {
4451 cur_summary_lto->loads->collapse ();
4452 changed = true;
4453 }
4454 return changed;
4455}
4456
4457/* Collapse loads and return true if something changed. */
4458
4459static bool
4460collapse_stores (modref_summary *cur_summary,
4461 modref_summary_lto *cur_summary_lto)
4462{
4463 bool changed = false;
4464
4465 if (cur_summary && !cur_summary->stores->every_base)
4466 {
4467 cur_summary->stores->collapse ();
4468 changed = true;
4469 }
4470 if (cur_summary_lto
4471 && !cur_summary_lto->stores->every_base)
4472 {
4473 cur_summary_lto->stores->collapse ();
4474 changed = true;
4475 }
4476 return changed;
4477}
4478
6cef01c3
JH
4479/* Call E in NODE with ECF_FLAGS has no summary; update MODREF_SUMMARY and
4480 CUR_SUMMARY_LTO accordingly. Return true if something changed. */
4481
4482static bool
4483propagate_unknown_call (cgraph_node *node,
4484 cgraph_edge *e, int ecf_flags,
85ebbabd 4485 modref_summary *cur_summary,
8d3abf42
JH
4486 modref_summary_lto *cur_summary_lto,
4487 bool nontrivial_scc)
6cef01c3
JH
4488{
4489 bool changed = false;
6cef01c3
JH
4490 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4491 auto_vec <modref_parm_map, 32> parm_map;
992644c3
JH
4492 bool looping;
4493
4494 if (e->callee
4495 && builtin_safe_for_const_function_p (&looping, e->callee->decl))
4496 {
8d3abf42 4497 if (looping && cur_summary && !cur_summary->side_effects)
992644c3
JH
4498 {
4499 cur_summary->side_effects = true;
4500 changed = true;
4501 }
8d3abf42 4502 if (looping && cur_summary_lto && !cur_summary_lto->side_effects)
992644c3
JH
4503 {
4504 cur_summary_lto->side_effects = true;
4505 changed = true;
4506 }
4507 return changed;
4508 }
4509
8d3abf42
JH
4510 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
4511 || (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
4512 || nontrivial_scc)
992644c3
JH
4513 {
4514 if (cur_summary && !cur_summary->side_effects)
4515 {
4516 cur_summary->side_effects = true;
4517 changed = true;
4518 }
4519 if (cur_summary_lto && !cur_summary_lto->side_effects)
4520 {
4521 cur_summary_lto->side_effects = true;
4522 changed = true;
4523 }
a34edf9a
JH
4524 if (cur_summary && !cur_summary->nondeterministic
4525 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4526 {
4527 cur_summary->nondeterministic = true;
4528 changed = true;
4529 }
4530 if (cur_summary_lto && !cur_summary_lto->nondeterministic
4531 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4532 {
4533 cur_summary_lto->nondeterministic = true;
4534 changed = true;
4535 }
992644c3 4536 }
8d3abf42
JH
4537 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
4538 return changed;
992644c3 4539
6cef01c3
JH
4540 if (fnspec_sum
4541 && compute_parm_map (e, &parm_map))
4542 {
4543 attr_fnspec fnspec (fnspec_sum->fnspec);
4544
4545 gcc_checking_assert (fnspec.known_p ());
4546 if (fnspec.global_memory_read_p ())
4547 collapse_loads (cur_summary, cur_summary_lto);
4548 else
4549 {
4550 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
4551 for (unsigned i = 0; i < parm_map.length () && t;
4552 i++, t = TREE_CHAIN (t))
4553 if (!POINTER_TYPE_P (TREE_VALUE (t)))
4554 ;
4555 else if (!fnspec.arg_specified_p (i)
4556 || fnspec.arg_maybe_read_p (i))
4557 {
4558 modref_parm_map map = parm_map[i];
1f3a3363 4559 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
6cef01c3 4560 continue;
1f3a3363 4561 if (map.parm_index == MODREF_UNKNOWN_PARM)
6cef01c3
JH
4562 {
4563 collapse_loads (cur_summary, cur_summary_lto);
4564 break;
4565 }
4566 if (cur_summary)
4567 changed |= cur_summary->loads->insert
8632f8c6
JH
4568 (node->decl, 0, 0,
4569 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4570 if (cur_summary_lto)
4571 changed |= cur_summary_lto->loads->insert
8632f8c6
JH
4572 (node->decl, 0, 0,
4573 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4574 }
4575 }
4576 if (ignore_stores_p (node->decl, ecf_flags))
4577 ;
4578 else if (fnspec.global_memory_written_p ())
4579 collapse_stores (cur_summary, cur_summary_lto);
4580 else
4581 {
4582 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
4583 for (unsigned i = 0; i < parm_map.length () && t;
4584 i++, t = TREE_CHAIN (t))
4585 if (!POINTER_TYPE_P (TREE_VALUE (t)))
4586 ;
4587 else if (!fnspec.arg_specified_p (i)
4588 || fnspec.arg_maybe_written_p (i))
4589 {
4590 modref_parm_map map = parm_map[i];
1f3a3363 4591 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
6cef01c3 4592 continue;
1f3a3363 4593 if (map.parm_index == MODREF_UNKNOWN_PARM)
6cef01c3
JH
4594 {
4595 collapse_stores (cur_summary, cur_summary_lto);
4596 break;
4597 }
4598 if (cur_summary)
4599 changed |= cur_summary->stores->insert
8632f8c6
JH
4600 (node->decl, 0, 0,
4601 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4602 if (cur_summary_lto)
4603 changed |= cur_summary_lto->stores->insert
8632f8c6
JH
4604 (node->decl, 0, 0,
4605 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4606 }
4607 }
4608 if (fnspec.errno_maybe_written_p () && flag_errno_math)
4609 {
4610 if (cur_summary && !cur_summary->writes_errno)
4611 {
4612 cur_summary->writes_errno = true;
4613 changed = true;
4614 }
4615 if (cur_summary_lto && !cur_summary_lto->writes_errno)
4616 {
4617 cur_summary_lto->writes_errno = true;
4618 changed = true;
4619 }
4620 }
4621 return changed;
4622 }
85ebbabd
JH
4623 if (dump_file)
4624 fprintf (dump_file, " collapsing loads\n");
4625 changed |= collapse_loads (cur_summary, cur_summary_lto);
4626 if (!ignore_stores_p (node->decl, ecf_flags))
6cef01c3
JH
4627 {
4628 if (dump_file)
85ebbabd
JH
4629 fprintf (dump_file, " collapsing stores\n");
4630 changed |= collapse_stores (cur_summary, cur_summary_lto);
6cef01c3 4631 }
85ebbabd 4632 return changed;
ada353b8 4633}
d119f34c 4634
02c80893 4635/* Maybe remove summaries of NODE pointed to by CUR_SUMMARY_PTR
85ebbabd
JH
4636 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
4637
4638static void
4639remove_useless_summaries (cgraph_node *node,
4640 modref_summary **cur_summary_ptr,
4641 modref_summary_lto **cur_summary_lto_ptr,
4642 int ecf_flags)
4643{
4644 if (*cur_summary_ptr && !(*cur_summary_ptr)->useful_p (ecf_flags, false))
4645 {
4646 optimization_summaries->remove (node);
4647 *cur_summary_ptr = NULL;
4648 }
4649 if (*cur_summary_lto_ptr
4650 && !(*cur_summary_lto_ptr)->useful_p (ecf_flags, false))
4651 {
4652 summaries_lto->remove (node);
4653 *cur_summary_lto_ptr = NULL;
4654 }
4655}
4656
4657/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4658 and propagate loads/stores. */
ada353b8 4659
494bdadf 4660static bool
ada353b8
JH
4661modref_propagate_in_scc (cgraph_node *component_node)
4662{
4663 bool changed = true;
5c85f295 4664 bool first = true;
ada353b8
JH
4665 int iteration = 0;
4666
4667 while (changed)
4668 {
8d3abf42
JH
4669 bool nontrivial_scc
4670 = ((struct ipa_dfs_info *) component_node->aux)->next_cycle;
ada353b8
JH
4671 changed = false;
4672 for (struct cgraph_node *cur = component_node; cur;
d119f34c
JH
4673 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4674 {
ada353b8 4675 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
71dbabcc
JH
4676 modref_summary *cur_summary = optimization_summaries
4677 ? optimization_summaries->get (node)
4678 : NULL;
4679 modref_summary_lto *cur_summary_lto = summaries_lto
4680 ? summaries_lto->get (node)
4681 : NULL;
4682
4683 if (!cur_summary && !cur_summary_lto)
ada353b8
JH
4684 continue;
4685
85ebbabd
JH
4686 int cur_ecf_flags = flags_from_decl_or_type (node->decl);
4687
ada353b8
JH
4688 if (dump_file)
4689 fprintf (dump_file, " Processing %s%s%s\n",
4690 cur->dump_name (),
4691 TREE_READONLY (cur->decl) ? " (const)" : "",
4692 DECL_PURE_P (cur->decl) ? " (pure)" : "");
d119f34c 4693
d119f34c
JH
4694 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
4695 {
6cef01c3 4696 if (dump_file)
8d3abf42 4697 fprintf (dump_file, " Indirect call\n");
85ebbabd 4698 if (propagate_unknown_call
6cef01c3 4699 (node, e, e->indirect_info->ecf_flags,
8d3abf42
JH
4700 cur_summary, cur_summary_lto,
4701 nontrivial_scc))
85ebbabd
JH
4702 {
4703 changed = true;
4704 remove_useless_summaries (node, &cur_summary,
4705 &cur_summary_lto,
4706 cur_ecf_flags);
4707 if (!cur_summary && !cur_summary_lto)
4708 break;
4709 }
d119f34c
JH
4710 }
4711
71dbabcc 4712 if (!cur_summary && !cur_summary_lto)
ada353b8
JH
4713 continue;
4714
d119f34c
JH
4715 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
4716 callee_edge = callee_edge->next_callee)
4717 {
4718 int flags = flags_from_decl_or_type (callee_edge->callee->decl);
71dbabcc
JH
4719 modref_summary *callee_summary = NULL;
4720 modref_summary_lto *callee_summary_lto = NULL;
d119f34c
JH
4721 struct cgraph_node *callee;
4722
8d3abf42
JH
4723 if (!callee_edge->inline_failed
4724 || ((flags & (ECF_CONST | ECF_NOVOPS))
4725 && !(flags & ECF_LOOPING_CONST_OR_PURE)))
d119f34c
JH
4726 continue;
4727
d119f34c
JH
4728 /* Get the callee and its summary. */
4729 enum availability avail;
c87ff875 4730 callee = callee_edge->callee->ultimate_alias_target
d119f34c
JH
4731 (&avail, cur);
4732
ada353b8
JH
4733 /* It is not necessary to re-process calls outside of the
4734 SCC component. */
4735 if (iteration > 0
4736 && (!callee->aux
4737 || ((struct ipa_dfs_info *)cur->aux)->scc_no
4738 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
4739 continue;
4740
4741 if (dump_file)
4742 fprintf (dump_file, " Call to %s\n",
4743 callee_edge->callee->dump_name ());
d119f34c
JH
4744
4745 bool ignore_stores = ignore_stores_p (cur->decl, flags);
4746
71dbabcc 4747 if (avail <= AVAIL_INTERPOSABLE)
d119f34c 4748 {
6cef01c3
JH
4749 if (dump_file)
4750 fprintf (dump_file, " Call target interposable"
4751 " or not available\n");
4752 changed |= propagate_unknown_call
4753 (node, callee_edge, flags,
8d3abf42
JH
4754 cur_summary, cur_summary_lto,
4755 nontrivial_scc);
6cef01c3
JH
4756 if (!cur_summary && !cur_summary_lto)
4757 break;
4758 continue;
71dbabcc
JH
4759 }
4760
4761 /* We don't know anything about CALLEE, hence we cannot tell
4762 anything about the entire component. */
4763
4764 if (cur_summary
4765 && !(callee_summary = optimization_summaries->get (callee)))
4766 {
6cef01c3
JH
4767 if (dump_file)
4768 fprintf (dump_file, " No call target summary\n");
4769 changed |= propagate_unknown_call
4770 (node, callee_edge, flags,
8d3abf42
JH
4771 cur_summary, NULL,
4772 nontrivial_scc);
71dbabcc
JH
4773 }
4774 if (cur_summary_lto
4775 && !(callee_summary_lto = summaries_lto->get (callee)))
4776 {
6cef01c3
JH
4777 if (dump_file)
4778 fprintf (dump_file, " No call target summary\n");
4779 changed |= propagate_unknown_call
4780 (node, callee_edge, flags,
8d3abf42
JH
4781 NULL, cur_summary_lto,
4782 nontrivial_scc);
d119f34c
JH
4783 }
4784
8d3abf42
JH
4785 if (callee_summary && !cur_summary->side_effects
4786 && (callee_summary->side_effects
4787 || callee_edge->recursive_p ()))
4788 {
4789 cur_summary->side_effects = true;
4790 changed = true;
4791 }
4792 if (callee_summary_lto && !cur_summary_lto->side_effects
4793 && (callee_summary_lto->side_effects
4794 || callee_edge->recursive_p ()))
4795 {
4796 cur_summary_lto->side_effects = true;
4797 changed = true;
4798 }
a34edf9a
JH
4799 if (callee_summary && !cur_summary->nondeterministic
4800 && callee_summary->nondeterministic
4801 && !ignore_nondeterminism_p (cur->decl, flags))
4802 {
4803 cur_summary->nondeterministic = true;
4804 changed = true;
4805 }
4806 if (callee_summary_lto && !cur_summary_lto->nondeterministic
4807 && callee_summary_lto->nondeterministic
4808 && !ignore_nondeterminism_p (cur->decl, flags))
4809 {
4810 cur_summary_lto->nondeterministic = true;
4811 changed = true;
4812 }
8d3abf42
JH
4813 if (flags & (ECF_CONST | ECF_NOVOPS))
4814 continue;
4815
ada353b8
JH
4816 /* We can not safely optimize based on summary of callee if it
4817 does not always bind to current def: it is possible that
4818 memory load was optimized out earlier which may not happen in
4819 the interposed variant. */
4820 if (!callee_edge->binds_to_current_def_p ())
4821 {
a34edf9a
JH
4822 if (cur_summary && !cur_summary->calls_interposable)
4823 {
4824 cur_summary->calls_interposable = true;
4825 changed = true;
4826 }
4827 if (cur_summary_lto && !cur_summary_lto->calls_interposable)
4828 {
4829 cur_summary_lto->calls_interposable = true;
4830 changed = true;
4831 }
ada353b8
JH
4832 if (dump_file)
4833 fprintf (dump_file, " May not bind local;"
4834 " collapsing loads\n");
4835 }
4836
4837
c34db4b6 4838 auto_vec <modref_parm_map, 32> parm_map;
1f3a3363
JH
4839 modref_parm_map chain_map;
4840 /* TODO: Once we get jump functions for static chains we could
4841 compute this. */
4842 chain_map.parm_index = MODREF_UNKNOWN_PARM;
ada353b8
JH
4843
4844 compute_parm_map (callee_edge, &parm_map);
c33f4742 4845
d119f34c 4846 /* Merge in callee's information. */
71dbabcc
JH
4847 if (callee_summary)
4848 {
56cb815b 4849 changed |= cur_summary->loads->merge
8632f8c6
JH
4850 (node->decl, callee_summary->loads,
4851 &parm_map, &chain_map, !first);
56cb815b 4852 if (!ignore_stores)
6cef01c3
JH
4853 {
4854 changed |= cur_summary->stores->merge
8632f8c6
JH
4855 (node->decl, callee_summary->stores,
4856 &parm_map, &chain_map, !first);
6cef01c3
JH
4857 if (!cur_summary->writes_errno
4858 && callee_summary->writes_errno)
4859 {
4860 cur_summary->writes_errno = true;
4861 changed = true;
4862 }
4863 }
71dbabcc
JH
4864 }
4865 if (callee_summary_lto)
4866 {
56cb815b 4867 changed |= cur_summary_lto->loads->merge
8632f8c6
JH
4868 (node->decl, callee_summary_lto->loads,
4869 &parm_map, &chain_map, !first);
56cb815b 4870 if (!ignore_stores)
6cef01c3
JH
4871 {
4872 changed |= cur_summary_lto->stores->merge
8632f8c6
JH
4873 (node->decl, callee_summary_lto->stores,
4874 &parm_map, &chain_map, !first);
6cef01c3
JH
4875 if (!cur_summary_lto->writes_errno
4876 && callee_summary_lto->writes_errno)
4877 {
4878 cur_summary_lto->writes_errno = true;
4879 changed = true;
4880 }
4881 }
71dbabcc 4882 }
85ebbabd
JH
4883 if (changed)
4884 remove_useless_summaries (node, &cur_summary,
4885 &cur_summary_lto,
4886 cur_ecf_flags);
4887 if (!cur_summary && !cur_summary_lto)
4888 break;
ada353b8 4889 if (dump_file && changed)
71dbabcc
JH
4890 {
4891 if (cur_summary)
4892 cur_summary->dump (dump_file);
4893 if (cur_summary_lto)
4894 cur_summary_lto->dump (dump_file);
85ebbabd 4895 dump_modref_edge_summaries (dump_file, node, 4);
71dbabcc 4896 }
d119f34c
JH
4897 }
4898 }
ada353b8 4899 iteration++;
5c85f295 4900 first = false;
ada353b8 4901 }
ada353b8 4902 if (dump_file)
85ebbabd
JH
4903 fprintf (dump_file,
4904 "Propagation finished in %i iterations\n", iteration);
494bdadf
JH
4905 bool pureconst = false;
4906 for (struct cgraph_node *cur = component_node; cur;
4907 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4908 if (!cur->inlined_to && opt_for_fn (cur->decl, flag_ipa_pure_const))
4909 {
4910 modref_summary *summary = optimization_summaries
4911 ? optimization_summaries->get (cur)
4912 : NULL;
4913 modref_summary_lto *summary_lto = summaries_lto
4914 ? summaries_lto->get (cur)
4915 : NULL;
1b62cddc 4916 if (summary && !summary->stores->every_base && !summary->stores->bases
a34edf9a 4917 && !summary->nondeterministic)
494bdadf 4918 {
a34edf9a
JH
4919 if (!summary->loads->every_base && !summary->loads->bases
4920 && !summary->calls_interposable)
494bdadf
JH
4921 pureconst |= ipa_make_function_const
4922 (cur, summary->side_effects, false);
4923 else
4924 pureconst |= ipa_make_function_pure
4925 (cur, summary->side_effects, false);
4926 }
4927 if (summary_lto && !summary_lto->stores->every_base
a34edf9a 4928 && !summary_lto->stores->bases && !summary_lto->nondeterministic)
494bdadf 4929 {
a34edf9a
JH
4930 if (!summary_lto->loads->every_base && !summary_lto->loads->bases
4931 && !summary_lto->calls_interposable)
494bdadf
JH
4932 pureconst |= ipa_make_function_const
4933 (cur, summary_lto->side_effects, false);
4934 else
4935 pureconst |= ipa_make_function_pure
4936 (cur, summary_lto->side_effects, false);
4937 }
4938 }
4939 return pureconst;
85ebbabd
JH
4940}
4941
4942/* Dump results of propagation in SCC rooted in COMPONENT_NODE. */
4943
4944static void
4945modref_propagate_dump_scc (cgraph_node *component_node)
4946{
4947 for (struct cgraph_node *cur = component_node; cur;
4948 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4949 if (!cur->inlined_to)
4950 {
4951 modref_summary *cur_summary = optimization_summaries
4952 ? optimization_summaries->get (cur)
4953 : NULL;
4954 modref_summary_lto *cur_summary_lto = summaries_lto
4955 ? summaries_lto->get (cur)
4956 : NULL;
4957
4958 fprintf (dump_file, "Propagated modref for %s%s%s\n",
4959 cur->dump_name (),
4960 TREE_READONLY (cur->decl) ? " (const)" : "",
4961 DECL_PURE_P (cur->decl) ? " (pure)" : "");
4962 if (optimization_summaries)
4963 {
4964 if (cur_summary)
4965 cur_summary->dump (dump_file);
4966 else
4967 fprintf (dump_file, " Not tracked\n");
4968 }
4969 if (summaries_lto)
4970 {
4971 if (cur_summary_lto)
4972 cur_summary_lto->dump (dump_file);
4973 else
4974 fprintf (dump_file, " Not tracked (lto)\n");
4975 }
4976 }
4977}
4978
16e85390
JH
4979/* Determine EAF flags know for call E with CALLEE_ECF_FLAGS and ARG. */
4980
4981int
4982implicit_eaf_flags_for_edge_and_arg (cgraph_edge *e, int callee_ecf_flags,
4983 bool ignore_stores, int arg)
4984{
4985 /* Returning the value is already accounted to at local propagation. */
4986 int implicit_flags = EAF_NOT_RETURNED_DIRECTLY
4987 | EAF_NOT_RETURNED_INDIRECTLY;
4988 if (ignore_stores)
4989 implicit_flags |= ignore_stores_eaf_flags;
4990 if (callee_ecf_flags & ECF_PURE)
4991 implicit_flags |= implicit_pure_eaf_flags;
4992 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
4993 implicit_flags |= implicit_const_eaf_flags;
4994 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4995 if (fnspec_sum)
4996 {
4997 attr_fnspec fnspec (fnspec_sum->fnspec);
4998 implicit_flags |= fnspec.arg_eaf_flags (arg);
4999 }
5000 return implicit_flags;
5001}
5002
85ebbabd
JH
5003/* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
5004 and SUMMARY_LTO to CUR_SUMMARY_LTO.
5005 Return true if something changed. */
5006
5007static bool
5008modref_merge_call_site_flags (escape_summary *sum,
5009 modref_summary *cur_summary,
5010 modref_summary_lto *cur_summary_lto,
5011 modref_summary *summary,
5012 modref_summary_lto *summary_lto,
4341b1b1 5013 tree caller,
f6f704fd
JH
5014 cgraph_edge *e,
5015 int caller_ecf_flags,
5016 int callee_ecf_flags,
5017 bool binds_to_current_def)
85ebbabd
JH
5018{
5019 escape_entry *ee;
5020 unsigned int i;
5021 bool changed = false;
f6f704fd 5022 bool ignore_stores = ignore_stores_p (caller, callee_ecf_flags);
85ebbabd 5023
fcbf94a5
JH
5024 /* Return early if we have no useful info to propagate. */
5025 if ((!cur_summary
5026 || (!cur_summary->arg_flags.length ()
5027 && !cur_summary->static_chain_flags
5028 && !cur_summary->retslot_flags))
5029 && (!cur_summary_lto
5030 || (!cur_summary_lto->arg_flags.length ()
5031 && !cur_summary_lto->static_chain_flags
5032 && !cur_summary_lto->retslot_flags)))
85ebbabd
JH
5033 return false;
5034
5035 FOR_EACH_VEC_ELT (sum->esc, i, ee)
ada353b8 5036 {
85ebbabd
JH
5037 int flags = 0;
5038 int flags_lto = 0;
16e85390
JH
5039 int implicit_flags = implicit_eaf_flags_for_edge_and_arg
5040 (e, callee_ecf_flags, ignore_stores, ee->arg);
85ebbabd
JH
5041
5042 if (summary && ee->arg < summary->arg_flags.length ())
5043 flags = summary->arg_flags[ee->arg];
5044 if (summary_lto
5045 && ee->arg < summary_lto->arg_flags.length ())
5046 flags_lto = summary_lto->arg_flags[ee->arg];
5047 if (!ee->direct)
5048 {
5049 flags = deref_flags (flags, ignore_stores);
5050 flags_lto = deref_flags (flags_lto, ignore_stores);
5051 }
f6f704fd
JH
5052 if (ignore_stores)
5053 implicit_flags |= ignore_stores_eaf_flags;
5054 if (callee_ecf_flags & ECF_PURE)
5055 implicit_flags |= implicit_pure_eaf_flags;
5056 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
5057 implicit_flags |= implicit_const_eaf_flags;
5058 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
5059 if (fnspec_sum)
85ebbabd 5060 {
f6f704fd 5061 attr_fnspec fnspec (fnspec_sum->fnspec);
e2dd12ab 5062 implicit_flags |= fnspec.arg_eaf_flags (ee->arg);
f6f704fd
JH
5063 }
5064 if (!ee->direct)
5065 implicit_flags = deref_flags (implicit_flags, ignore_stores);
5066 flags |= implicit_flags;
5067 flags_lto |= implicit_flags;
5068 if (!binds_to_current_def && (flags || flags_lto))
5069 {
5070 flags = interposable_eaf_flags (flags, implicit_flags);
5071 flags_lto = interposable_eaf_flags (flags_lto, implicit_flags);
85ebbabd 5072 }
3350e59f 5073 if (!(flags & EAF_UNUSED)
b8ef019a 5074 && cur_summary && ee->parm_index < (int)cur_summary->arg_flags.length ())
85ebbabd 5075 {
1f3a3363 5076 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
b8ef019a 5077 ? cur_summary->retslot_flags
1f3a3363 5078 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
a70c0512 5079 ? cur_summary->static_chain_flags
b8ef019a 5080 : cur_summary->arg_flags[ee->parm_index];
85ebbabd
JH
5081 if ((f & flags) != f)
5082 {
4341b1b1 5083 f = remove_useless_eaf_flags
f6f704fd 5084 (f & flags, caller_ecf_flags,
4341b1b1 5085 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
85ebbabd
JH
5086 changed = true;
5087 }
5088 }
3350e59f
JH
5089 if (!(flags_lto & EAF_UNUSED)
5090 && cur_summary_lto
b8ef019a 5091 && ee->parm_index < (int)cur_summary_lto->arg_flags.length ())
85ebbabd 5092 {
1f3a3363 5093 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
b8ef019a 5094 ? cur_summary_lto->retslot_flags
1f3a3363 5095 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
a70c0512 5096 ? cur_summary_lto->static_chain_flags
b8ef019a 5097 : cur_summary_lto->arg_flags[ee->parm_index];
85ebbabd
JH
5098 if ((f & flags_lto) != f)
5099 {
4341b1b1 5100 f = remove_useless_eaf_flags
f6f704fd 5101 (f & flags_lto, caller_ecf_flags,
4341b1b1 5102 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
85ebbabd
JH
5103 changed = true;
5104 }
5105 }
5106 }
5107 return changed;
5108}
5109
5110/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
5111 and propagate arg flags. */
5112
5113static void
5114modref_propagate_flags_in_scc (cgraph_node *component_node)
5115{
5116 bool changed = true;
5117 int iteration = 0;
5118
5119 while (changed)
5120 {
5121 changed = false;
ada353b8 5122 for (struct cgraph_node *cur = component_node; cur;
d119f34c 5123 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
85ebbabd
JH
5124 {
5125 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
5126 modref_summary *cur_summary = optimization_summaries
5127 ? optimization_summaries->get (node)
5128 : NULL;
5129 modref_summary_lto *cur_summary_lto = summaries_lto
5130 ? summaries_lto->get (node)
5131 : NULL;
5132
5133 if (!cur_summary && !cur_summary_lto)
5134 continue;
f6f704fd 5135 int caller_ecf_flags = flags_from_decl_or_type (cur->decl);
85ebbabd
JH
5136
5137 if (dump_file)
5138 fprintf (dump_file, " Processing %s%s%s\n",
ada353b8
JH
5139 cur->dump_name (),
5140 TREE_READONLY (cur->decl) ? " (const)" : "",
5141 DECL_PURE_P (cur->decl) ? " (pure)" : "");
85ebbabd
JH
5142
5143 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
5144 {
5145 escape_summary *sum = escape_summaries->get (e);
5146
5147 if (!sum || (e->indirect_info->ecf_flags
5148 & (ECF_CONST | ECF_NOVOPS)))
5149 continue;
5150
5151 changed |= modref_merge_call_site_flags
5152 (sum, cur_summary, cur_summary_lto,
4341b1b1 5153 NULL, NULL,
f6f704fd
JH
5154 node->decl,
5155 e,
5156 caller_ecf_flags,
5157 e->indirect_info->ecf_flags,
5158 false);
85ebbabd
JH
5159 }
5160
5161 if (!cur_summary && !cur_summary_lto)
5162 continue;
5163
5164 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
5165 callee_edge = callee_edge->next_callee)
5166 {
4341b1b1
JH
5167 int ecf_flags = flags_from_decl_or_type
5168 (callee_edge->callee->decl);
85ebbabd
JH
5169 modref_summary *callee_summary = NULL;
5170 modref_summary_lto *callee_summary_lto = NULL;
5171 struct cgraph_node *callee;
5172
4341b1b1 5173 if (ecf_flags & (ECF_CONST | ECF_NOVOPS)
85ebbabd
JH
5174 || !callee_edge->inline_failed)
5175 continue;
16e85390 5176
85ebbabd
JH
5177 /* Get the callee and its summary. */
5178 enum availability avail;
c87ff875 5179 callee = callee_edge->callee->ultimate_alias_target
85ebbabd
JH
5180 (&avail, cur);
5181
5182 /* It is not necessary to re-process calls outside of the
5183 SCC component. */
5184 if (iteration > 0
5185 && (!callee->aux
5186 || ((struct ipa_dfs_info *)cur->aux)->scc_no
5187 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
5188 continue;
5189
5190 escape_summary *sum = escape_summaries->get (callee_edge);
5191 if (!sum)
5192 continue;
5193
5194 if (dump_file)
5195 fprintf (dump_file, " Call to %s\n",
5196 callee_edge->callee->dump_name ());
5197
5198 if (avail <= AVAIL_INTERPOSABLE
5199 || callee_edge->call_stmt_cannot_inline_p)
5200 ;
5201 else
5202 {
5203 if (cur_summary)
5204 callee_summary = optimization_summaries->get (callee);
5205 if (cur_summary_lto)
5206 callee_summary_lto = summaries_lto->get (callee);
5207 }
5208 changed |= modref_merge_call_site_flags
5209 (sum, cur_summary, cur_summary_lto,
5210 callee_summary, callee_summary_lto,
f6f704fd
JH
5211 node->decl,
5212 callee_edge,
5213 caller_ecf_flags,
5214 ecf_flags,
5215 callee->binds_to_current_def_p ());
85ebbabd
JH
5216 if (dump_file && changed)
5217 {
5218 if (cur_summary)
5219 cur_summary->dump (dump_file);
5220 if (cur_summary_lto)
5221 cur_summary_lto->dump (dump_file);
5222 }
5223 }
5224 }
5225 iteration++;
5226 }
5227 if (dump_file)
5228 fprintf (dump_file,
5229 "Propagation of flags finished in %i iterations\n", iteration);
ada353b8
JH
5230}
5231
18f0873d
JH
5232} /* ANON namespace. */
5233
5234/* Call EDGE was inlined; merge summary from callee to the caller. */
5235
5236void
5237ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
5238{
5239 if (!summaries && !summaries_lto)
5240 return;
5241
5242 struct cgraph_node *to = (edge->caller->inlined_to
5243 ? edge->caller->inlined_to : edge->caller);
5244 class modref_summary *to_info = summaries ? summaries->get (to) : NULL;
5245 class modref_summary_lto *to_info_lto = summaries_lto
5246 ? summaries_lto->get (to) : NULL;
5247
5248 if (!to_info && !to_info_lto)
5249 {
5250 if (summaries)
5251 summaries->remove (edge->callee);
5252 if (summaries_lto)
5253 summaries_lto->remove (edge->callee);
5254 remove_modref_edge_summaries (edge->callee);
5255 return;
5256 }
5257
5258 class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
5259 : NULL;
5260 class modref_summary_lto *callee_info_lto
5261 = summaries_lto ? summaries_lto->get (edge->callee) : NULL;
5262 int flags = flags_from_decl_or_type (edge->callee->decl);
16e85390 5263 /* Combine in outer flags. */
2cadaa1f
JH
5264 cgraph_node *n;
5265 for (n = edge->caller; n->inlined_to; n = n->callers->caller)
5266 flags |= flags_from_decl_or_type (n->decl);
5267 flags |= flags_from_decl_or_type (n->decl);
18f0873d
JH
5268 bool ignore_stores = ignore_stores_p (edge->caller->decl, flags);
5269
5270 if (!callee_info && to_info)
5271 {
5272 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
5273 to_info->loads->collapse ();
5274 if (!ignore_stores)
5275 to_info->stores->collapse ();
5276 }
5277 if (!callee_info_lto && to_info_lto)
5278 {
5279 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
5280 to_info_lto->loads->collapse ();
5281 if (!ignore_stores)
5282 to_info_lto->stores->collapse ();
5283 }
aabb9a26
JH
5284 /* Merge side effects and non-determinism.
5285 PURE/CONST flags makes functions deterministic and if there is
5286 no LOOPING_CONST_OR_PURE they also have no side effects. */
5287 if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
5288 || (flags & ECF_LOOPING_CONST_OR_PURE))
5289 {
5290 if (to_info)
5291 {
5292 if (!callee_info || callee_info->side_effects)
5293 to_info->side_effects = true;
5294 if ((!callee_info || callee_info->nondeterministic)
5295 && !ignore_nondeterminism_p (edge->caller->decl, flags))
5296 to_info->nondeterministic = true;
5297 }
5298 if (to_info_lto)
5299 {
5300 if (!callee_info_lto || callee_info_lto->side_effects)
5301 to_info_lto->side_effects = true;
5302 if ((!callee_info_lto || callee_info_lto->nondeterministic)
5303 && !ignore_nondeterminism_p (edge->caller->decl, flags))
5304 to_info_lto->nondeterministic = true;
5305 }
5306 }
18f0873d
JH
5307 if (callee_info || callee_info_lto)
5308 {
5309 auto_vec <modref_parm_map, 32> parm_map;
1f3a3363
JH
5310 modref_parm_map chain_map;
5311 /* TODO: Once we get jump functions for static chains we could
74a4ece0 5312 compute parm_index. */
18f0873d
JH
5313
5314 compute_parm_map (edge, &parm_map);
5315
5316 if (!ignore_stores)
5317 {
5318 if (to_info && callee_info)
8632f8c6 5319 to_info->stores->merge (to->decl, callee_info->stores, &parm_map,
1f3a3363 5320 &chain_map, false);
18f0873d 5321 if (to_info_lto && callee_info_lto)
8632f8c6
JH
5322 to_info_lto->stores->merge (to->decl, callee_info_lto->stores,
5323 &parm_map, &chain_map, false);
18f0873d
JH
5324 }
5325 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
5326 {
5327 if (to_info && callee_info)
8632f8c6 5328 to_info->loads->merge (to->decl, callee_info->loads, &parm_map,
1f3a3363 5329 &chain_map, false);
18f0873d 5330 if (to_info_lto && callee_info_lto)
8632f8c6
JH
5331 to_info_lto->loads->merge (to->decl, callee_info_lto->loads,
5332 &parm_map, &chain_map, false);
18f0873d
JH
5333 }
5334 }
5335
5336 /* Now merge escape summaries.
02c80893
JJ
5337 For every escape to the callee we need to merge callee flags
5338 and remap callee's escapes. */
18f0873d
JH
5339 class escape_summary *sum = escape_summaries->get (edge);
5340 int max_escape = -1;
5341 escape_entry *ee;
5342 unsigned int i;
5343
5344 if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
5345 FOR_EACH_VEC_ELT (sum->esc, i, ee)
5346 if ((int)ee->arg > max_escape)
5347 max_escape = ee->arg;
5348
5349 auto_vec <vec <struct escape_map>, 32> emap (max_escape + 1);
5350 emap.safe_grow (max_escape + 1, true);
5351 for (i = 0; (int)i < max_escape + 1; i++)
5352 emap[i] = vNULL;
5353
5354 if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
5355 FOR_EACH_VEC_ELT (sum->esc, i, ee)
5356 {
5357 bool needed = false;
16e85390
JH
5358 int implicit_flags = implicit_eaf_flags_for_edge_and_arg
5359 (edge, flags, ignore_stores,
5360 ee->arg);
5361 if (!ee->direct)
5362 implicit_flags = deref_flags (implicit_flags, ignore_stores);
18f0873d
JH
5363 if (to_info && (int)to_info->arg_flags.length () > ee->parm_index)
5364 {
5365 int flags = callee_info
5366 && callee_info->arg_flags.length () > ee->arg
5367 ? callee_info->arg_flags[ee->arg] : 0;
5368 if (!ee->direct)
5369 flags = deref_flags (flags, ignore_stores);
16e85390
JH
5370 flags |= ee->min_flags | implicit_flags;
5371 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
5372 ? to_info->retslot_flags
5373 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
5374 ? to_info->static_chain_flags
5375 : to_info->arg_flags[ee->parm_index];
5376 f &= flags;
5377 if (f)
18f0873d
JH
5378 needed = true;
5379 }
5380 if (to_info_lto
62af7d94 5381 && (int)to_info_lto->arg_flags.length () > ee->parm_index)
18f0873d
JH
5382 {
5383 int flags = callee_info_lto
5384 && callee_info_lto->arg_flags.length () > ee->arg
5385 ? callee_info_lto->arg_flags[ee->arg] : 0;
5386 if (!ee->direct)
5387 flags = deref_flags (flags, ignore_stores);
16e85390
JH
5388 flags |= ee->min_flags | implicit_flags;
5389 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
5390 ? to_info_lto->retslot_flags
5391 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
5392 ? to_info_lto->static_chain_flags
5393 : to_info_lto->arg_flags[ee->parm_index];
5394 f &= flags;
5395 if (f)
18f0873d
JH
5396 needed = true;
5397 }
5398 struct escape_map entry = {ee->parm_index, ee->direct};
5399 if (needed)
5400 emap[ee->arg].safe_push (entry);
5401 }
5402 update_escape_summary (edge->callee, emap, ignore_stores);
5403 for (i = 0; (int)i < max_escape + 1; i++)
5404 emap[i].release ();
5405 if (sum)
5406 escape_summaries->remove (edge);
5407
5408 if (summaries)
5409 {
5410 if (to_info && !to_info->useful_p (flags))
5411 {
5412 if (dump_file)
5413 fprintf (dump_file, "Removed mod-ref summary for %s\n",
5414 to->dump_name ());
5415 summaries->remove (to);
5416 to_info = NULL;
5417 }
5418 else if (to_info && dump_file)
5419 {
5420 if (dump_file)
5421 fprintf (dump_file, "Updated mod-ref summary for %s\n",
5422 to->dump_name ());
5423 to_info->dump (dump_file);
5424 }
5425 if (callee_info)
5426 summaries->remove (edge->callee);
5427 }
5428 if (summaries_lto)
5429 {
5430 if (to_info_lto && !to_info_lto->useful_p (flags))
5431 {
5432 if (dump_file)
5433 fprintf (dump_file, "Removed mod-ref summary for %s\n",
5434 to->dump_name ());
5435 summaries_lto->remove (to);
8c693978 5436 to_info_lto = NULL;
18f0873d
JH
5437 }
5438 else if (to_info_lto && dump_file)
5439 {
5440 if (dump_file)
5441 fprintf (dump_file, "Updated mod-ref summary for %s\n",
5442 to->dump_name ());
5443 to_info_lto->dump (dump_file);
18f0873d
JH
5444 }
5445 if (callee_info_lto)
5446 summaries_lto->remove (edge->callee);
5447 }
5448 if (!to_info && !to_info_lto)
5449 remove_modref_edge_summaries (to);
5450 return;
5451}
5452
ada353b8
JH
5453/* Run the IPA pass. This will take a function's summaries and calls and
5454 construct new summaries which represent a transitive closure. So that
5455 summary of an analyzed function contains information about the loads and
5456 stores that the function or any function that it calls does. */
5457
5458unsigned int
5459pass_ipa_modref::execute (function *)
5460{
71dbabcc 5461 if (!summaries && !summaries_lto)
ada353b8 5462 return 0;
494bdadf 5463 bool pureconst = false;
ada353b8 5464
71dbabcc
JH
5465 if (optimization_summaries)
5466 ggc_delete (optimization_summaries);
5467 optimization_summaries = summaries;
5468 summaries = NULL;
5469
ada353b8
JH
5470 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
5471 symtab->cgraph_count);
5472 int order_pos;
5473 order_pos = ipa_reduced_postorder (order, true, ignore_edge);
5474 int i;
5475
5476 /* Iterate over all strongly connected components in post-order. */
5477 for (i = 0; i < order_pos; i++)
5478 {
5479 /* Get the component's representative. That's just any node in the
5480 component from which we can traverse the entire component. */
5481 struct cgraph_node *component_node = order[i];
5482
5483 if (dump_file)
5484 fprintf (dump_file, "\n\nStart of SCC component\n");
5485
494bdadf 5486 pureconst |= modref_propagate_in_scc (component_node);
85ebbabd 5487 modref_propagate_flags_in_scc (component_node);
e0040bc3
JH
5488 if (optimization_summaries)
5489 for (struct cgraph_node *cur = component_node; cur;
5490 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
5491 if (modref_summary *sum = optimization_summaries->get (cur))
5aa91072 5492 sum->finalize (cur->decl);
85ebbabd
JH
5493 if (dump_file)
5494 modref_propagate_dump_scc (component_node);
d119f34c 5495 }
fe90c504
JH
5496 cgraph_node *node;
5497 FOR_EACH_FUNCTION (node)
5498 update_signature (node);
71dbabcc
JH
5499 if (summaries_lto)
5500 ((modref_summaries_lto *)summaries_lto)->propagated = true;
d119f34c 5501 ipa_free_postorder_info ();
a0e6e49d 5502 free (order);
6cef01c3
JH
5503 delete fnspec_summaries;
5504 fnspec_summaries = NULL;
85ebbabd
JH
5505 delete escape_summaries;
5506 escape_summaries = NULL;
494bdadf 5507
02c80893 5508 /* If we possibly made constructors const/pure we may need to remove
494bdadf
JH
5509 them. */
5510 return pureconst ? TODO_remove_functions : 0;
d119f34c
JH
5511}
5512
39b3b1bd
JH
5513/* Summaries must stay alive until end of compilation. */
5514
5515void
d5148d4f 5516ipa_modref_cc_finalize ()
39b3b1bd 5517{
71dbabcc
JH
5518 if (optimization_summaries)
5519 ggc_delete (optimization_summaries);
5520 optimization_summaries = NULL;
71dbabcc 5521 if (summaries_lto)
85ebbabd
JH
5522 ggc_delete (summaries_lto);
5523 summaries_lto = NULL;
6cef01c3
JH
5524 if (fnspec_summaries)
5525 delete fnspec_summaries;
5526 fnspec_summaries = NULL;
85ebbabd
JH
5527 if (escape_summaries)
5528 delete escape_summaries;
5529 escape_summaries = NULL;
39b3b1bd
JH
5530}
5531
d119f34c 5532#include "gt-ipa-modref.h"