d119f34c 1/* Search for references that a function loads or stores.
aeee4812 2 Copyright (C) 2020-2023 Free Software Foundation, Inc.
d119f34c
JH
3 Contributed by David Cepelik and Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21/* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
85ebbabd 23 accesses across function calls.
d119f34c
JH
24
25 This file contains a tree pass and an IPA pass. Both perform the same
8a2fd716 26 analysis; however, the tree pass is executed during the early and late optimization
d119f34c
JH
27 passes to propagate info downwards in the compilation order. The IPA pass
28 propagates across the callgraph, is able to handle recursion, and works on the
29 whole program during link-time analysis.
30
46a27415 31 LTO mode differs from the local mode by not recording alias sets but types
d119f34c 32 that are translated to alias sets later. This is necessary in order to stream
46a27415 33 the information because the alias sets are rebuilt at stream-in time and may
85ebbabd
JH
34 not correspond to the ones seen during analysis. For this reason part of the
35 analysis is duplicated.
36
37 The following information is computed
38 1) load/store access tree described in ipa-modref-tree.h
11056ab7 39 This is used by tree-ssa-alias to disambiguate loads and stores
02c80893 40 2) EAF flags used by points-to analysis (in tree-ssa-structalias).
85ebbabd
JH
41 and defined in tree-core.h.
42 Both are stored to optimization_summaries.
43
44 There are multiple summaries computed and used during the propagation:
45 - summaries holds summaries from analysis to IPA propagation
46 time.
47 - summaries_lto is the same as summaries but holds them in a format
48 that can be streamed (as described above).
49 - fnspec_summary holds fnspec strings for calls. This is
50 necessary because gimple_call_fnspec performs additional
51 analysis besides looking at the callee fndecl.
52 - escape_summary holds escape points for a given call edge.
02c80893 53 That is a vector recording what function parameters
85ebbabd 54 may escape to a function call (and with what parameter index). */
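/* Illustrative sketch (added for exposition; the function below is
   hypothetical, not part of GCC): for

     int sum (const int *a, int *out) { *out = a[0] + a[1]; return a[0]; }

   the local pass would record loads based at parameter 0 and a store based
   at parameter 1, each with a known parm_offset, together with EAF flags
   such as the fact that neither pointer escapes.  Alias analysis in callers
   can then conclude that a call to sum cannot clobber unrelated memory.  */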
d119f34c
JH
55
56#include "config.h"
57#include "system.h"
58#include "coretypes.h"
59#include "backend.h"
60#include "tree.h"
61#include "gimple.h"
62#include "alloc-pool.h"
63#include "tree-pass.h"
64#include "gimple-iterator.h"
65#include "tree-dfa.h"
66#include "cgraph.h"
67#include "ipa-utils.h"
68#include "symbol-summary.h"
69#include "gimple-pretty-print.h"
70#include "gimple-walk.h"
71#include "print-tree.h"
72#include "tree-streamer.h"
73#include "alias.h"
74#include "calls.h"
75#include "ipa-modref-tree.h"
76#include "ipa-modref.h"
e977dd5e
JH
77#include "value-range.h"
78#include "ipa-prop.h"
79#include "ipa-fnsummary.h"
617695cd 80#include "attr-fnspec.h"
ae7a23a3 81#include "symtab-clones.h"
520d5ad3
JH
82#include "gimple-ssa.h"
83#include "tree-phinodes.h"
84#include "tree-ssa-operands.h"
85#include "ssa-iterators.h"
86#include "stringpool.h"
87#include "tree-ssanames.h"
008e7397 88#include "attribs.h"
b8ef019a 89#include "tree-cfg.h"
992644c3 90#include "tree-eh.h"
520d5ad3 91
8da8ed43 92
85ebbabd 93namespace {
d119f34c 94
6cef01c3
JH
95/* We record fnspec specifiers for call edges since they depend on the actual
96 gimple statements. */
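/* A minimal usage sketch (assuming the call_summary API used later in this
   file; E is a call edge and STR the raw fnspec string recovered from the
   statement):

     fnspec_summaries->get_create (e)->fnspec = xstrdup (str);

   Keeping the string per edge lets IPA propagation and LTO streaming use it
   without having to re-inspect the gimple call statement.  */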
97
98class fnspec_summary
99{
100public:
101 char *fnspec;
102
103 fnspec_summary ()
104 : fnspec (NULL)
105 {
106 }
107
108 ~fnspec_summary ()
109 {
110 free (fnspec);
111 }
112};
113
114/* Summary holding fnspec string for a given call. */
115
116class fnspec_summaries_t : public call_summary <fnspec_summary *>
117{
118public:
119 fnspec_summaries_t (symbol_table *symtab)
120 : call_summary <fnspec_summary *> (symtab) {}
121 /* Hook that is called by summary when an edge is duplicated. */
f31ba116
DM
122 void duplicate (cgraph_edge *,
123 cgraph_edge *,
124 fnspec_summary *src,
125 fnspec_summary *dst) final override
6cef01c3
JH
126 {
127 dst->fnspec = xstrdup (src->fnspec);
128 }
129};
130
131static fnspec_summaries_t *fnspec_summaries = NULL;
132
85ebbabd
JH
133/* Escape summary holds a vector of param indexes that escape to
134 a given call. */
135struct escape_entry
136{
137 /* Parameter that escapes at a given call. */
b8ef019a 138 int parm_index;
85ebbabd
JH
139 /* Argument it escapes to. */
140 unsigned int arg;
141 /* Minimal flags known about the argument. */
8da8ed43 142 eaf_flags_t min_flags;
85ebbabd
JH
143 /* Does it escape directly or indirectly? */
144 bool direct;
145};
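/* Example (hypothetical caller and callee, for illustration only): in

     void foo (int *p) { bar (p); }

   parameter 0 of foo escapes directly as argument 0 of the call to bar, so
   the escape summary of that call edge would contain one escape_entry with
   parm_index == 0, arg == 0, direct == true and min_flags holding whatever
   EAF flags are already known for that argument.  */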
146
147/* Dump EAF flags. */
148
149static void
150dump_eaf_flags (FILE *out, int flags, bool newline = true)
151{
85ebbabd
JH
152 if (flags & EAF_UNUSED)
153 fprintf (out, " unused");
d70ef656
JH
154 if (flags & EAF_NO_DIRECT_CLOBBER)
155 fprintf (out, " no_direct_clobber");
156 if (flags & EAF_NO_INDIRECT_CLOBBER)
157 fprintf (out, " no_indirect_clobber");
158 if (flags & EAF_NO_DIRECT_ESCAPE)
159 fprintf (out, " no_direct_escape");
160 if (flags & EAF_NO_INDIRECT_ESCAPE)
161 fprintf (out, " no_indirect_escape");
f1979156
JH
162 if (flags & EAF_NOT_RETURNED_DIRECTLY)
163 fprintf (out, " not_returned_directly");
d70ef656
JH
164 if (flags & EAF_NOT_RETURNED_INDIRECTLY)
165 fprintf (out, " not_returned_indirectly");
166 if (flags & EAF_NO_DIRECT_READ)
167 fprintf (out, " no_direct_read");
168 if (flags & EAF_NO_INDIRECT_READ)
169 fprintf (out, " no_indirect_read");
85ebbabd
JH
170 if (newline)
171 fprintf (out, "\n");
172}
173
174struct escape_summary
175{
176 auto_vec <escape_entry> esc;
177 void dump (FILE *out)
178 {
179 for (unsigned int i = 0; i < esc.length (); i++)
180 {
181 fprintf (out, " parm %i arg %i %s min:",
182 esc[i].parm_index,
183 esc[i].arg,
184 esc[i].direct ? "(direct)" : "(indirect)");
185 dump_eaf_flags (out, esc[i].min_flags, false);
186 }
187 fprintf (out, "\n");
188 }
189};
190
191class escape_summaries_t : public call_summary <escape_summary *>
192{
193public:
194 escape_summaries_t (symbol_table *symtab)
195 : call_summary <escape_summary *> (symtab) {}
196 /* Hook that is called by summary when an edge is duplicated. */
f31ba116
DM
197 void duplicate (cgraph_edge *,
198 cgraph_edge *,
199 escape_summary *src,
200 escape_summary *dst) final override
85ebbabd
JH
201 {
202 dst->esc = src->esc.copy ();
203 }
204};
205
206static escape_summaries_t *escape_summaries = NULL;
207
208} /* ANON namespace: GTY annotated summaries can not be anonymous. */
209
210
d119f34c
JH
211/* Class (from which there is one global instance) that holds modref summaries
212 for all analyzed functions. */
6cef01c3 213
d119f34c
JH
214class GTY((user)) modref_summaries
215 : public fast_function_summary <modref_summary *, va_gc>
216{
217public:
218 modref_summaries (symbol_table *symtab)
219 : fast_function_summary <modref_summary *, va_gc> (symtab) {}
f31ba116
DM
220 void insert (cgraph_node *, modref_summary *state) final override;
221 void duplicate (cgraph_node *src_node,
222 cgraph_node *dst_node,
223 modref_summary *src_data,
224 modref_summary *dst_data) final override;
c9da53d6
JH
225 static modref_summaries *create_ggc (symbol_table *symtab)
226 {
227 return new (ggc_alloc_no_dtor<modref_summaries> ())
228 modref_summaries (symtab);
229 }
d119f34c
JH
230};
231
71dbabcc
JH
232class modref_summary_lto;
233
234/* Class (from which there is one global instance) that holds modref summaries
235 in the LTO-streamable representation for all analyzed functions. */
6cef01c3 236
71dbabcc
JH
237class GTY((user)) modref_summaries_lto
238 : public fast_function_summary <modref_summary_lto *, va_gc>
239{
240public:
241 modref_summaries_lto (symbol_table *symtab)
242 : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
243 propagated (false) {}
f31ba116
DM
244 void insert (cgraph_node *, modref_summary_lto *state) final override;
245 void duplicate (cgraph_node *src_node,
246 cgraph_node *dst_node,
247 modref_summary_lto *src_data,
248 modref_summary_lto *dst_data) final override;
71dbabcc
JH
249 static modref_summaries_lto *create_ggc (symbol_table *symtab)
250 {
251 return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
252 modref_summaries_lto (symtab);
253 }
254 bool propagated;
255};
256
257/* Global variable holding all modref summaries
258 (from analysis to IPA propagation time). */
6cef01c3 259
71dbabcc
JH
260static GTY(()) fast_function_summary <modref_summary *, va_gc>
261 *summaries;
262
8a2fd716 263/* Global variable holding all modref optimization summaries
71dbabcc 264 (from IPA propagation time or used by local optimization pass). */
6cef01c3 265
71dbabcc
JH
266static GTY(()) fast_function_summary <modref_summary *, va_gc>
267 *optimization_summaries;
268
269/* LTO summaries hold info from analysis to LTO streaming or from LTO
270 stream-in through propagation to LTO stream-out. */
6cef01c3 271
71dbabcc
JH
272static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
273 *summaries_lto;
d119f34c
JH
274
275/* Summary for a single function which this pass produces. */
276
277modref_summary::modref_summary ()
a70c0512 278 : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
a34edf9a
JH
279 writes_errno (false), side_effects (false), nondeterministic (false),
280 calls_interposable (false), global_memory_read (false),
5aa91072 281 global_memory_written (false), try_dse (false)
d119f34c
JH
282{
283}
284
285modref_summary::~modref_summary ()
286{
287 if (loads)
288 ggc_delete (loads);
289 if (stores)
290 ggc_delete (stores);
d119f34c
JH
291}
292
4341b1b1
JH
293/* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
294 useful to track. If RETURNS_VOID is true, additionally clear
295 EAF_NOT_RETURNED. */
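/* A worked example (informal sketch):

     remove_useless_eaf_flags (EAF_NOT_RETURNED_DIRECTLY | EAF_NO_DIRECT_READ,
                               ECF_NORETURN, false)

   drops EAF_NOT_RETURNED_DIRECTLY, since the value of a noreturn call is
   never used, but keeps EAF_NO_DIRECT_READ, which still carries useful
   information about the argument.  */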
296static int
297remove_useless_eaf_flags (int eaf_flags, int ecf_flags, bool returns_void)
298{
f6f704fd 299 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
4341b1b1
JH
300 eaf_flags &= ~implicit_const_eaf_flags;
301 else if (ecf_flags & ECF_PURE)
302 eaf_flags &= ~implicit_pure_eaf_flags;
303 else if ((ecf_flags & ECF_NORETURN) || returns_void)
d70ef656 304 eaf_flags &= ~(EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY);
4341b1b1
JH
305 return eaf_flags;
306}
307
85ebbabd
JH
308/* Return true if FLAGS holds some useful information. */
309
310static bool
8da8ed43 311eaf_flags_useful_p (vec <eaf_flags_t> &flags, int ecf_flags)
85ebbabd
JH
312{
313 for (unsigned i = 0; i < flags.length (); i++)
4341b1b1
JH
314 if (remove_useless_eaf_flags (flags[i], ecf_flags, false))
315 return true;
85ebbabd
JH
316 return false;
317}
318
319/* Return true if summary is potentially useful for optimization.
320 If CHECK_FLAGS is false assume that arg_flags are useful. */
67c935c8
JH
321
322bool
85ebbabd 323modref_summary::useful_p (int ecf_flags, bool check_flags)
67c935c8 324{
85ebbabd 325 if (arg_flags.length () && !check_flags)
520d5ad3 326 return true;
85ebbabd
JH
327 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
328 return true;
329 arg_flags.release ();
b8ef019a
JH
330 if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
331 return true;
a70c0512
JH
332 if (check_flags
333 && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
334 return true;
f6f704fd 335 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
a34edf9a
JH
336 return ((!side_effects || !nondeterministic)
337 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
71dbabcc 338 if (loads && !loads->every_base)
67c935c8 339 return true;
64f3e71c
JH
340 else
341 kills.release ();
67c935c8 342 if (ecf_flags & ECF_PURE)
a34edf9a
JH
343 return ((!side_effects || !nondeterministic)
344 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
71dbabcc 345 return stores && !stores->every_base;
67c935c8
JH
346}
347
71dbabcc
JH
348/* Single function summary used for LTO. */
349
350typedef modref_tree <tree> modref_records_lto;
351struct GTY(()) modref_summary_lto
352{
353 /* Loads and stores in the function, recorded using types rather than alias sets.
354
355 This is necessary to make the information streamable for LTO but is also
356 more verbose and thus more likely to hit the limits. */
357 modref_records_lto *loads;
358 modref_records_lto *stores;
64f3e71c 359 auto_vec<modref_access_node> GTY((skip)) kills;
8da8ed43 360 auto_vec<eaf_flags_t> GTY((skip)) arg_flags;
b8ef019a 361 eaf_flags_t retslot_flags;
a70c0512 362 eaf_flags_t static_chain_flags;
a34edf9a
JH
363 unsigned writes_errno : 1;
364 unsigned side_effects : 1;
365 unsigned nondeterministic : 1;
366 unsigned calls_interposable : 1;
71dbabcc
JH
367
368 modref_summary_lto ();
369 ~modref_summary_lto ();
370 void dump (FILE *);
85ebbabd 371 bool useful_p (int ecf_flags, bool check_flags = true);
71dbabcc
JH
372};
373
374/* Summary for a single function which this pass produces. */
375
376modref_summary_lto::modref_summary_lto ()
a70c0512 377 : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
a34edf9a
JH
378 writes_errno (false), side_effects (false), nondeterministic (false),
379 calls_interposable (false)
71dbabcc
JH
380{
381}
382
383modref_summary_lto::~modref_summary_lto ()
384{
385 if (loads)
386 ggc_delete (loads);
387 if (stores)
388 ggc_delete (stores);
389}
390
391
85ebbabd
JH
392/* Return true if lto summary is potentially useful for optimization.
393 If CHECK_FLAGS is false assume that arg_flags are useful. */
67c935c8
JH
394
395bool
85ebbabd 396modref_summary_lto::useful_p (int ecf_flags, bool check_flags)
67c935c8 397{
85ebbabd
JH
398 if (arg_flags.length () && !check_flags)
399 return true;
400 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
401 return true;
402 arg_flags.release ();
b8ef019a
JH
403 if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
404 return true;
a70c0512
JH
405 if (check_flags
406 && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
407 return true;
f6f704fd 408 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
a34edf9a
JH
409 return ((!side_effects || !nondeterministic)
410 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
67c935c8
JH
411 if (loads && !loads->every_base)
412 return true;
74509b96
JH
413 else
414 kills.release ();
67c935c8 415 if (ecf_flags & ECF_PURE)
a34edf9a
JH
416 return ((!side_effects || !nondeterministic)
417 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
e24817aa 418 return stores && !stores->every_base;
67c935c8
JH
419}
420
d119f34c
JH
421/* Dump records TT to OUT. */
422
423static void
424dump_records (modref_records *tt, FILE *out)
425{
d119f34c
JH
426 if (tt->every_base)
427 {
428 fprintf (out, " Every base\n");
429 return;
430 }
431 size_t i;
432 modref_base_node <alias_set_type> *n;
433 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
434 {
435 fprintf (out, " Base %i: alias set %i\n", (int)i, n->base);
436 if (n->every_ref)
437 {
438 fprintf (out, " Every ref\n");
439 continue;
440 }
441 size_t j;
442 modref_ref_node <alias_set_type> *r;
443 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
444 {
445 fprintf (out, " Ref %i: alias set %i\n", (int)j, r->ref);
c33f4742
JH
446 if (r->every_access)
447 {
ada353b8 448 fprintf (out, " Every access\n");
c33f4742
JH
449 continue;
450 }
451 size_t k;
452 modref_access_node *a;
453 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
e30bf330
JH
454 {
455 fprintf (out, " access:");
456 a->dump (out);
457 }
d119f34c
JH
458 }
459 }
460}
461
462/* Dump LTO records TT to OUT. */
463
464static void
465dump_lto_records (modref_records_lto *tt, FILE *out)
466{
d119f34c
JH
467 if (tt->every_base)
468 {
469 fprintf (out, " Every base\n");
470 return;
471 }
472 size_t i;
473 modref_base_node <tree> *n;
474 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
475 {
476 fprintf (out, " Base %i:", (int)i);
477 print_generic_expr (out, n->base);
478 fprintf (out, " (alias set %i)\n",
9044db88 479 n->base ? get_alias_set (n->base) : 0);
d119f34c
JH
480 if (n->every_ref)
481 {
482 fprintf (out, " Every ref\n");
483 continue;
484 }
485 size_t j;
486 modref_ref_node <tree> *r;
487 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
488 {
489 fprintf (out, " Ref %i:", (int)j);
490 print_generic_expr (out, r->ref);
491 fprintf (out, " (alias set %i)\n",
9044db88 492 r->ref ? get_alias_set (r->ref) : 0);
c33f4742
JH
493 if (r->every_access)
494 {
56cb815b 495 fprintf (out, " Every access\n");
c33f4742
JH
496 continue;
497 }
498 size_t k;
499 modref_access_node *a;
500 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
e30bf330
JH
501 {
502 fprintf (out, " access:");
503 a->dump (out);
504 }
d119f34c
JH
505 }
506 }
507}
508
85ebbabd 509/* Dump all escape points and fnspec strings of NODE's call edges to OUT. */
520d5ad3
JH
510
511static void
85ebbabd 512dump_modref_edge_summaries (FILE *out, cgraph_node *node, int depth)
520d5ad3 513{
85ebbabd
JH
514 int i = 0;
515 if (!escape_summaries)
516 return;
517 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
518 {
519 class escape_summary *sum = escape_summaries->get (e);
520 if (sum)
521 {
522 fprintf (out, "%*sIndirect call %i in %s escapes:",
523 depth, "", i, node->dump_name ());
524 sum->dump (out);
525 }
526 i++;
527 }
528 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
529 {
530 if (!e->inline_failed)
531 dump_modref_edge_summaries (out, e->callee, depth + 1);
532 class escape_summary *sum = escape_summaries->get (e);
533 if (sum)
534 {
535 fprintf (out, "%*sCall %s->%s escapes:", depth, "",
536 node->dump_name (), e->callee->dump_name ());
537 sum->dump (out);
538 }
539 class fnspec_summary *fsum = fnspec_summaries->get (e);
540 if (fsum)
541 {
542 fprintf (out, "%*sCall %s->%s fnspec: %s\n", depth, "",
543 node->dump_name (), e->callee->dump_name (),
544 fsum->fnspec);
545 }
546 }
547}
548
549/* Remove all call edge summaries associated with NODE. */
550
551static void
552remove_modref_edge_summaries (cgraph_node *node)
553{
554 if (!escape_summaries)
555 return;
556 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
557 escape_summaries->remove (e);
558 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
559 {
560 if (!e->inline_failed)
561 remove_modref_edge_summaries (e->callee);
562 escape_summaries->remove (e);
563 fnspec_summaries->remove (e);
564 }
520d5ad3
JH
565}
566
d119f34c
JH
567/* Dump summary. */
568
569void
570modref_summary::dump (FILE *out)
571{
6cef01c3
JH
572 if (loads)
573 {
574 fprintf (out, " loads:\n");
575 dump_records (loads, out);
576 }
577 if (stores)
578 {
579 fprintf (out, " stores:\n");
580 dump_records (stores, out);
581 }
64f3e71c
JH
582 if (kills.length ())
583 {
584 fprintf (out, " kills:\n");
585 for (auto kill : kills)
586 {
587 fprintf (out, " ");
588 kill.dump (out);
589 }
590 }
617695cd
JH
591 if (writes_errno)
592 fprintf (out, " Writes errno\n");
992644c3
JH
593 if (side_effects)
594 fprintf (out, " Side effects\n");
a34edf9a
JH
595 if (nondeterministic)
596 fprintf (out, " Nondeterministic\n");
597 if (calls_interposable)
598 fprintf (out, " Calls interposable\n");
e0040bc3
JH
599 if (global_memory_read)
600 fprintf (out, " Global memory read\n");
601 if (global_memory_written)
602 fprintf (out, " Global memory written\n");
5aa91072
JH
603 if (try_dse)
604 fprintf (out, " Try dse\n");
520d5ad3
JH
605 if (arg_flags.length ())
606 {
607 for (unsigned int i = 0; i < arg_flags.length (); i++)
608 if (arg_flags[i])
609 {
610 fprintf (out, " parm %i flags:", i);
611 dump_eaf_flags (out, arg_flags[i]);
612 }
613 }
b8ef019a
JH
614 if (retslot_flags)
615 {
616 fprintf (out, " Retslot flags:");
617 dump_eaf_flags (out, retslot_flags);
618 }
a70c0512
JH
619 if (static_chain_flags)
620 {
621 fprintf (out, " Static chain flags:");
622 dump_eaf_flags (out, static_chain_flags);
623 }
71dbabcc
JH
624}
625
626/* Dump summary. */
627
628void
629modref_summary_lto::dump (FILE *out)
630{
56cb815b
JH
631 fprintf (out, " loads:\n");
632 dump_lto_records (loads, out);
633 fprintf (out, " stores:\n");
634 dump_lto_records (stores, out);
74509b96
JH
635 if (kills.length ())
636 {
637 fprintf (out, " kills:\n");
638 for (auto kill : kills)
639 {
640 fprintf (out, " ");
641 kill.dump (out);
642 }
643 }
6cef01c3
JH
644 if (writes_errno)
645 fprintf (out, " Writes errno\n");
992644c3
JH
646 if (side_effects)
647 fprintf (out, " Side effects\n");
a34edf9a
JH
648 if (nondeterministic)
649 fprintf (out, " Nondeterministic\n");
650 if (calls_interposable)
651 fprintf (out, " Calls interposable\n");
85ebbabd
JH
652 if (arg_flags.length ())
653 {
654 for (unsigned int i = 0; i < arg_flags.length (); i++)
655 if (arg_flags[i])
656 {
657 fprintf (out, " parm %i flags:", i);
658 dump_eaf_flags (out, arg_flags[i]);
659 }
660 }
b8ef019a
JH
661 if (retslot_flags)
662 {
663 fprintf (out, " Retslot flags:");
664 dump_eaf_flags (out, retslot_flags);
665 }
a70c0512
JH
666 if (static_chain_flags)
667 {
668 fprintf (out, " Static chain flags:");
669 dump_eaf_flags (out, static_chain_flags);
670 }
d119f34c
JH
671}
672
e0040bc3 673/* Called after summary is produced and before it is used by local analysis.
5aa91072
JH
674 Can be called multiple times in case the summary needs updating for a changed signature.
675 FUN is the decl of the function the summary is attached to. */
e0040bc3 676void
5aa91072 677modref_summary::finalize (tree fun)
e0040bc3
JH
678{
679 global_memory_read = !loads || loads->global_access_p ();
680 global_memory_written = !stores || stores->global_access_p ();
5aa91072
JH
681
682 /* We can do DSE if we know the function has no side effects and
02c80893 683 we can analyze all stores. Disable dse if there are too many
5aa91072
JH
684 stores to try. */
685 if (side_effects || global_memory_written || writes_errno)
686 try_dse = false;
687 else
688 {
689 try_dse = true;
690 size_t i, j, k;
691 int num_tests = 0, max_tests
8632f8c6 692 = opt_for_fn (fun, param_modref_max_tests);
5aa91072
JH
693 modref_base_node <alias_set_type> *base_node;
694 modref_ref_node <alias_set_type> *ref_node;
695 modref_access_node *access_node;
696 FOR_EACH_VEC_SAFE_ELT (stores->bases, i, base_node)
697 {
698 if (base_node->every_ref)
699 {
700 try_dse = false;
701 break;
702 }
703 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
704 {
705 if (base_node->every_ref)
706 {
707 try_dse = false;
708 break;
709 }
710 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
711 if (num_tests++ > max_tests
712 || !access_node->parm_offset_known)
713 {
714 try_dse = false;
715 break;
716 }
717 if (!try_dse)
718 break;
719 }
720 if (!try_dse)
721 break;
722 }
723 }
6180f5c8
RB
724 if (loads->every_base)
725 load_accesses = 1;
726 else
727 {
728 load_accesses = 0;
729 for (auto base_node : loads->bases)
730 {
731 if (base_node->every_ref)
732 load_accesses++;
733 else
734 for (auto ref_node : base_node->refs)
735 if (ref_node->every_access)
736 load_accesses++;
737 else
738 load_accesses += ref_node->accesses->length ();
739 }
740 }
e0040bc3
JH
741}
742
d119f34c
JH
743/* Get function summary for FUNC if it exists, return NULL otherwise. */
744
745modref_summary *
746get_modref_function_summary (cgraph_node *func)
747{
748 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
71dbabcc 749 if (!optimization_summaries)
d119f34c
JH
750 return NULL;
751
752 /* A single function body may be represented by multiple symbols with
753 different visibility. For example, if FUNC is an interposable alias,
754 we don't want to return anything, even if we have summary for the target
755 function. */
756 enum availability avail;
c87ff875 757 func = func->ultimate_alias_target
520d5ad3
JH
758 (&avail, current_function_decl ?
759 cgraph_node::get (current_function_decl) : NULL);
d119f34c
JH
760 if (avail <= AVAIL_INTERPOSABLE)
761 return NULL;
762
71dbabcc
JH
763 modref_summary *r = optimization_summaries->get (func);
764 return r;
d119f34c
JH
765}
766
6dc90c4d
JH
767/* Get function summary for CALL if it exists, return NULL otherwise.
768 If INTERPOSED is non-NULL, set it to indicate whether the function may not
769 bind to the current def. In that case loads from the function
770 sometimes need to be ignored. */
771
772modref_summary *
773get_modref_function_summary (gcall *call, bool *interposed)
774{
775 tree callee = gimple_call_fndecl (call);
776 if (!callee)
777 return NULL;
778 struct cgraph_node *node = cgraph_node::get (callee);
779 if (!node)
780 return NULL;
781 modref_summary *r = get_modref_function_summary (node);
782 if (interposed && r)
783 *interposed = r->calls_interposable
8632f8c6 784 || !node->binds_to_current_def_p ();
6dc90c4d
JH
785 return r;
786}
787
788
18f0873d
JH
789namespace {
790
02c80893 791/* Return true if ECF flags say that nondeterminism can be ignored. */
09a4ffb7
JH
792
793static bool
794ignore_nondeterminism_p (tree caller, int flags)
795{
796 if (flags & (ECF_CONST | ECF_PURE))
797 return true;
798 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
799 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
800 return true;
801 return false;
802}
803
804/* Return true if ECF flags say that the return value can be ignored. */
805
806static bool
807ignore_retval_p (tree caller, int flags)
808{
809 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
810 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
811 return true;
812 return false;
813}
814
815/* Return true if ECF flags say that stores can be ignored. */
816
817static bool
818ignore_stores_p (tree caller, int flags)
819{
820 if (flags & (ECF_PURE | ECF_CONST | ECF_NOVOPS))
821 return true;
822 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
823 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
824 return true;
825 return false;
826}
827
0f5afb62 828/* Determine parm_map for pointer OP. */
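/* Example (editor's sketch): if OP is the default-def SSA name of the
   second PARM_DECL plus a constant offset of 4 bytes, the resulting map has
   parm_index == 1, parm_offset == 4 and parm_offset_known == true.
   Pointers to local or freshly malloc'd memory map to
   MODREF_LOCAL_MEMORY_PARM, and anything else to MODREF_UNKNOWN_PARM.  */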
09a4ffb7
JH
829
830modref_parm_map
0f5afb62 831parm_map_for_ptr (tree op)
09a4ffb7
JH
832{
833 bool offset_known;
834 poly_int64 offset;
835 struct modref_parm_map parm_map;
0f5afb62 836 gcall *call;
09a4ffb7
JH
837
838 parm_map.parm_offset_known = false;
839 parm_map.parm_offset = 0;
840
841 offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
842 if (TREE_CODE (op) == SSA_NAME
843 && SSA_NAME_IS_DEFAULT_DEF (op)
844 && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
845 {
846 int index = 0;
0f5afb62
JH
847
848 if (cfun->static_chain_decl
849 && op == ssa_default_def (cfun, cfun->static_chain_decl))
850 index = MODREF_STATIC_CHAIN_PARM;
851 else
852 for (tree t = DECL_ARGUMENTS (current_function_decl);
853 t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
09a4ffb7 854 index++;
09a4ffb7
JH
855 parm_map.parm_index = index;
856 parm_map.parm_offset_known = offset_known;
857 parm_map.parm_offset = offset;
858 }
859 else if (points_to_local_or_readonly_memory_p (op))
860 parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
0f5afb62
JH
861 /* Memory allocated in the function is not visible to caller before the
862 call and thus we do not need to record it as load/stores/kills. */
863 else if (TREE_CODE (op) == SSA_NAME
864 && (call = dyn_cast<gcall *>(SSA_NAME_DEF_STMT (op))) != NULL
865 && gimple_call_flags (call) & ECF_MALLOC)
866 parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
09a4ffb7
JH
867 else
868 parm_map.parm_index = MODREF_UNKNOWN_PARM;
869 return parm_map;
870}
871
3305135c
JH
872/* Return true if ARG with EAF flags FLAGS cannot make any caller's parameter
873 used (if LOAD is true we check loads, otherwise stores). */
874
875static bool
876verify_arg (tree arg, int flags, bool load)
877{
878 if (flags & EAF_UNUSED)
879 return true;
880 if (load && (flags & EAF_NO_DIRECT_READ))
881 return true;
882 if (!load
883 && (flags & (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
884 == (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
885 return true;
886 if (is_gimple_constant (arg))
887 return true;
888 if (DECL_P (arg) && TREE_READONLY (arg))
889 return true;
890 if (TREE_CODE (arg) == ADDR_EXPR)
891 {
892 tree t = get_base_address (TREE_OPERAND (arg, 0));
893 if (is_gimple_constant (t))
894 return true;
895 if (DECL_P (t)
896 && (TREE_READONLY (t) || TREE_CODE (t) == FUNCTION_DECL))
897 return true;
898 }
899 return false;
900}
901
902/* Return true if STMT may access memory that is pointed to by parameters
903 of caller and which is not seen as an escape by PTA.
904 CALLEE_ECF_FLAGS are ECF flags of callee. If LOAD is true then by access
905 we mean load, otherwise we mean store. */
906
907static bool
908may_access_nonescaping_parm_p (gcall *call, int callee_ecf_flags, bool load)
909{
910 int implicit_flags = 0;
911
912 if (ignore_stores_p (current_function_decl, callee_ecf_flags))
913 implicit_flags |= ignore_stores_eaf_flags;
914 if (callee_ecf_flags & ECF_PURE)
915 implicit_flags |= implicit_pure_eaf_flags;
916 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
917 implicit_flags |= implicit_const_eaf_flags;
918 if (gimple_call_chain (call)
919 && !verify_arg (gimple_call_chain (call),
920 gimple_call_static_chain_flags (call) | implicit_flags,
921 load))
922 return true;
923 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
924 if (!verify_arg (gimple_call_arg (call, i),
925 gimple_call_arg_flags (call, i) | implicit_flags,
926 load))
927 return true;
928 return false;
929}
930
931
09a4ffb7
JH
932/* Analyze memory accesses (loads, stores and kills) performed
933 by the function. Also set the side_effects, calls_interposable
934 and nondeterministic flags. */
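/* Typical driver sketch (assuming the way the analysis is invoked later in
   this file; SUMMARY or SUMMARY_LTO may be NULL when not needed):

     modref_access_analysis analyzer (ipa, summary, summary_lto);
     analyzer.analyze ();

   analyze () walks every statement of cfun, filling the load/store trees,
   the kills and the side_effects/nondeterministic flags of the summaries.  */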
935
936class modref_access_analysis
937{
938public:
939 modref_access_analysis (bool ipa, modref_summary *summary,
940 modref_summary_lto *summary_lto)
941 : m_summary (summary), m_summary_lto (summary_lto), m_ipa (ipa)
942 {
943 }
944 void analyze ();
945private:
946 bool set_side_effects ();
947 bool set_nondeterministic ();
948 static modref_access_node get_access (ao_ref *ref);
949 static void record_access (modref_records *, ao_ref *, modref_access_node &);
950 static void record_access_lto (modref_records_lto *, ao_ref *,
951 modref_access_node &a);
952 bool record_access_p (tree);
953 bool record_unknown_load ();
954 bool record_unknown_store ();
3305135c
JH
955 bool record_global_memory_load ();
956 bool record_global_memory_store ();
09a4ffb7
JH
957 bool merge_call_side_effects (gimple *, modref_summary *,
958 cgraph_node *, bool);
959 modref_access_node get_access_for_fnspec (gcall *, attr_fnspec &,
960 unsigned int, modref_parm_map &);
961 void process_fnspec (gcall *);
962 void analyze_call (gcall *);
963 static bool analyze_load (gimple *, tree, tree, void *);
964 static bool analyze_store (gimple *, tree, tree, void *);
965 void analyze_stmt (gimple *, bool);
966 void propagate ();
967
968 /* Summary being computed.
02c80893 969 We work either with m_summary or m_summary_lto. Never on both. */
09a4ffb7
JH
970 modref_summary *m_summary;
971 modref_summary_lto *m_summary_lto;
02c80893 972 /* Recursive calls need simplistic dataflow after the analysis finishes.
09a4ffb7
JH
973 Collect all calls into this vector during analysis and later process
974 them in propagate. */
975 auto_vec <gimple *, 32> m_recursive_calls;
02c80893 976 /* ECF flags of function being analyzed. */
09a4ffb7
JH
977 int m_ecf_flags;
978 /* True if IPA propagation will be done later. */
979 bool m_ipa;
02c80893 980 /* Set true if the statement currently being analyzed is known to be
09a4ffb7
JH
981 executed each time function is called. */
982 bool m_always_executed;
983};
984
02c80893 985/* Set the side_effects flag and return true if something changed. */
09a4ffb7
JH
986
987bool
988modref_access_analysis::set_side_effects ()
989{
990 bool changed = false;
991
992 if (m_summary && !m_summary->side_effects)
993 {
994 m_summary->side_effects = true;
995 changed = true;
996 }
997 if (m_summary_lto && !m_summary_lto->side_effects)
998 {
999 m_summary_lto->side_effects = true;
1000 changed = true;
1001 }
1002 return changed;
1003}
1004
02c80893 1005/* Set the nondeterministic flag and return true if something changed. */
09a4ffb7
JH
1006
1007bool
1008modref_access_analysis::set_nondeterministic ()
1009{
1010 bool changed = false;
1011
1012 if (m_summary && !m_summary->nondeterministic)
1013 {
1014 m_summary->side_effects = m_summary->nondeterministic = true;
1015 changed = true;
1016 }
1017 if (m_summary_lto && !m_summary_lto->nondeterministic)
1018 {
1019 m_summary_lto->side_effects = m_summary_lto->nondeterministic = true;
1020 changed = true;
1021 }
1022 return changed;
1023}
1024
c33f4742 1025/* Construct modref_access_node from REF. */
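/* Example (editor's illustration): for a reference whose base is
   MEM_REF (p_2, 8) with p_2 derived from the first pointer parameter, the
   returned node keeps offset/size/max_size from the ao_ref and sets
   parm_index == 0 with the MEM_REF offset folded into parm_offset; when the
   base cannot be tied to a parameter, parm_index is MODREF_UNKNOWN_PARM.  */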
09a4ffb7
JH
1026
1027modref_access_node
1028modref_access_analysis::get_access (ao_ref *ref)
c33f4742 1029{
c33f4742
JH
1030 tree base;
1031
c34db4b6
JH
1032 base = ao_ref_base (ref);
1033 modref_access_node a = {ref->offset, ref->size, ref->max_size,
1f3a3363 1034 0, MODREF_UNKNOWN_PARM, false, 0};
c33f4742
JH
1035 if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
1036 {
2bdf324f 1037 tree memref = base;
0f5afb62 1038 modref_parm_map m = parm_map_for_ptr (TREE_OPERAND (base, 0));
1f3a3363 1039
0f5afb62
JH
1040 a.parm_index = m.parm_index;
1041 if (a.parm_index != MODREF_UNKNOWN_PARM && TREE_CODE (memref) == MEM_REF)
1f3a3363
JH
1042 {
1043 a.parm_offset_known
1044 = wi::to_poly_wide (TREE_OPERAND
1045 (memref, 1)).to_shwi (&a.parm_offset);
0f5afb62
JH
1046 if (a.parm_offset_known && m.parm_offset_known)
1047 a.parm_offset += m.parm_offset;
1048 else
1049 a.parm_offset_known = false;
c33f4742 1050 }
c33f4742
JH
1051 }
1052 else
1f3a3363 1053 a.parm_index = MODREF_UNKNOWN_PARM;
c33f4742
JH
1054 return a;
1055}
1056
d119f34c
JH
1057/* Record access into the modref_records data structure. */
1058
09a4ffb7
JH
1059void
1060modref_access_analysis::record_access (modref_records *tt,
1061 ao_ref *ref,
1062 modref_access_node &a)
d119f34c 1063{
16c84809
JH
1064 alias_set_type base_set = !flag_strict_aliasing
1065 || !flag_ipa_strict_aliasing ? 0
d119f34c 1066 : ao_ref_base_alias_set (ref);
16c84809
JH
1067 alias_set_type ref_set = !flag_strict_aliasing
1068 || !flag_ipa_strict_aliasing ? 0
d119f34c
JH
1069 : (ao_ref_alias_set (ref));
1070 if (dump_file)
1071 {
4898e958
JH
1072 fprintf (dump_file, " - Recording base_set=%i ref_set=%i ",
1073 base_set, ref_set);
e30bf330 1074 a.dump (dump_file);
d119f34c 1075 }
8632f8c6 1076 tt->insert (current_function_decl, base_set, ref_set, a, false);
d119f34c
JH
1077}
1078
1079/* IPA version of record_access_tree. */
1080
09a4ffb7
JH
1081void
1082modref_access_analysis::record_access_lto (modref_records_lto *tt, ao_ref *ref,
1083 modref_access_node &a)
d119f34c
JH
1084{
1085 /* get_alias_set sometimes use different type to compute the alias set
1086 than TREE_TYPE (base). Do same adjustments. */
1087 tree base_type = NULL_TREE, ref_type = NULL_TREE;
16c84809 1088 if (flag_strict_aliasing && flag_ipa_strict_aliasing)
d119f34c
JH
1089 {
1090 tree base;
1091
1092 base = ref->ref;
1093 while (handled_component_p (base))
1094 base = TREE_OPERAND (base, 0);
1095
1096 base_type = reference_alias_ptr_type_1 (&base);
1097
1098 if (!base_type)
1099 base_type = TREE_TYPE (base);
1100 else
1101 base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
1102 ? NULL_TREE : TREE_TYPE (base_type);
1103
1104 tree ref_expr = ref->ref;
1105 ref_type = reference_alias_ptr_type_1 (&ref_expr);
1106
1107 if (!ref_type)
1108 ref_type = TREE_TYPE (ref_expr);
1109 else
1110 ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
1111 ? NULL_TREE : TREE_TYPE (ref_type);
1112
1113 /* Sanity check that we are in sync with what get_alias_set does. */
1114 gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
1115 || get_alias_set (base_type)
1116 == ao_ref_base_alias_set (ref));
1117 gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
1118 || get_alias_set (ref_type)
1119 == ao_ref_alias_set (ref));
1120
1121 /* Do not bother to record types that have no meaningful alias set.
1122 Also skip variably modified types since these go to local streams. */
1123 if (base_type && (!get_alias_set (base_type)
1124 || variably_modified_type_p (base_type, NULL_TREE)))
1125 base_type = NULL_TREE;
1126 if (ref_type && (!get_alias_set (ref_type)
1127 || variably_modified_type_p (ref_type, NULL_TREE)))
1128 ref_type = NULL_TREE;
1129 }
1130 if (dump_file)
1131 {
1132 fprintf (dump_file, " - Recording base type:");
1133 print_generic_expr (dump_file, base_type);
1134 fprintf (dump_file, " (alias set %i) ref type:",
1135 base_type ? get_alias_set (base_type) : 0);
1136 print_generic_expr (dump_file, ref_type);
4898e958
JH
1137 fprintf (dump_file, " (alias set %i) ",
1138 ref_type ? get_alias_set (ref_type) : 0);
e30bf330 1139 a.dump (dump_file);
d119f34c
JH
1140 }
1141
8632f8c6 1142 tt->insert (current_function_decl, base_type, ref_type, a, false);
d119f34c
JH
1143}
1144
1145/* Returns true if and only if we should store the access to EXPR.
1146 Some accesses, e.g. loads from automatic variables, are not interesting. */
1147
09a4ffb7
JH
1148bool
1149modref_access_analysis::record_access_p (tree expr)
d119f34c 1150{
09a4ffb7
JH
1151 if (TREE_THIS_VOLATILE (expr))
1152 {
1153 if (dump_file)
1154 fprintf (dump_file, " (volatile; marking nondeterministic) ");
1155 set_nondeterministic ();
1156 }
1157 if (cfun->can_throw_non_call_exceptions
1158 && tree_could_throw_p (expr))
1159 {
1160 if (dump_file)
1161 fprintf (dump_file, " (can throw; marking side effects) ");
1162 set_side_effects ();
1163 }
1164
e977dd5e 1165 if (refs_local_or_readonly_memory_p (expr))
d119f34c
JH
1166 {
1167 if (dump_file)
e977dd5e 1168 fprintf (dump_file, " - Read-only or local, ignoring.\n");
d119f34c
JH
1169 return false;
1170 }
d119f34c
JH
1171 return true;
1172}
1173
09a4ffb7 1174/* Collapse loads and return true if something changed. */
85ebbabd 1175
09a4ffb7
JH
1176bool
1177modref_access_analysis::record_unknown_load ()
85ebbabd 1178{
09a4ffb7 1179 bool changed = false;
d119f34c 1180
09a4ffb7
JH
1181 if (m_summary && !m_summary->loads->every_base)
1182 {
1183 m_summary->loads->collapse ();
1184 changed = true;
1185 }
1186 if (m_summary_lto && !m_summary_lto->loads->every_base)
1187 {
1188 m_summary_lto->loads->collapse ();
1189 changed = true;
1190 }
1191 return changed;
d119f34c
JH
1192}
1193
09a4ffb7 1194/* Collapse stores and return true if something changed. */
617695cd 1195
09a4ffb7
JH
1196bool
1197modref_access_analysis::record_unknown_store ()
617695cd 1198{
09a4ffb7 1199 bool changed = false;
ea937e7d 1200
09a4ffb7 1201 if (m_summary && !m_summary->stores->every_base)
617695cd 1202 {
09a4ffb7
JH
1203 m_summary->stores->collapse ();
1204 changed = true;
617695cd 1205 }
09a4ffb7
JH
1206 if (m_summary_lto && !m_summary_lto->stores->every_base)
1207 {
1208 m_summary_lto->stores->collapse ();
1209 changed = true;
1210 }
1211 return changed;
617695cd
JH
1212}
1213
02c80893 1214/* Record unknown load from global memory. */
3305135c
JH
1215
1216bool
1217modref_access_analysis::record_global_memory_load ()
1218{
1219 bool changed = false;
1220 modref_access_node a = {0, -1, -1,
1221 0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
1222
1223 if (m_summary && !m_summary->loads->every_base)
1224 changed |= m_summary->loads->insert (current_function_decl, 0, 0, a, false);
1225 if (m_summary_lto && !m_summary_lto->loads->every_base)
1226 changed |= m_summary_lto->loads->insert (current_function_decl,
1227 0, 0, a, false);
1228 return changed;
1229}
1230
02c80893 1231/* Record unknown store to global memory. */
3305135c
JH
1232
1233bool
1234modref_access_analysis::record_global_memory_store ()
1235{
1236 bool changed = false;
1237 modref_access_node a = {0, -1, -1,
1238 0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
1239
1240 if (m_summary && !m_summary->stores->every_base)
1241 changed |= m_summary->stores->insert (current_function_decl,
1242 0, 0, a, false);
1243 if (m_summary_lto && !m_summary_lto->stores->every_base)
1244 changed |= m_summary_lto->stores->insert (current_function_decl,
1245 0, 0, a, false);
1246 return changed;
1247}
1248
09a4ffb7
JH
1249/* Merge side effects of call STMT to function with CALLEE_SUMMARY.
1250 Return true if something changed.
5c85f295
JH
1251 Stores are not merged when the call's ECF flags allow them to be ignored.
1252 If RECORD_ADJUSTMENTS is true cap the number of adjustments to
1253 a given access to make the dataflow finite. */
ada353b8
JH
1254
1255bool
09a4ffb7
JH
1256modref_access_analysis::merge_call_side_effects
1257 (gimple *stmt, modref_summary *callee_summary,
1258 cgraph_node *callee_node, bool record_adjustments)
ada353b8 1259{
3305135c
JH
1260 gcall *call = as_a <gcall *> (stmt);
1261 int flags = gimple_call_flags (call);
8d3abf42 1262
09a4ffb7 1263 /* Nothing to do for non-looping const functions. */
64f3e71c
JH
1264 if ((flags & (ECF_CONST | ECF_NOVOPS))
1265 && !(flags & ECF_LOOPING_CONST_OR_PURE))
09a4ffb7
JH
1266 return false;
1267
1268 bool changed = false;
64f3e71c 1269
ce2dbf94
JH
1270 if (dump_file)
1271 fprintf (dump_file, " - Merging side effects of %s\n",
1272 callee_node->dump_name ());
1273
09a4ffb7
JH
1274 /* Merge side effects and non-determinism.
1275 PURE/CONST flags makes functions deterministic and if there is
1276 no LOOPING_CONST_OR_PURE they also have no side effects. */
a34edf9a
JH
1277 if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
1278 || (flags & ECF_LOOPING_CONST_OR_PURE))
8d3abf42 1279 {
09a4ffb7 1280 if (!m_summary->side_effects && callee_summary->side_effects)
a34edf9a
JH
1281 {
1282 if (dump_file)
1283 fprintf (dump_file, " - merging side effects.\n");
09a4ffb7 1284 m_summary->side_effects = true;
a34edf9a
JH
1285 changed = true;
1286 }
09a4ffb7 1287 if (!m_summary->nondeterministic && callee_summary->nondeterministic
a34edf9a
JH
1288 && !ignore_nondeterminism_p (current_function_decl, flags))
1289 {
1290 if (dump_file)
1291 fprintf (dump_file, " - merging nondeterministic.\n");
09a4ffb7 1292 m_summary->nondeterministic = true;
a34edf9a
JH
1293 changed = true;
1294 }
1295 }
8d3abf42 1296
09a4ffb7 1297 /* For const functions we are done. */
8d3abf42
JH
1298 if (flags & (ECF_CONST | ECF_NOVOPS))
1299 return changed;
ada353b8 1300
09a4ffb7
JH
1301 /* Merge calls_interposable flags. */
1302 if (!m_summary->calls_interposable && callee_summary->calls_interposable)
a34edf9a
JH
1303 {
1304 if (dump_file)
1305 fprintf (dump_file, " - merging calls interposable.\n");
09a4ffb7 1306 m_summary->calls_interposable = true;
a34edf9a
JH
1307 changed = true;
1308 }
1309
09a4ffb7 1310 if (!callee_node->binds_to_current_def_p () && !m_summary->calls_interposable)
617695cd
JH
1311 {
1312 if (dump_file)
a34edf9a 1313 fprintf (dump_file, " - May be interposed.\n");
09a4ffb7 1314 m_summary->calls_interposable = true;
a34edf9a 1315 changed = true;
617695cd
JH
1316 }
1317
09a4ffb7
JH
1318 /* Now merge the actual load, store and kill vectors. For this we need
1319 to compute a map translating the callee's parameters to the caller's. */
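 /* For instance (informal): if the callee dereferences its parameter 1 and
    the call passes the caller's parameter 0 plus 8 bytes in that position,
    parm_map[1] gets parm_index == 0 and parm_offset == 8, so the callee's
    recorded accesses are rewritten into the caller's frame of reference
    before being merged into this summary.  */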
0b874e0f 1320 if (dump_file)
ce2dbf94 1321 fprintf (dump_file, " Parm map:");
0b874e0f 1322
09a4ffb7 1323 auto_vec <modref_parm_map, 32> parm_map;
3305135c
JH
1324 parm_map.safe_grow_cleared (gimple_call_num_args (call), true);
1325 for (unsigned i = 0; i < gimple_call_num_args (call); i++)
ada353b8 1326 {
3305135c 1327 parm_map[i] = parm_map_for_ptr (gimple_call_arg (call, i));
56cb815b 1328 if (dump_file)
c7b6a758
JH
1329 {
1330 fprintf (dump_file, " %i", parm_map[i].parm_index);
1331 if (parm_map[i].parm_offset_known)
1332 {
1333 fprintf (dump_file, " offset:");
1334 print_dec ((poly_int64_pod)parm_map[i].parm_offset,
1335 dump_file, SIGNED);
1336 }
1337 }
ada353b8 1338 }
09a4ffb7
JH
1339
1340 modref_parm_map chain_map;
3305135c 1341 if (gimple_call_chain (call))
1f3a3363 1342 {
3305135c 1343 chain_map = parm_map_for_ptr (gimple_call_chain (call));
1f3a3363
JH
1344 if (dump_file)
1345 {
1346 fprintf (dump_file, "static chain %i", chain_map.parm_index);
1347 if (chain_map.parm_offset_known)
1348 {
1349 fprintf (dump_file, " offset:");
1350 print_dec ((poly_int64_pod)chain_map.parm_offset,
1351 dump_file, SIGNED);
1352 }
1353 }
1354 }
56cb815b
JH
1355 if (dump_file)
1356 fprintf (dump_file, "\n");
ada353b8 1357
09a4ffb7
JH
1358 /* Kills can be merged in only if we know the function is going to be
1359 always executed. */
1360 if (m_always_executed
e69b7c57
JH
1361 && callee_summary->kills.length ()
1362 && (!cfun->can_throw_non_call_exceptions
3305135c 1363 || !stmt_could_throw_p (cfun, call)))
e69b7c57
JH
1364 {
1365 /* Watch for self recursive updates. */
1366 auto_vec<modref_access_node, 32> saved_kills;
1367
1368 saved_kills.reserve_exact (callee_summary->kills.length ());
1369 saved_kills.splice (callee_summary->kills);
1370 for (auto kill : saved_kills)
1371 {
1372 if (kill.parm_index >= (int)parm_map.length ())
1373 continue;
1374 modref_parm_map &m
1375 = kill.parm_index == MODREF_STATIC_CHAIN_PARM
1376 ? chain_map
1377 : parm_map[kill.parm_index];
1378 if (m.parm_index == MODREF_LOCAL_MEMORY_PARM
1379 || m.parm_index == MODREF_UNKNOWN_PARM
1380 || m.parm_index == MODREF_RETSLOT_PARM
1381 || !m.parm_offset_known)
1382 continue;
1383 modref_access_node n = kill;
1384 n.parm_index = m.parm_index;
1385 n.parm_offset += m.parm_offset;
09a4ffb7 1386 if (modref_access_node::insert_kill (m_summary->kills, n,
e69b7c57
JH
1387 record_adjustments))
1388 changed = true;
1389 }
1390 }
1391
09a4ffb7 1392 /* Merge in loads. */
8632f8c6
JH
1393 changed |= m_summary->loads->merge (current_function_decl,
1394 callee_summary->loads,
1395 &parm_map, &chain_map,
3305135c
JH
1396 record_adjustments,
1397 !may_access_nonescaping_parm_p
1398 (call, flags, true));
09a4ffb7
JH
1399 /* Merge in stores. */
1400 if (!ignore_stores_p (current_function_decl, flags))
617695cd 1401 {
8632f8c6
JH
1402 changed |= m_summary->stores->merge (current_function_decl,
1403 callee_summary->stores,
09a4ffb7 1404 &parm_map, &chain_map,
3305135c
JH
1405 record_adjustments,
1406 !may_access_nonescaping_parm_p
1407 (call, flags, false));
09a4ffb7 1408 if (!m_summary->writes_errno
617695cd
JH
1409 && callee_summary->writes_errno)
1410 {
09a4ffb7 1411 m_summary->writes_errno = true;
617695cd
JH
1412 changed = true;
1413 }
1414 }
ada353b8
JH
1415 return changed;
1416}
1417
617695cd
JH
1418/* Return access mode for argument I of call STMT with FNSPEC. */
1419
09a4ffb7
JH
1420modref_access_node
1421modref_access_analysis::get_access_for_fnspec (gcall *call, attr_fnspec &fnspec,
1422 unsigned int i,
1423 modref_parm_map &map)
617695cd
JH
1424{
1425 tree size = NULL_TREE;
1426 unsigned int size_arg;
1427
1428 if (!fnspec.arg_specified_p (i))
1429 ;
1430 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
1431 size = gimple_call_arg (call, size_arg);
1432 else if (fnspec.arg_access_size_given_by_type_p (i))
1433 {
1434 tree callee = gimple_call_fndecl (call);
1435 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
1436
1437 for (unsigned int p = 0; p < i; p++)
1438 t = TREE_CHAIN (t);
1439 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
1440 }
1441 modref_access_node a = {0, -1, -1,
1442 map.parm_offset, map.parm_index,
5c85f295 1443 map.parm_offset_known, 0};
617695cd
JH
1444 poly_int64 size_hwi;
1445 if (size
1446 && poly_int_tree_p (size, &size_hwi)
1447 && coeffs_in_range_p (size_hwi, 0,
1448 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
1449 {
1450 a.size = -1;
1451 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
1452 }
1453 return a;
1454}
617695cd
JH
1455/* Apply the side effects of call CALL to the summaries being computed
1456 using its fnspec, if one is known; otherwise record conservative
1457 global or unknown memory accesses. */
1458
09a4ffb7
JH
1459void
1460modref_access_analysis::process_fnspec (gcall *call)
617695cd 1461{
992644c3
JH
1462 int flags = gimple_call_flags (call);
1463
09a4ffb7
JH
1464 /* PURE/CONST flags makes functions deterministic and if there is
1465 no LOOPING_CONST_OR_PURE they also have no side effects. */
8d3abf42 1466 if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
992644c3
JH
1467 || (flags & ECF_LOOPING_CONST_OR_PURE)
1468 || (cfun->can_throw_non_call_exceptions
1469 && stmt_could_throw_p (cfun, call)))
1470 {
09a4ffb7
JH
1471 set_side_effects ();
1472 if (!ignore_nondeterminism_p (current_function_decl, flags))
1473 set_nondeterministic ();
992644c3 1474 }
09a4ffb7
JH
1475
1476 /* For const functions we are done. */
8d3abf42 1477 if (flags & (ECF_CONST | ECF_NOVOPS))
09a4ffb7
JH
1478 return;
1479
1480 attr_fnspec fnspec = gimple_call_fnspec (call);
1481 /* If there is no fnpec we know nothing about loads & stores. */
617695cd
JH
1482 if (!fnspec.known_p ())
1483 {
1484 if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1485 fprintf (dump_file, " Builtin with no fnspec: %s\n",
1486 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call))));
09a4ffb7 1487 if (!ignore_stores_p (current_function_decl, flags))
3305135c
JH
1488 {
1489 if (!may_access_nonescaping_parm_p (call, flags, false))
1490 record_global_memory_store ();
1491 else
1492 record_unknown_store ();
1493 if (!may_access_nonescaping_parm_p (call, flags, true))
1494 record_global_memory_load ();
1495 else
1496 record_unknown_load ();
1497 }
1498 else
1499 {
1500 if (!may_access_nonescaping_parm_p (call, flags, true))
1501 record_global_memory_load ();
1502 else
1503 record_unknown_load ();
1504 }
09a4ffb7 1505 return;
617695cd 1506 }
09a4ffb7 1507 /* Process fnspec. */
617695cd 1508 if (fnspec.global_memory_read_p ())
3305135c
JH
1509 {
1510 if (may_access_nonescaping_parm_p (call, flags, true))
1511 record_unknown_load ();
1512 else
1513 record_global_memory_load ();
1514 }
617695cd
JH
1515 else
1516 {
1517 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
1518 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
1519 ;
1520 else if (!fnspec.arg_specified_p (i)
1521 || fnspec.arg_maybe_read_p (i))
1522 {
0f5afb62 1523 modref_parm_map map = parm_map_for_ptr
1f3a3363 1524 (gimple_call_arg (call, i));
617695cd 1525
1f3a3363 1526 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
617695cd 1527 continue;
1f3a3363 1528 if (map.parm_index == MODREF_UNKNOWN_PARM)
617695cd 1529 {
09a4ffb7 1530 record_unknown_load ();
617695cd
JH
1531 break;
1532 }
09a4ffb7 1533 modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
0f5afb62
JH
1534 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1535 continue;
09a4ffb7 1536 if (m_summary)
8632f8c6 1537 m_summary->loads->insert (current_function_decl, 0, 0, a, false);
09a4ffb7 1538 if (m_summary_lto)
8632f8c6
JH
1539 m_summary_lto->loads->insert (current_function_decl, 0, 0, a,
1540 false);
617695cd
JH
1541 }
1542 }
09a4ffb7
JH
1543 if (ignore_stores_p (current_function_decl, flags))
1544 return;
617695cd 1545 if (fnspec.global_memory_written_p ())
3305135c
JH
1546 {
1547 if (may_access_nonescaping_parm_p (call, flags, false))
1548 record_unknown_store ();
1549 else
1550 record_global_memory_store ();
1551 }
617695cd
JH
1552 else
1553 {
1554 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
1555 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
1556 ;
1557 else if (!fnspec.arg_specified_p (i)
1558 || fnspec.arg_maybe_written_p (i))
1559 {
0f5afb62 1560 modref_parm_map map = parm_map_for_ptr
1f3a3363 1561 (gimple_call_arg (call, i));
617695cd 1562
1f3a3363 1563 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
617695cd 1564 continue;
1f3a3363 1565 if (map.parm_index == MODREF_UNKNOWN_PARM)
617695cd 1566 {
09a4ffb7 1567 record_unknown_store ();
617695cd
JH
1568 break;
1569 }
09a4ffb7 1570 modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
0f5afb62
JH
1571 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1572 continue;
09a4ffb7 1573 if (m_summary)
8632f8c6 1574 m_summary->stores->insert (current_function_decl, 0, 0, a, false);
09a4ffb7 1575 if (m_summary_lto)
8632f8c6
JH
1576 m_summary_lto->stores->insert (current_function_decl,
1577 0, 0, a, false);
617695cd
JH
1578 }
1579 if (fnspec.errno_maybe_written_p () && flag_errno_math)
6cef01c3 1580 {
09a4ffb7
JH
1581 if (m_summary)
1582 m_summary->writes_errno = true;
1583 if (m_summary_lto)
1584 m_summary_lto->writes_errno = true;
6cef01c3 1585 }
617695cd 1586 }
617695cd
JH
1587}
1588
ada353b8
JH
1589/* Analyze function call STMT in function F.
1590 Remember recursive calls in RECURSIVE_CALLS. */
d119f34c 1591
09a4ffb7
JH
1592void
1593modref_access_analysis::analyze_call (gcall *stmt)
d119f34c
JH
1594{
1595 /* Check flags on the function call. In certain cases, analysis can be
1596 simplified. */
1597 int flags = gimple_call_flags (stmt);
09a4ffb7 1598
3305135c
JH
1599 if (dump_file)
1600 {
1601 fprintf (dump_file, " - Analyzing call:");
1602 print_gimple_stmt (dump_file, stmt, 0);
1603 }
1604
8d3abf42
JH
1605 if ((flags & (ECF_CONST | ECF_NOVOPS))
1606 && !(flags & ECF_LOOPING_CONST_OR_PURE))
d119f34c
JH
1607 {
1608 if (dump_file)
1609 fprintf (dump_file,
1610 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
1611 "except for args.\n");
09a4ffb7 1612 return;
d119f34c
JH
1613 }
1614
d119f34c
JH
1615 /* Next, we try to get the callee's function declaration. The goal is to
1616 merge their summary with ours. */
1617 tree callee = gimple_call_fndecl (stmt);
1618
1619 /* Check if this is an indirect call. */
1620 if (!callee)
1621 {
d119f34c 1622 if (dump_file)
6cef01c3
JH
1623 fprintf (dump_file, gimple_call_internal_p (stmt)
1624 ? " - Internal call" : " - Indirect call.\n");
09a4ffb7
JH
1625 process_fnspec (stmt);
1626 return;
d119f34c 1627 }
6cef01c3 1628 /* We only need to handle internal calls in IPA mode. */
09a4ffb7 1629 gcc_checking_assert (!m_summary_lto && !m_ipa);
d119f34c
JH
1630
1631 struct cgraph_node *callee_node = cgraph_node::get_create (callee);
1632
d119f34c
JH
1633 /* If this is a recursive call, the target summary is the same as ours, so
1634 there's nothing to do. */
1635 if (recursive_call_p (current_function_decl, callee))
1636 {
09a4ffb7
JH
1637 m_recursive_calls.safe_push (stmt);
1638 set_side_effects ();
d119f34c
JH
1639 if (dump_file)
1640 fprintf (dump_file, " - Skipping recursive call.\n");
09a4ffb7 1641 return;
d119f34c
JH
1642 }
1643
1644 gcc_assert (callee_node != NULL);
1645
1646 /* Get the function symbol and its availability. */
1647 enum availability avail;
1648 callee_node = callee_node->function_symbol (&avail);
992644c3
JH
1649 bool looping;
1650 if (builtin_safe_for_const_function_p (&looping, callee))
1651 {
1652 if (looping)
09a4ffb7 1653 set_side_effects ();
992644c3 1654 if (dump_file)
09a4ffb7
JH
1655 fprintf (dump_file, " - Builtin is safe for const.\n");
1656 return;
992644c3 1657 }
d119f34c
JH
1658 if (avail <= AVAIL_INTERPOSABLE)
1659 {
d119f34c 1660 if (dump_file)
09a4ffb7
JH
1661 fprintf (dump_file,
1662 " - Function availability <= AVAIL_INTERPOSABLE.\n");
1663 process_fnspec (stmt);
1664 return;
d119f34c
JH
1665 }
1666
1667 /* Get callee's modref summary. As above, if there's no summary, we either
1668 have to give up or, if stores are ignored, we can just purge loads. */
71dbabcc 1669 modref_summary *callee_summary = optimization_summaries->get (callee_node);
d119f34c
JH
1670 if (!callee_summary)
1671 {
d119f34c
JH
1672 if (dump_file)
1673 fprintf (dump_file, " - No modref summary available for callee.\n");
09a4ffb7
JH
1674 process_fnspec (stmt);
1675 return;
d119f34c
JH
1676 }
1677
09a4ffb7 1678 merge_call_side_effects (stmt, callee_summary, callee_node, false);
d119f34c 1679
09a4ffb7 1680 return;
d119f34c
JH
1681}
1682
1683/* Helper for analyze_stmt. */
1684
09a4ffb7
JH
1685bool
1686modref_access_analysis::analyze_load (gimple *, tree, tree op, void *data)
d119f34c 1687{
09a4ffb7 1688 modref_access_analysis *t = (modref_access_analysis *)data;
d119f34c
JH
1689
1690 if (dump_file)
1691 {
1692 fprintf (dump_file, " - Analyzing load: ");
1693 print_generic_expr (dump_file, op);
1694 fprintf (dump_file, "\n");
1695 }
1696
09a4ffb7 1697 if (!t->record_access_p (op))
d119f34c
JH
1698 return false;
1699
1700 ao_ref r;
1701 ao_ref_init (&r, op);
64f3e71c 1702 modref_access_node a = get_access (&r);
0f5afb62
JH
1703 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1704 return false;
d119f34c 1705
09a4ffb7
JH
1706 if (t->m_summary)
1707 t->record_access (t->m_summary->loads, &r, a);
1708 if (t->m_summary_lto)
1709 t->record_access_lto (t->m_summary_lto->loads, &r, a);
d119f34c
JH
1710 return false;
1711}
1712
1713/* Helper for analyze_stmt. */
1714
09a4ffb7
JH
1715bool
1716modref_access_analysis::analyze_store (gimple *stmt, tree, tree op, void *data)
d119f34c 1717{
09a4ffb7 1718 modref_access_analysis *t = (modref_access_analysis *)data;
d119f34c
JH
1719
1720 if (dump_file)
1721 {
1722 fprintf (dump_file, " - Analyzing store: ");
1723 print_generic_expr (dump_file, op);
1724 fprintf (dump_file, "\n");
1725 }
1726
09a4ffb7 1727 if (!t->record_access_p (op))
d119f34c
JH
1728 return false;
1729
1730 ao_ref r;
1731 ao_ref_init (&r, op);
64f3e71c 1732 modref_access_node a = get_access (&r);
0f5afb62
JH
1733 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1734 return false;
d119f34c 1735
09a4ffb7
JH
1736 if (t->m_summary)
1737 t->record_access (t->m_summary->stores, &r, a);
1738 if (t->m_summary_lto)
1739 t->record_access_lto (t->m_summary_lto->stores, &r, a);
1740 if (t->m_always_executed
64f3e71c
JH
1741 && a.useful_for_kill_p ()
1742 && (!cfun->can_throw_non_call_exceptions
1743 || !stmt_could_throw_p (cfun, stmt)))
1744 {
1745 if (dump_file)
1746 fprintf (dump_file, " - Recording kill\n");
09a4ffb7
JH
1747 if (t->m_summary)
1748 modref_access_node::insert_kill (t->m_summary->kills, a, false);
1749 if (t->m_summary_lto)
1750 modref_access_node::insert_kill (t->m_summary_lto->kills, a, false);
64f3e71c 1751 }
d119f34c
JH
1752 return false;
1753}
1754
 1755/* Analyze statement STMT of the current function. ALWAYS_EXECUTED is true if
 1756 STMT is known to execute on every invocation; in IPA mode call side effects are not merged in. */
1757
09a4ffb7
JH
1758void
1759modref_access_analysis::analyze_stmt (gimple *stmt, bool always_executed)
d119f34c 1760{
09a4ffb7 1761 m_always_executed = always_executed;
8a2fd716
JJ
 1762 /* In general we cannot ignore clobbers because they are barriers for code
 1763 motion; however, after inlining it is safe to do so because local optimization
3991912e 1764 passes do not consider clobbers from other functions.
e53b6e56 1765 Similar logic is in ipa-pure-const.cc. */
09a4ffb7 1766 if ((m_ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
64f3e71c 1767 {
74509b96 1768 if (always_executed && record_access_p (gimple_assign_lhs (stmt)))
64f3e71c
JH
1769 {
1770 ao_ref r;
1771 ao_ref_init (&r, gimple_assign_lhs (stmt));
1772 modref_access_node a = get_access (&r);
1773 if (a.useful_for_kill_p ())
1774 {
1775 if (dump_file)
1776 fprintf (dump_file, " - Recording kill\n");
09a4ffb7
JH
1777 if (m_summary)
1778 modref_access_node::insert_kill (m_summary->kills, a, false);
1779 if (m_summary_lto)
1780 modref_access_node::insert_kill (m_summary_lto->kills,
1781 a, false);
64f3e71c
JH
1782 }
1783 }
09a4ffb7 1784 return;
64f3e71c 1785 }
3991912e 1786
d119f34c 1787 /* Analyze all loads and stores in STMT. */
09a4ffb7 1788 walk_stmt_load_store_ops (stmt, this,
d119f34c 1789 analyze_load, analyze_store);
d119f34c
JH
1790
1791 switch (gimple_code (stmt))
1792 {
1793 case GIMPLE_ASM:
a34edf9a 1794 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
09a4ffb7 1795 set_nondeterministic ();
a34edf9a
JH
1796 if (cfun->can_throw_non_call_exceptions
1797 && stmt_could_throw_p (cfun, stmt))
09a4ffb7 1798 set_side_effects ();
d119f34c
JH
1799 /* If the ASM statement does not read nor write memory, there's nothing
1800 to do. Otherwise just give up. */
1801 if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
09a4ffb7 1802 return;
d119f34c
JH
1803 if (dump_file)
1804 fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
1805 "which clobbers memory.\n");
09a4ffb7
JH
1806 record_unknown_load ();
1807 record_unknown_store ();
1808 return;
d119f34c 1809 case GIMPLE_CALL:
09a4ffb7
JH
1810 if (!m_ipa || gimple_call_internal_p (stmt))
1811 analyze_call (as_a <gcall *> (stmt));
6cef01c3 1812 else
09a4ffb7
JH
1813 {
1814 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
1815
1816 if (fnspec.known_p ()
1817 && (!fnspec.global_memory_read_p ()
1818 || !fnspec.global_memory_written_p ()))
1819 {
1820 cgraph_edge *e = cgraph_node::get
1821 (current_function_decl)->get_edge (stmt);
1822 if (e->callee)
1823 {
1824 fnspec_summaries->get_create (e)->fnspec
1825 = xstrdup (fnspec.get_str ());
1826 if (dump_file)
1827 fprintf (dump_file, " Recorded fnspec %s\n",
1828 fnspec.get_str ());
1829 }
1830 }
1831 }
1832 return;
d119f34c 1833 default:
992644c3
JH
1834 if (cfun->can_throw_non_call_exceptions
1835 && stmt_could_throw_p (cfun, stmt))
09a4ffb7
JH
1836 set_side_effects ();
1837 return;
d119f34c
JH
1838 }
1839}
1840
02c80893 1841/* Propagate loads/stores across recursive calls. */
71dbabcc 1842
09a4ffb7
JH
1843void
1844modref_access_analysis::propagate ()
71dbabcc 1845{
09a4ffb7
JH
1846 if (m_ipa && m_summary)
1847 return;
1848
1849 bool changed = true;
1850 bool first = true;
71dbabcc 1851 cgraph_node *fnode = cgraph_node::get (current_function_decl);
09a4ffb7
JH
1852
1853 m_always_executed = false;
1854 while (changed && m_summary->useful_p (m_ecf_flags, false))
71dbabcc 1855 {
09a4ffb7
JH
1856 changed = false;
1857 for (unsigned i = 0; i < m_recursive_calls.length (); i++)
1858 {
1859 changed |= merge_call_side_effects (m_recursive_calls[i], m_summary,
1860 fnode, !first);
1861 }
1862 first = false;
1863 }
1864}
1865
1866/* Analyze function. */
1867
1868void
1869modref_access_analysis::analyze ()
1870{
1871 m_ecf_flags = flags_from_decl_or_type (current_function_decl);
1872 bool summary_useful = true;
1873
1874 /* Analyze each statement in each basic block of the function. If the
1875 statement cannot be analyzed (for any reason), the entire function cannot
1876 be analyzed by modref. */
1877 basic_block bb;
b1f30bf4 1878 bitmap always_executed_bbs = find_always_executed_bbs (cfun, true);
09a4ffb7
JH
1879 FOR_EACH_BB_FN (bb, cfun)
1880 {
1881 gimple_stmt_iterator si;
b1f30bf4 1882 bool always_executed = bitmap_bit_p (always_executed_bbs, bb->index);
09a4ffb7
JH
1883
1884 for (si = gsi_start_nondebug_after_labels_bb (bb);
1885 !gsi_end_p (si); gsi_next_nondebug (&si))
1886 {
e93809f6 1887 /* A NULL memory access terminates the BB. These accesses are known
02c80893 1888 to trip undefined behavior. gimple-ssa-isolate-paths turns them
e93809f6
JH
 1889 into volatile accesses and adds a builtin_trap call which would
 1890 confuse us otherwise. */
1891 if (infer_nonnull_range_by_dereference (gsi_stmt (si),
1892 null_pointer_node))
1893 {
1894 if (dump_file)
1895 fprintf (dump_file, " - NULL memory access; terminating BB\n");
1896 if (flag_non_call_exceptions)
1897 set_side_effects ();
1898 break;
1899 }
09a4ffb7
JH
1900 analyze_stmt (gsi_stmt (si), always_executed);
1901
02c80893 1902 /* Avoid doing useless work. */
09a4ffb7
JH
1903 if ((!m_summary || !m_summary->useful_p (m_ecf_flags, false))
1904 && (!m_summary_lto
1905 || !m_summary_lto->useful_p (m_ecf_flags, false)))
1906 {
1907 summary_useful = false;
1908 break;
1909 }
1910 if (always_executed
1911 && stmt_can_throw_external (cfun, gsi_stmt (si)))
1912 always_executed = false;
1913 }
1914 if (!summary_useful)
1915 break;
1916 }
02c80893 1917 /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
09a4ffb7
JH
1918 This needs to be done after all other side effects are computed. */
1919 if (summary_useful)
1920 {
1921 if (!m_ipa)
1922 propagate ();
1923 if (m_summary && !m_summary->side_effects && !finite_function_p ())
1924 m_summary->side_effects = true;
1925 if (m_summary_lto && !m_summary_lto->side_effects
1926 && !finite_function_p ())
1927 m_summary_lto->side_effects = true;
71dbabcc 1928 }
b1f30bf4 1929 BITMAP_FREE (always_executed_bbs);
71dbabcc
JH
1930}
1931
520d5ad3
JH
1932/* Return true if OP accesses memory pointed to by SSA_NAME. */
1933
1934bool
1935memory_access_to (tree op, tree ssa_name)
1936{
1937 tree base = get_base_address (op);
1938 if (!base)
1939 return false;
1940 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
1941 return false;
1942 return TREE_OPERAND (base, 0) == ssa_name;
1943}
1944
 1945/* Consider the statement val = *arg.
 1946 Return the EAF flags of ARG that can be determined from the EAF flags of VAL
 1947 (which are known to be FLAGS). If IGNORE_STORES is true we can ignore
 1948 all stores to VAL, i.e. when handling a noreturn function. */
1949
1950static int
1951deref_flags (int flags, bool ignore_stores)
1952{
d70ef656
JH
 1953 /* A dereference is also a direct read, but the dereferenced value does not
 1954 yield any other direct use. */
1955 int ret = EAF_NO_DIRECT_CLOBBER | EAF_NO_DIRECT_ESCAPE
1956 | EAF_NOT_RETURNED_DIRECTLY;
4341b1b1
JH
 1957 /* If the argument is unused, just account for
 1958 the read involved in the dereference. */
520d5ad3 1959 if (flags & EAF_UNUSED)
d70ef656
JH
1960 ret |= EAF_NO_INDIRECT_READ | EAF_NO_INDIRECT_CLOBBER
1961 | EAF_NO_INDIRECT_ESCAPE;
520d5ad3
JH
1962 else
1963 {
d70ef656
JH
 1964 /* Direct or indirect accesses lead to indirect accesses. */
1965 if (((flags & EAF_NO_DIRECT_CLOBBER)
1966 && (flags & EAF_NO_INDIRECT_CLOBBER))
1967 || ignore_stores)
1968 ret |= EAF_NO_INDIRECT_CLOBBER;
1969 if (((flags & EAF_NO_DIRECT_ESCAPE)
1970 && (flags & EAF_NO_INDIRECT_ESCAPE))
1971 || ignore_stores)
1972 ret |= EAF_NO_INDIRECT_ESCAPE;
1973 if ((flags & EAF_NO_DIRECT_READ)
1974 && (flags & EAF_NO_INDIRECT_READ))
1975 ret |= EAF_NO_INDIRECT_READ;
1976 if ((flags & EAF_NOT_RETURNED_DIRECTLY)
1977 && (flags & EAF_NOT_RETURNED_INDIRECTLY))
1978 ret |= EAF_NOT_RETURNED_INDIRECTLY;
520d5ad3
JH
1979 }
1980 return ret;
1981}
1982
85ebbabd 1983
09a4ffb7
JH
1984/* Description of an escape point: a call which affects flags of a given
1985 SSA name. */
85ebbabd
JH
1986
1987struct escape_point
1988{
1989 /* Value escapes to this call. */
1990 gcall *call;
1991 /* Argument it escapes to. */
1992 int arg;
1993 /* Flags already known about the argument (this can save us from recording
02c80893 1994 escape points if local analysis already did a good job). */
8da8ed43 1995 eaf_flags_t min_flags;
02c80893 1996 /* Does value escape directly or indirectly? */
85ebbabd
JH
1997 bool direct;
1998};
1999
02c80893 2000/* Lattice used during the eaf flags analysis dataflow. For a given SSA name
09a4ffb7
JH
2001 we aim to compute its flags and escape points. We also use the lattice
 2002 to dynamically build the dataflow graph to propagate on. */
2003
85ebbabd
JH
2004class modref_lattice
2005{
2006public:
2007 /* EAF flags of the SSA name. */
4341b1b1 2008 eaf_flags_t flags;
4898e958
JH
2009 /* Used during DFS walk to mark names where final value was determined
2010 without need for dataflow. */
85ebbabd 2011 bool known;
4898e958 2012 /* Used during DFS walk to mark open vertices (for cycle detection). */
85ebbabd 2013 bool open;
4898e958
JH
2014 /* Set during DFS walk for names that needs dataflow propagation. */
2015 bool do_dataflow;
2016 /* Used during the iterative dataflow. */
2017 bool changed;
85ebbabd
JH
2018
 2019 /* When doing IPA analysis we cannot merge in callee escape points;
 2020 only remember them and do the merging at IPA propagation time. */
2021 vec <escape_point, va_heap, vl_ptr> escape_points;
2022
02c80893 2023 /* Representation of a graph for dataflow. This graph is built on-demand
4898e958
JH
 2024 using modref_eaf_analysis::analyze_ssa_name and later solved by
 2025 modref_eaf_analysis::propagate.
 2026 Each edge represents the fact that flags of the current lattice should be
 2027 propagated to the lattice of SSA_NAME. */
2028 struct propagate_edge
2029 {
2030 int ssa_name;
2031 bool deref;
2032 };
2033 vec <propagate_edge, va_heap, vl_ptr> propagate_to;
2034
85ebbabd
JH
2035 void init ();
2036 void release ();
2037 bool merge (const modref_lattice &with);
2038 bool merge (int flags);
2039 bool merge_deref (const modref_lattice &with, bool ignore_stores);
2040 bool merge_direct_load ();
2041 bool merge_direct_store ();
 2042 bool add_escape_point (gcall *call, int arg, int min_flags, bool direct);
2043 void dump (FILE *out, int indent = 0) const;
2044};
2045
2046/* Lattices are saved to vectors, so keep them PODs. */
2047void
2048modref_lattice::init ()
2049{
4341b1b1 2050 /* All flags we track. */
d70ef656
JH
2051 int f = EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER
2052 | EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE
2053 | EAF_NO_DIRECT_READ | EAF_NO_INDIRECT_READ
2054 | EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY
2055 | EAF_UNUSED;
4341b1b1
JH
2056 flags = f;
2057 /* Check that eaf_flags_t is wide enough to hold all flags. */
2058 gcc_checking_assert (f == flags);
85ebbabd
JH
2059 open = true;
2060 known = false;
2061}
2062
2063/* Release memory. */
2064void
2065modref_lattice::release ()
2066{
2067 escape_points.release ();
4898e958 2068 propagate_to.release ();
85ebbabd
JH
2069}
2070
2071/* Dump lattice to OUT; indent with INDENT spaces. */
2072
2073void
2074modref_lattice::dump (FILE *out, int indent) const
2075{
2076 dump_eaf_flags (out, flags);
2077 if (escape_points.length ())
2078 {
2079 fprintf (out, "%*sEscapes:\n", indent, "");
2080 for (unsigned int i = 0; i < escape_points.length (); i++)
2081 {
2082 fprintf (out, "%*s Arg %i (%s) min flags", indent, "",
2083 escape_points[i].arg,
2084 escape_points[i].direct ? "direct" : "indirect");
9851a163 2085 dump_eaf_flags (out, escape_points[i].min_flags, false);
85ebbabd
JH
2086 fprintf (out, " in call ");
2087 print_gimple_stmt (out, escape_points[i].call, 0);
2088 }
2089 }
2090}
2091
 2092/* Add escape point CALL, ARG, MIN_FLAGS, DIRECT. Return false if such an escape
 2093 point already exists. */
2094
2095bool
2096modref_lattice::add_escape_point (gcall *call, int arg, int min_flags,
2097 bool direct)
2098{
2099 escape_point *ep;
2100 unsigned int i;
2101
2102 /* If we already determined flags to be bad enough,
4341b1b1
JH
2103 we do not need to record. */
2104 if ((flags & min_flags) == flags || (min_flags & EAF_UNUSED))
85ebbabd
JH
2105 return false;
2106
2107 FOR_EACH_VEC_ELT (escape_points, i, ep)
2108 if (ep->call == call && ep->arg == arg && ep->direct == direct)
2109 {
2110 if ((ep->min_flags & min_flags) == min_flags)
2111 return false;
2112 ep->min_flags &= min_flags;
2113 return true;
2114 }
 2115 /* Give up if the limit on the number of escape points is reached. */
2116 if ((int)escape_points.length () > param_modref_max_escape_points)
2117 {
2118 if (dump_file)
2119 fprintf (dump_file, "--param modref-max-escape-points limit reached\n");
2120 merge (0);
2121 return true;
2122 }
2123 escape_point new_ep = {call, arg, min_flags, direct};
2124 escape_points.safe_push (new_ep);
2125 return true;
2126}
2127
2128/* Merge in flags from F. */
2129bool
2130modref_lattice::merge (int f)
2131{
3350e59f
JH
2132 if (f & EAF_UNUSED)
2133 return false;
4526ec20
JH
 2134 /* Check that the flags seem sane: if the function does not read the parameter
 2135 it cannot access it indirectly. */
2136 gcc_checking_assert (!(f & EAF_NO_DIRECT_READ)
2137 || ((f & EAF_NO_INDIRECT_READ)
2138 && (f & EAF_NO_INDIRECT_CLOBBER)
2139 && (f & EAF_NO_INDIRECT_ESCAPE)
2140 && (f & EAF_NOT_RETURNED_INDIRECTLY)));
85ebbabd
JH
2141 if ((flags & f) != flags)
2142 {
2143 flags &= f;
02c80893 2144 /* Prune obviously useless flags;
4341b1b1
JH
 2145 We do not have ECF_FLAGS handy, which is not a big problem since
 2146 we will do a final flags cleanup before producing the summary.
 2147 Merging should be fast so it can work well with the dataflow. */
2148 flags = remove_useless_eaf_flags (flags, 0, false);
85ebbabd
JH
2149 if (!flags)
2150 escape_points.release ();
2151 return true;
2152 }
2153 return false;
2154}
2155
02c80893 2156/* Merge in WITH. Return true if anything changed. */
85ebbabd
JH
2157
2158bool
2159modref_lattice::merge (const modref_lattice &with)
2160{
2161 if (!with.known)
4898e958 2162 do_dataflow = true;
85ebbabd
JH
2163
2164 bool changed = merge (with.flags);
2165
2166 if (!flags)
2167 return changed;
2168 for (unsigned int i = 0; i < with.escape_points.length (); i++)
2169 changed |= add_escape_point (with.escape_points[i].call,
2170 with.escape_points[i].arg,
2171 with.escape_points[i].min_flags,
2172 with.escape_points[i].direct);
2173 return changed;
2174}
2175
2176/* Merge in deref of WITH. If IGNORE_STORES is true do not consider
02c80893 2177 stores. Return true if anything changed. */
85ebbabd
JH
2178
2179bool
2180modref_lattice::merge_deref (const modref_lattice &with, bool ignore_stores)
2181{
2182 if (!with.known)
4898e958 2183 do_dataflow = true;
85ebbabd
JH
2184
2185 bool changed = merge (deref_flags (with.flags, ignore_stores));
2186
2187 if (!flags)
2188 return changed;
2189 for (unsigned int i = 0; i < with.escape_points.length (); i++)
9851a163
JH
2190 {
2191 int min_flags = with.escape_points[i].min_flags;
2192
2193 if (with.escape_points[i].direct)
2194 min_flags = deref_flags (min_flags, ignore_stores);
2195 else if (ignore_stores)
4341b1b1 2196 min_flags |= ignore_stores_eaf_flags;
9851a163
JH
2197 changed |= add_escape_point (with.escape_points[i].call,
2198 with.escape_points[i].arg,
2199 min_flags,
2200 false);
2201 }
85ebbabd
JH
2202 return changed;
2203}
2204
2205/* Merge in flags for direct load. */
2206
2207bool
2208modref_lattice::merge_direct_load ()
2209{
d70ef656 2210 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_READ));
85ebbabd
JH
2211}
2212
2213/* Merge in flags for direct store. */
2214
2215bool
2216modref_lattice::merge_direct_store ()
2217{
d70ef656 2218 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_CLOBBER));
85ebbabd
JH
2219}
2220
4898e958 2221/* Analyzer of EAF flags.
02c80893 2222 This is generally a dataflow problem over the SSA graph; however, we only
4898e958
JH
 2223 care about flags of a few selected SSA names (arguments, return slot and
 2224 static chain). So we first call analyze_ssa_name on all relevant names
 2225 and perform a DFS walk to discover SSA names where flags need to be
 2226 determined. For acyclic graphs we try to determine final flags during
02c80893 2227 this walk. Once a cycle or the recursion depth limit is hit we enlist SSA names
4898e958
JH
 2228 for dataflow, which is done by the propagate call.
2229
2230 After propagation the flags can be obtained using get_ssa_name_flags. */
18f0873d
JH
2231
2232class modref_eaf_analysis
2233{
2234public:
4898e958 2235 /* Mark NAME as relevant for analysis. */
8d1e342b 2236 void analyze_ssa_name (tree name, bool deferred = false);
02c80893 2237 /* Dataflow solver. */
4898e958 2238 void propagate ();
18f0873d
JH
2239 /* Return flags computed earlier for NAME. */
2240 int get_ssa_name_flags (tree name)
2241 {
2242 int version = SSA_NAME_VERSION (name);
2243 gcc_checking_assert (m_lattice[version].known);
2244 return m_lattice[version].flags;
2245 }
2246 /* In IPA mode this will record all escape points
 2247 determined for NAME to PARM_INDEX. FLAGS are the minimal
 2248 flags known. */
2249 void record_escape_points (tree name, int parm_index, int flags);
2250 modref_eaf_analysis (bool ipa)
2251 {
2252 m_ipa = ipa;
2253 m_depth = 0;
2254 m_lattice.safe_grow_cleared (num_ssa_names, true);
2255 }
2256 ~modref_eaf_analysis ()
2257 {
2258 gcc_checking_assert (!m_depth);
4898e958 2259 if (m_ipa || m_names_to_propagate.length ())
18f0873d
JH
2260 for (unsigned int i = 0; i < num_ssa_names; i++)
2261 m_lattice[i].release ();
2262 }
2263private:
02c80893 2264 /* If true, we produce analysis for IPA mode. In this case escape points are
18f0873d
JH
2265 collected. */
2266 bool m_ipa;
2267 /* Depth of recursion of analyze_ssa_name. */
2268 int m_depth;
2269 /* Propagation lattice for individual ssa names. */
2270 auto_vec<modref_lattice> m_lattice;
4898e958
JH
2271 auto_vec<tree> m_deferred_names;
2272 auto_vec<int> m_names_to_propagate;
18f0873d
JH
2273
2274 void merge_with_ssa_name (tree dest, tree src, bool deref);
d70ef656
JH
2275 void merge_call_lhs_flags (gcall *call, int arg, tree name, bool direct,
2276 bool deref);
18f0873d 2277};
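#if 0
/* Editorial sketch (illustrative, not compiled as part of the pass): the
   typical driving sequence for the analyzer above, assuming PARM has a
   default SSA definition.  analyze_parms below performs the same steps for
   all parameters, the return slot and the static chain at once.  */
static int
example_eaf_flags_of_parm (tree parm)
{
  modref_eaf_analysis eaf_analysis (/*ipa=*/false);
  tree name = ssa_default_def (cfun, parm);

  /* Register NAME; this builds the (possibly cyclic) dataflow problem.  */
  eaf_analysis.analyze_ssa_name (name);
  /* Solve deferred names and cycles.  */
  eaf_analysis.propagate ();
  /* Read back the final flags.  */
  return eaf_analysis.get_ssa_name_flags (name);
}
#endif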
85ebbabd 2278
85ebbabd 2279
02c80893 2280/* Call statements may return their parameters. Consider argument number
520d5ad3
JH
 2281 ARG of CALL and determine flags that need to be cleared
 2282 in case the pointer possibly indirectly referenced from ARG is returned.
d70ef656
JH
2283 If DIRECT is true consider direct returns and if INDIRECT consider
2284 indirect returns.
62af7d94
JH
 2285 NAME is the SSA name whose lattice is updated.
 2286 ARG is set to -1 for the static chain. */
520d5ad3 2287
18f0873d
JH
2288void
2289modref_eaf_analysis::merge_call_lhs_flags (gcall *call, int arg,
d70ef656
JH
2290 tree name, bool direct,
2291 bool indirect)
520d5ad3 2292{
18f0873d 2293 int index = SSA_NAME_VERSION (name);
c331a75d 2294 bool returned_directly = false;
d70ef656 2295
520d5ad3
JH
2296 /* If there is no return value, no flags are affected. */
2297 if (!gimple_call_lhs (call))
85ebbabd 2298 return;
520d5ad3
JH
2299
2300 /* If we know that function returns given argument and it is not ARG
2301 we can still be happy. */
62af7d94
JH
2302 if (arg >= 0)
2303 {
2304 int flags = gimple_call_return_flags (call);
c331a75d
JH
2305 if (flags & ERF_RETURNS_ARG)
2306 {
2307 if ((flags & ERF_RETURN_ARG_MASK) == arg)
2308 returned_directly = true;
2309 else
2310 return;
2311 }
2312 }
2313 /* Make ERF_RETURNS_ARG overwrite EAF_UNUSED. */
2314 if (returned_directly)
2315 {
2316 direct = true;
2317 indirect = false;
62af7d94 2318 }
c331a75d
JH
2319 /* If value is not returned at all, do nothing. */
2320 else if (!direct && !indirect)
2321 return;
8da8ed43 2322
520d5ad3
JH
2323 /* If return value is SSA name determine its flags. */
2324 if (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME)
85ebbabd
JH
2325 {
2326 tree lhs = gimple_call_lhs (call);
d70ef656
JH
2327 if (direct)
2328 merge_with_ssa_name (name, lhs, false);
2329 if (indirect)
2330 merge_with_ssa_name (name, lhs, true);
85ebbabd 2331 }
520d5ad3 2332 /* In the case of memory store we can do nothing. */
d70ef656 2333 else if (!direct)
18f0873d 2334 m_lattice[index].merge (deref_flags (0, false));
520d5ad3 2335 else
18f0873d 2336 m_lattice[index].merge (0);
520d5ad3
JH
2337}
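/* Editorial example (illustrative only, names made up): for a call

     res_5 = __builtin_memcpy (dst_1, src_2, 16);

   gimple_call_return_flags yields ERF_RETURNS_ARG with index 0, so when this
   helper is invoked for ARG == 0 the value is treated as returned directly
   (overriding DIRECT/INDIRECT), while for ARG == 1 it returns early because
   a different argument is known to be the one returned.  */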
2338
62af7d94
JH
 2339/* CALL_FLAGS are the EAF flags of the argument. Turn them
 2340 into flags for the caller, updating LATTICE of the corresponding
 2341 argument if needed. */
2342
2343static int
2344callee_to_caller_flags (int call_flags, bool ignore_stores,
2345 modref_lattice &lattice)
2346{
 2347 /* call_flags is about the callee returning a value,
 2348 which is not the same as the caller returning it. */
d70ef656
JH
2349 call_flags |= EAF_NOT_RETURNED_DIRECTLY
2350 | EAF_NOT_RETURNED_INDIRECTLY;
62af7d94
JH
2351 if (!ignore_stores && !(call_flags & EAF_UNUSED))
2352 {
2f3d43a3
JH
2353 /* If value escapes we are no longer able to track what happens
2354 with it because we can read it from the escaped location
2355 anytime. */
d70ef656 2356 if (!(call_flags & EAF_NO_DIRECT_ESCAPE))
2f3d43a3
JH
2357 lattice.merge (0);
2358 else if (!(call_flags & EAF_NO_INDIRECT_ESCAPE))
d70ef656 2359 lattice.merge (~(EAF_NOT_RETURNED_INDIRECTLY
4526ec20 2360 | EAF_NO_DIRECT_READ
2f3d43a3
JH
2361 | EAF_NO_INDIRECT_READ
2362 | EAF_NO_INDIRECT_CLOBBER
62af7d94
JH
2363 | EAF_UNUSED));
2364 }
2365 else
2366 call_flags |= ignore_stores_eaf_flags;
2367 return call_flags;
2368}
2369
85ebbabd
JH
 2370/* Analyze EAF flags for SSA name NAME and store the result into the lattice.
 2371 If DEFERRED is true, NAME was postponed earlier because the maximum
 2372 recursion depth was reached; its lattice is already initialized.
 2373 In IPA mode we analyze for IPA propagation (and thus call escape points
 2374 are processed later). */
520d5ad3 2375
18f0873d 2376void
8d1e342b 2377modref_eaf_analysis::analyze_ssa_name (tree name, bool deferred)
520d5ad3
JH
2378{
2379 imm_use_iterator ui;
2380 gimple *use_stmt;
85ebbabd 2381 int index = SSA_NAME_VERSION (name);
520d5ad3 2382
8d1e342b 2383 if (!deferred)
520d5ad3 2384 {
8d1e342b
JH
2385 /* See if value is already computed. */
2386 if (m_lattice[index].known || m_lattice[index].do_dataflow)
2387 return;
2388 if (m_lattice[index].open)
2389 {
2390 if (dump_file)
2391 fprintf (dump_file,
2392 "%*sCycle in SSA graph\n",
2393 m_depth * 4, "");
2394 return;
2395 }
2396 /* Recursion guard. */
2397 m_lattice[index].init ();
2398 if (m_depth == param_modref_max_depth)
2399 {
2400 if (dump_file)
2401 fprintf (dump_file,
2402 "%*sMax recursion depth reached; postponing\n",
2403 m_depth * 4, "");
2404 m_deferred_names.safe_push (name);
2405 return;
2406 }
520d5ad3 2407 }
520d5ad3
JH
2408
2409 if (dump_file)
2410 {
2411 fprintf (dump_file,
18f0873d 2412 "%*sAnalyzing flags of ssa name: ", m_depth * 4, "");
520d5ad3
JH
2413 print_generic_expr (dump_file, name);
2414 fprintf (dump_file, "\n");
2415 }
2416
2417 FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
2418 {
18f0873d 2419 if (m_lattice[index].flags == 0)
640296c3 2420 break;
520d5ad3
JH
2421 if (is_gimple_debug (use_stmt))
2422 continue;
2423 if (dump_file)
2424 {
18f0873d 2425 fprintf (dump_file, "%*s Analyzing stmt: ", m_depth * 4, "");
520d5ad3
JH
2426 print_gimple_stmt (dump_file, use_stmt, 0);
2427 }
4341b1b1 2428 /* If we see a direct non-debug use, clear the unused bit.
02c80893 2429 All dereferences should be accounted for below using deref_flags. */
18f0873d 2430 m_lattice[index].merge (~EAF_UNUSED);
520d5ad3 2431
26285af4 2432 /* Gimple return may load the return value.
e53b6e56 2433 Returning name counts as a use by tree-ssa-structalias.cc. */
520d5ad3
JH
2434 if (greturn *ret = dyn_cast <greturn *> (use_stmt))
2435 {
b8ef019a
JH
2436 /* Returning through return slot is seen as memory write earlier. */
2437 if (DECL_RESULT (current_function_decl)
2438 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
2439 ;
2440 else if (gimple_return_retval (ret) == name)
d70ef656 2441 m_lattice[index].merge (~(EAF_UNUSED | EAF_NOT_RETURNED_DIRECTLY
18f0873d 2442 | EAF_NOT_RETURNED_DIRECTLY));
85ebbabd 2443 else if (memory_access_to (gimple_return_retval (ret), name))
8da8ed43 2444 {
18f0873d 2445 m_lattice[index].merge_direct_load ();
d70ef656
JH
2446 m_lattice[index].merge (~(EAF_UNUSED
2447 | EAF_NOT_RETURNED_INDIRECTLY));
8da8ed43 2448 }
520d5ad3
JH
2449 }
2450 /* Account for LHS store, arg loads and flags from callee function. */
2451 else if (gcall *call = dyn_cast <gcall *> (use_stmt))
2452 {
2453 tree callee = gimple_call_fndecl (call);
9b08f776
JH
2454
 2455 /* Internally, IPA PTA treats calling a function as "writing" to
 2456 the argument space of all functions the function pointer points to
 2457 (PR101949). We cannot drop EAF_NOCLOBBER only when ipa-pta
 2458 is on since that would allow propagation of this from -fno-ipa-pta
 2459 to -fipa-pta functions. */
2460 if (gimple_call_fn (use_stmt) == name)
d70ef656 2461 m_lattice[index].merge (~(EAF_NO_DIRECT_CLOBBER | EAF_UNUSED));
9b08f776 2462
520d5ad3 2463 /* Recursion would require bit of propagation; give up for now. */
18f0873d 2464 if (callee && !m_ipa && recursive_call_p (current_function_decl,
85ebbabd 2465 callee))
18f0873d 2466 m_lattice[index].merge (0);
520d5ad3
JH
2467 else
2468 {
2469 int ecf_flags = gimple_call_flags (call);
2470 bool ignore_stores = ignore_stores_p (current_function_decl,
2471 ecf_flags);
85ebbabd
JH
2472 bool ignore_retval = ignore_retval_p (current_function_decl,
2473 ecf_flags);
520d5ad3
JH
2474
2475 /* Handle *name = func (...). */
2476 if (gimple_call_lhs (call)
2477 && memory_access_to (gimple_call_lhs (call), name))
59f38935 2478 {
18f0873d 2479 m_lattice[index].merge_direct_store ();
59f38935
JH
2480 /* Return slot optimization passes address of
2481 LHS to callee via hidden parameter and this
2482 may make LHS to escape. See PR 98499. */
2483 if (gimple_call_return_slot_opt_p (call)
2484 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call))))
62af7d94
JH
2485 {
2486 int call_flags = gimple_call_retslot_flags (call);
2487 bool isretslot = false;
2488
2489 if (DECL_RESULT (current_function_decl)
2490 && DECL_BY_REFERENCE
2491 (DECL_RESULT (current_function_decl)))
2492 isretslot = ssa_default_def
2493 (cfun,
2494 DECL_RESULT (current_function_decl))
2495 == name;
2496
 2497 /* Passing the return slot to the callee's return slot is special because
 2498 not_returned and escape have the same meaning.
 2499 However passing an argument to the return slot is different. If
 2500 the callee's return slot is returned it means that
2f3d43a3
JH
 2501 the argument is written to itself, which is an escape.
 2502 Since we do not track the memory it is written to we
02c80893 2503 need to give up on analyzing it. */
62af7d94
JH
2504 if (!isretslot)
2505 {
62af7d94 2506 if (!(call_flags & (EAF_NOT_RETURNED_DIRECTLY
d70ef656 2507 | EAF_UNUSED)))
2f3d43a3
JH
2508 m_lattice[index].merge (0);
2509 else gcc_checking_assert
2510 (call_flags & (EAF_NOT_RETURNED_INDIRECTLY
2511 | EAF_UNUSED));
62af7d94
JH
2512 call_flags = callee_to_caller_flags
2513 (call_flags, false,
2514 m_lattice[index]);
2515 }
2516 m_lattice[index].merge (call_flags);
2517 }
59f38935 2518 }
520d5ad3 2519
520d5ad3
JH
2520 if (gimple_call_chain (call)
2521 && (gimple_call_chain (call) == name))
62af7d94
JH
2522 {
2523 int call_flags = gimple_call_static_chain_flags (call);
d70ef656
JH
2524 if (!ignore_retval && !(call_flags & EAF_UNUSED))
2525 merge_call_lhs_flags
2526 (call, -1, name,
2527 !(call_flags & EAF_NOT_RETURNED_DIRECTLY),
2528 !(call_flags & EAF_NOT_RETURNED_INDIRECTLY));
62af7d94
JH
2529 call_flags = callee_to_caller_flags
2530 (call_flags, ignore_stores,
2531 m_lattice[index]);
2532 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
2533 m_lattice[index].merge (call_flags);
2534 }
85ebbabd
JH
2535
 2536 /* Process internal functions right away. */
18f0873d 2537 bool record_ipa = m_ipa && !gimple_call_internal_p (call);
520d5ad3
JH
2538
2539 /* Handle all function parameters. */
85ebbabd 2540 for (unsigned i = 0;
18f0873d
JH
2541 i < gimple_call_num_args (call)
2542 && m_lattice[index].flags; i++)
520d5ad3
JH
2543 /* Name is directly passed to the callee. */
2544 if (gimple_call_arg (call, i) == name)
2545 {
62af7d94 2546 int call_flags = gimple_call_arg_flags (call, i);
c331a75d 2547 if (!ignore_retval)
62af7d94
JH
2548 merge_call_lhs_flags
2549 (call, i, name,
c331a75d
JH
2550 !(call_flags & (EAF_NOT_RETURNED_DIRECTLY
2551 | EAF_UNUSED)),
2552 !(call_flags & (EAF_NOT_RETURNED_INDIRECTLY
2553 | EAF_UNUSED)));
85ebbabd 2554 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
520d5ad3 2555 {
62af7d94
JH
2556 call_flags = callee_to_caller_flags
2557 (call_flags, ignore_stores,
2558 m_lattice[index]);
85ebbabd 2559 if (!record_ipa)
18f0873d 2560 m_lattice[index].merge (call_flags);
c3c61674 2561 else
18f0873d 2562 m_lattice[index].add_escape_point (call, i,
b8ef019a 2563 call_flags, true);
520d5ad3
JH
2564 }
2565 }
2566 /* Name is dereferenced and passed to a callee. */
2567 else if (memory_access_to (gimple_call_arg (call, i), name))
2568 {
62af7d94
JH
2569 int call_flags = deref_flags
2570 (gimple_call_arg_flags (call, i), ignore_stores);
d70ef656
JH
2571 if (!ignore_retval && !(call_flags & EAF_UNUSED)
2572 && !(call_flags & EAF_NOT_RETURNED_DIRECTLY)
2573 && !(call_flags & EAF_NOT_RETURNED_INDIRECTLY))
2574 merge_call_lhs_flags (call, i, name, false, true);
520d5ad3 2575 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
18f0873d 2576 m_lattice[index].merge_direct_load ();
520d5ad3 2577 else
85ebbabd 2578 {
62af7d94
JH
2579 call_flags = callee_to_caller_flags
2580 (call_flags, ignore_stores,
2581 m_lattice[index]);
85ebbabd 2582 if (!record_ipa)
18f0873d 2583 m_lattice[index].merge (call_flags);
c3c61674 2584 else
18f0873d 2585 m_lattice[index].add_escape_point (call, i,
62af7d94 2586 call_flags, false);
85ebbabd 2587 }
520d5ad3
JH
2588 }
2589 }
520d5ad3
JH
2590 }
2591 else if (gimple_assign_load_p (use_stmt))
2592 {
2593 gassign *assign = as_a <gassign *> (use_stmt);
2594 /* Memory to memory copy. */
2595 if (gimple_store_p (assign))
2596 {
520d5ad3
JH
2597 /* Handle *lhs = *name.
2598
2599 We do not track memory locations, so assume that value
2600 is used arbitrarily. */
2601 if (memory_access_to (gimple_assign_rhs1 (assign), name))
18f0873d 2602 m_lattice[index].merge (deref_flags (0, false));
85ebbabd
JH
2603 /* Handle *name = *exp. */
2604 else if (memory_access_to (gimple_assign_lhs (assign), name))
18f0873d 2605 m_lattice[index].merge_direct_store ();
520d5ad3
JH
2606 }
2607 /* Handle lhs = *name. */
2608 else if (memory_access_to (gimple_assign_rhs1 (assign), name))
85ebbabd
JH
2609 {
2610 tree lhs = gimple_assign_lhs (assign);
18f0873d 2611 merge_with_ssa_name (name, lhs, true);
85ebbabd 2612 }
520d5ad3
JH
2613 }
2614 else if (gimple_store_p (use_stmt))
2615 {
2616 gassign *assign = dyn_cast <gassign *> (use_stmt);
2617
2618 /* Handle *lhs = name. */
2619 if (assign && gimple_assign_rhs1 (assign) == name)
2620 {
2621 if (dump_file)
2622 fprintf (dump_file, "%*s ssa name saved to memory\n",
18f0873d
JH
2623 m_depth * 4, "");
2624 m_lattice[index].merge (0);
520d5ad3
JH
2625 }
2626 /* Handle *name = exp. */
2627 else if (assign
2628 && memory_access_to (gimple_assign_lhs (assign), name))
0c9687d0
JH
2629 {
 2630 /* In general we cannot ignore clobbers because they are
 2631 barriers for code motion; however, after inlining it is safe to
 2632 do so because local optimization passes do not consider clobbers
18f0873d 2633 from other functions.
e53b6e56 2634 Similar logic is in ipa-pure-const.cc. */
0c9687d0 2635 if (!cfun->after_inlining || !gimple_clobber_p (assign))
18f0873d 2636 m_lattice[index].merge_direct_store ();
0c9687d0 2637 }
520d5ad3
JH
2638 /* ASM statements etc. */
2639 else if (!assign)
2640 {
2641 if (dump_file)
18f0873d
JH
2642 fprintf (dump_file, "%*s Unhandled store\n", m_depth * 4, "");
2643 m_lattice[index].merge (0);
520d5ad3
JH
2644 }
2645 }
2646 else if (gassign *assign = dyn_cast <gassign *> (use_stmt))
2647 {
2648 enum tree_code code = gimple_assign_rhs_code (assign);
2649
2650 /* See if operation is a merge as considered by
e53b6e56 2651 tree-ssa-structalias.cc:find_func_aliases. */
520d5ad3
JH
2652 if (!truth_value_p (code)
2653 && code != POINTER_DIFF_EXPR
2654 && (code != POINTER_PLUS_EXPR
2655 || gimple_assign_rhs1 (assign) == name))
85ebbabd
JH
2656 {
2657 tree lhs = gimple_assign_lhs (assign);
18f0873d 2658 merge_with_ssa_name (name, lhs, false);
85ebbabd 2659 }
520d5ad3
JH
2660 }
2661 else if (gphi *phi = dyn_cast <gphi *> (use_stmt))
2662 {
85ebbabd 2663 tree result = gimple_phi_result (phi);
18f0873d 2664 merge_with_ssa_name (name, result, false);
520d5ad3
JH
2665 }
2666 /* Conditions are not considered escape points
2667 by tree-ssa-structalias. */
2668 else if (gimple_code (use_stmt) == GIMPLE_COND)
2669 ;
2670 else
2671 {
2672 if (dump_file)
18f0873d
JH
2673 fprintf (dump_file, "%*s Unhandled stmt\n", m_depth * 4, "");
2674 m_lattice[index].merge (0);
520d5ad3
JH
2675 }
2676
2677 if (dump_file)
2678 {
18f0873d 2679 fprintf (dump_file, "%*s current flags of ", m_depth * 4, "");
520d5ad3 2680 print_generic_expr (dump_file, name);
18f0873d 2681 m_lattice[index].dump (dump_file, m_depth * 4 + 4);
520d5ad3
JH
2682 }
2683 }
2684 if (dump_file)
2685 {
18f0873d 2686 fprintf (dump_file, "%*sflags of ssa name ", m_depth * 4, "");
520d5ad3 2687 print_generic_expr (dump_file, name);
18f0873d 2688 m_lattice[index].dump (dump_file, m_depth * 4 + 2);
520d5ad3 2689 }
18f0873d 2690 m_lattice[index].open = false;
4898e958
JH
2691 if (!m_lattice[index].do_dataflow)
2692 m_lattice[index].known = true;
18f0873d
JH
2693}
2694
2695/* Propagate info from SRC to DEST. If DEREF it true, assume that SRC
2696 is dereferenced. */
2697
2698void
2699modref_eaf_analysis::merge_with_ssa_name (tree dest, tree src, bool deref)
2700{
2701 int index = SSA_NAME_VERSION (dest);
2702 int src_index = SSA_NAME_VERSION (src);
2703
4898e958
JH
2704 /* Merging lattice with itself is a no-op. */
2705 if (!deref && src == dest)
2706 return;
2707
18f0873d
JH
2708 m_depth++;
2709 analyze_ssa_name (src);
2710 m_depth--;
2711 if (deref)
2712 m_lattice[index].merge_deref (m_lattice[src_index], false);
2713 else
2714 m_lattice[index].merge (m_lattice[src_index]);
4898e958
JH
2715
2716 /* If we failed to produce final solution add an edge to the dataflow
2717 graph. */
2718 if (!m_lattice[src_index].known)
2719 {
2720 modref_lattice::propagate_edge e = {index, deref};
2721
2722 if (!m_lattice[src_index].propagate_to.length ())
2723 m_names_to_propagate.safe_push (src_index);
2724 m_lattice[src_index].propagate_to.safe_push (e);
2725 m_lattice[src_index].changed = true;
2726 m_lattice[src_index].do_dataflow = true;
2727 if (dump_file)
2728 fprintf (dump_file,
2729 "%*sWill propgate from ssa_name %i to %i%s\n",
2730 m_depth * 4 + 4,
2731 "", src_index, index, deref ? " (deref)" : "");
2732 }
2733}
2734
 2735/* In case we deferred some SSA names, reprocess them. In case some
 2736 dataflow edges were introduced, do the actual iterative dataflow. */
2737
2738void
2739modref_eaf_analysis::propagate ()
2740{
2741 int iterations = 0;
2742 size_t i;
2743 int index;
2744 bool changed = true;
2745
2746 while (m_deferred_names.length ())
2747 {
2748 tree name = m_deferred_names.pop ();
4898e958
JH
2749 if (dump_file)
2750 fprintf (dump_file, "Analyzing deferred SSA name\n");
8d1e342b 2751 analyze_ssa_name (name, true);
4898e958
JH
2752 }
2753
2754 if (!m_names_to_propagate.length ())
2755 return;
2756 if (dump_file)
2757 fprintf (dump_file, "Propagating EAF flags\n");
2758
2759 /* Compute reverse postorder. */
2760 auto_vec <int> rpo;
2761 struct stack_entry
2762 {
2763 int name;
2764 unsigned pos;
2765 };
2766 auto_vec <struct stack_entry> stack;
2767 int pos = m_names_to_propagate.length () - 1;
2768
2769 rpo.safe_grow (m_names_to_propagate.length (), true);
2770 stack.reserve_exact (m_names_to_propagate.length ());
2771
02c80893 2772 /* We reuse the known flag for RPO DFS walk bookkeeping. */
4898e958
JH
2773 if (flag_checking)
2774 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
2775 gcc_assert (!m_lattice[index].known && m_lattice[index].changed);
2776
2777 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
2778 {
2779 if (!m_lattice[index].known)
2780 {
2781 stack_entry e = {index, 0};
2782
2783 stack.quick_push (e);
2784 m_lattice[index].known = true;
2785 }
2786 while (stack.length ())
2787 {
2788 bool found = false;
2789 int index1 = stack.last ().name;
2790
2791 while (stack.last ().pos < m_lattice[index1].propagate_to.length ())
2792 {
2793 int index2 = m_lattice[index1]
2794 .propagate_to[stack.last ().pos].ssa_name;
2795
2796 stack.last ().pos++;
2797 if (!m_lattice[index2].known
2798 && m_lattice[index2].propagate_to.length ())
2799 {
2800 stack_entry e = {index2, 0};
2801
2802 stack.quick_push (e);
2803 m_lattice[index2].known = true;
2804 found = true;
2805 break;
2806 }
2807 }
2808 if (!found
2809 && stack.last ().pos == m_lattice[index1].propagate_to.length ())
2810 {
2811 rpo[pos--] = index1;
2812 stack.pop ();
2813 }
2814 }
2815 }
2816
02c80893 2817 /* Perform iterative dataflow. */
4898e958
JH
2818 while (changed)
2819 {
2820 changed = false;
2821 iterations++;
2822 if (dump_file)
2823 fprintf (dump_file, " iteration %i\n", iterations);
2824 FOR_EACH_VEC_ELT (rpo, i, index)
2825 {
2826 if (m_lattice[index].changed)
2827 {
2828 size_t j;
2829
2830 m_lattice[index].changed = false;
2831 if (dump_file)
2832 fprintf (dump_file, " Visiting ssa name %i\n", index);
2833 for (j = 0; j < m_lattice[index].propagate_to.length (); j++)
2834 {
2835 bool ch;
2836 int target = m_lattice[index].propagate_to[j].ssa_name;
2837 bool deref = m_lattice[index].propagate_to[j].deref;
2838
2839 if (dump_file)
2840 fprintf (dump_file, " Propagating flags of ssa name"
2841 " %i to %i%s\n",
2842 index, target, deref ? " (deref)" : "");
2843 m_lattice[target].known = true;
2844 if (!m_lattice[index].propagate_to[j].deref)
2845 ch = m_lattice[target].merge (m_lattice[index]);
2846 else
2847 ch = m_lattice[target].merge_deref (m_lattice[index],
2848 false);
2849 if (!ch)
2850 continue;
2851 if (dump_file)
2852 {
2853 fprintf (dump_file, " New lattice: ");
2854 m_lattice[target].dump (dump_file);
2855 }
d70ef656 2856 changed = true;
4898e958
JH
2857 m_lattice[target].changed = true;
2858 }
2859 }
2860 }
2861 }
2862 if (dump_file)
2863 fprintf (dump_file, "EAF flags propagated in %i iterations\n", iterations);
520d5ad3
JH
2864}
2865
b8ef019a
JH
 2866/* Record escape points of SSA name NAME (parameter PARM_INDEX) whose recorded
  min_flags do not already imply FLAGS. */
2867
18f0873d
JH
2868void
2869modref_eaf_analysis::record_escape_points (tree name, int parm_index, int flags)
b8ef019a 2870{
18f0873d
JH
2871 modref_lattice &lattice = m_lattice[SSA_NAME_VERSION (name)];
2872
b8ef019a
JH
2873 if (lattice.escape_points.length ())
2874 {
2875 escape_point *ep;
2876 unsigned int ip;
2877 cgraph_node *node = cgraph_node::get (current_function_decl);
2878
18f0873d 2879 gcc_assert (m_ipa);
b8ef019a
JH
2880 FOR_EACH_VEC_ELT (lattice.escape_points, ip, ep)
2881 if ((ep->min_flags & flags) != flags)
2882 {
2883 cgraph_edge *e = node->get_edge (ep->call);
2884 struct escape_entry ee = {parm_index, ep->arg,
2885 ep->min_flags, ep->direct};
2886
2887 escape_summaries->get_create (e)->esc.safe_push (ee);
2888 }
2889 }
2890}
2891
7798ae1a
JH
2892/* Determine EAF flags for function parameters
2893 and fill in SUMMARY/SUMMARY_LTO. If IPA is true work in IPA mode
02c80893 2894 where we also collect escape points.
7798ae1a 2895 PAST_FLAGS, PAST_RETSLOT_FLAGS, PAST_STATIC_CHAIN_FLAGS can be
02c80893 2896 used to preserve flags from a previous (IPA) run for cases where
7798ae1a
JH
 2897 late optimizations changed the code in a way we can no longer
 2898 analyze easily. */
520d5ad3
JH
2899
2900static void
85ebbabd 2901analyze_parms (modref_summary *summary, modref_summary_lto *summary_lto,
7798ae1a
JH
2902 bool ipa, vec<eaf_flags_t> &past_flags,
2903 int past_retslot_flags, int past_static_chain_flags)
520d5ad3
JH
2904{
2905 unsigned int parm_index = 0;
2906 unsigned int count = 0;
85ebbabd 2907 int ecf_flags = flags_from_decl_or_type (current_function_decl);
b8ef019a 2908 tree retslot = NULL;
a70c0512 2909 tree static_chain = NULL;
85ebbabd 2910
b8ef019a
JH
 2911 /* If there is a return slot, look up its SSA name. */
2912 if (DECL_RESULT (current_function_decl)
2913 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
2914 retslot = ssa_default_def (cfun, DECL_RESULT (current_function_decl));
a70c0512
JH
2915 if (cfun->static_chain_decl)
2916 static_chain = ssa_default_def (cfun, cfun->static_chain_decl);
b8ef019a 2917
520d5ad3
JH
2918 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2919 parm = TREE_CHAIN (parm))
2920 count++;
2921
a70c0512 2922 if (!count && !retslot && !static_chain)
520d5ad3
JH
2923 return;
2924
18f0873d 2925 modref_eaf_analysis eaf_analysis (ipa);
520d5ad3 2926
4898e958
JH
2927 /* Determine all SSA names we need to know flags for. */
2928 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2929 parm = TREE_CHAIN (parm))
2930 {
2931 tree name = ssa_default_def (cfun, parm);
2932 if (name)
2933 eaf_analysis.analyze_ssa_name (name);
2934 }
2935 if (retslot)
2936 eaf_analysis.analyze_ssa_name (retslot);
2937 if (static_chain)
2938 eaf_analysis.analyze_ssa_name (static_chain);
2939
2940 /* Do the dataflow. */
2941 eaf_analysis.propagate ();
2942
e2dd12ab
JH
2943 tree attr = lookup_attribute ("fn spec",
2944 TYPE_ATTRIBUTES
2945 (TREE_TYPE (current_function_decl)));
2946 attr_fnspec fnspec (attr
2947 ? TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))
2948 : "");
2949
2950
4898e958 2951 /* Store results to summaries. */
520d5ad3
JH
2952 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm; parm_index++,
2953 parm = TREE_CHAIN (parm))
2954 {
2955 tree name = ssa_default_def (cfun, parm);
3350e59f
JH
2956 if (!name || has_zero_uses (name))
2957 {
2958 /* We do not track non-SSA parameters,
2959 but we want to track unused gimple_regs. */
2960 if (!is_gimple_reg (parm))
2961 continue;
2962 if (summary)
2963 {
2964 if (parm_index >= summary->arg_flags.length ())
2965 summary->arg_flags.safe_grow_cleared (count, true);
2966 summary->arg_flags[parm_index] = EAF_UNUSED;
2967 }
2968 else if (summary_lto)
2969 {
2970 if (parm_index >= summary_lto->arg_flags.length ())
2971 summary_lto->arg_flags.safe_grow_cleared (count, true);
2972 summary_lto->arg_flags[parm_index] = EAF_UNUSED;
2973 }
2974 continue;
2975 }
18f0873d 2976 int flags = eaf_analysis.get_ssa_name_flags (name);
e2dd12ab
JH
2977 int attr_flags = fnspec.arg_eaf_flags (parm_index);
2978
2979 if (dump_file && (flags | attr_flags) != flags && !(flags & EAF_UNUSED))
2980 {
2981 fprintf (dump_file,
2982 " Flags for param %i combined with fnspec flags:",
2983 (int)parm_index);
2984 dump_eaf_flags (dump_file, attr_flags, false);
2985 fprintf (dump_file, " determined: ");
2986 dump_eaf_flags (dump_file, flags, true);
2987 }
2988 flags |= attr_flags;
85ebbabd 2989
4341b1b1
JH
2990 /* Eliminate useless flags so we do not end up storing unnecessary
2991 summaries. */
2992
2993 flags = remove_useless_eaf_flags
2994 (flags, ecf_flags,
2995 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
7798ae1a
JH
2996 if (past_flags.length () > parm_index)
2997 {
2998 int past = past_flags[parm_index];
2999 past = remove_useless_eaf_flags
3000 (past, ecf_flags,
3001 VOID_TYPE_P (TREE_TYPE
3002 (TREE_TYPE (current_function_decl))));
3003 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3004 {
3005 fprintf (dump_file,
3006 " Flags for param %i combined with IPA pass:",
3007 (int)parm_index);
3008 dump_eaf_flags (dump_file, past, false);
e2dd12ab
JH
3009 fprintf (dump_file, " determined: ");
3010 dump_eaf_flags (dump_file, flags, true);
7798ae1a
JH
3011 }
3012 if (!(flags & EAF_UNUSED))
3013 flags |= past;
3014 }
520d5ad3
JH
3015
3016 if (flags)
3017 {
85ebbabd
JH
3018 if (summary)
3019 {
3020 if (parm_index >= summary->arg_flags.length ())
3021 summary->arg_flags.safe_grow_cleared (count, true);
3022 summary->arg_flags[parm_index] = flags;
3023 }
3024 else if (summary_lto)
3025 {
3026 if (parm_index >= summary_lto->arg_flags.length ())
3027 summary_lto->arg_flags.safe_grow_cleared (count, true);
3028 summary_lto->arg_flags[parm_index] = flags;
3029 }
18f0873d 3030 eaf_analysis.record_escape_points (name, parm_index, flags);
b8ef019a
JH
3031 }
3032 }
3033 if (retslot)
3034 {
18f0873d 3035 int flags = eaf_analysis.get_ssa_name_flags (retslot);
7798ae1a 3036 int past = past_retslot_flags;
85ebbabd 3037
b8ef019a 3038 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
7798ae1a
JH
3039 past = remove_useless_eaf_flags
3040 (past, ecf_flags,
3041 VOID_TYPE_P (TREE_TYPE
3042 (TREE_TYPE (current_function_decl))));
3043 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3044 {
3045 fprintf (dump_file,
3046 " Retslot flags combined with IPA pass:");
3047 dump_eaf_flags (dump_file, past, false);
e2dd12ab 3048 fprintf (dump_file, " determined: ");
7798ae1a
JH
3049 dump_eaf_flags (dump_file, flags, true);
3050 }
3051 if (!(flags & EAF_UNUSED))
3052 flags |= past;
b8ef019a
JH
3053 if (flags)
3054 {
3055 if (summary)
3056 summary->retslot_flags = flags;
3057 if (summary_lto)
3058 summary_lto->retslot_flags = flags;
18f0873d 3059 eaf_analysis.record_escape_points (retslot,
1f3a3363 3060 MODREF_RETSLOT_PARM, flags);
520d5ad3
JH
3061 }
3062 }
a70c0512
JH
3063 if (static_chain)
3064 {
18f0873d 3065 int flags = eaf_analysis.get_ssa_name_flags (static_chain);
7798ae1a 3066 int past = past_static_chain_flags;
a70c0512
JH
3067
3068 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
7798ae1a
JH
3069 past = remove_useless_eaf_flags
3070 (past, ecf_flags,
3071 VOID_TYPE_P (TREE_TYPE
3072 (TREE_TYPE (current_function_decl))));
3073 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3074 {
3075 fprintf (dump_file,
3076 " Static chain flags combined with IPA pass:");
3077 dump_eaf_flags (dump_file, past, false);
e2dd12ab 3078 fprintf (dump_file, " determined: ");
7798ae1a
JH
3079 dump_eaf_flags (dump_file, flags, true);
3080 }
3081 if (!(flags & EAF_UNUSED))
78dd0de9 3082 flags |= past;
a70c0512
JH
3083 if (flags)
3084 {
3085 if (summary)
3086 summary->static_chain_flags = flags;
3087 if (summary_lto)
3088 summary_lto->static_chain_flags = flags;
18f0873d 3089 eaf_analysis.record_escape_points (static_chain,
1f3a3363 3090 MODREF_STATIC_CHAIN_PARM,
18f0873d 3091 flags);
a70c0512
JH
3092 }
3093 }
520d5ad3
JH
3094}
3095
2cadaa1f 3096/* Analyze function. IPA indicates whether we're running in local mode
494bdadf
JH
 3097 (false) or IPA mode (true).
 3098 Return true if fixup_cfg is needed after the pass. */
d119f34c 3099
494bdadf 3100static bool
2cadaa1f 3101analyze_function (bool ipa)
d119f34c 3102{
494bdadf 3103 bool fixup_cfg = false;
d119f34c 3104 if (dump_file)
2cadaa1f
JH
3105 fprintf (dump_file, "\n\nmodref analyzing '%s' (ipa=%i)%s%s\n",
3106 cgraph_node::get (current_function_decl)->dump_name (), ipa,
67c935c8
JH
3107 TREE_READONLY (current_function_decl) ? " (const)" : "",
3108 DECL_PURE_P (current_function_decl) ? " (pure)" : "");
d119f34c
JH
3109
 3110 /* Don't analyze this function if modref is disabled or the function is marked with the noipa attribute. */
008e7397
JH
3111 if (!flag_ipa_modref
3112 || lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl)))
494bdadf 3113 return false;
d119f34c 3114
d119f34c
JH
3115 /* Compute no-LTO summaries when local optimization is going to happen. */
3116 bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
3117 || (in_lto_p && !flag_wpa
3118 && flag_incremental_link != INCREMENTAL_LINK_LTO));
d119f34c
JH
3119 /* Compute LTO when LTO streaming is going to happen. */
3120 bool lto = ipa && ((flag_lto && !in_lto_p)
3121 || flag_wpa
3122 || flag_incremental_link == INCREMENTAL_LINK_LTO);
71dbabcc
JH
3123 cgraph_node *fnode = cgraph_node::get (current_function_decl);
3124
3125 modref_summary *summary = NULL;
3126 modref_summary_lto *summary_lto = NULL;
3127
f6f704fd
JH
3128 bool past_flags_known = false;
3129 auto_vec <eaf_flags_t> past_flags;
3130 int past_retslot_flags = 0;
3131 int past_static_chain_flags = 0;
3132
71dbabcc
JH
3133 /* Initialize the summary.
3134 If we run in local mode there is possibly pre-existing summary from
3135 IPA pass. Dump it so it is easy to compare if mod-ref info has
3136 improved. */
3137 if (!ipa)
3138 {
3139 if (!optimization_summaries)
3140 optimization_summaries = modref_summaries::create_ggc (symtab);
3141 else /* Remove existing summary if we are re-running the pass. */
3142 {
22c24234
ML
3143 summary = optimization_summaries->get (fnode);
3144 if (summary != NULL
56cb815b 3145 && summary->loads)
71dbabcc 3146 {
22c24234
ML
3147 if (dump_file)
3148 {
3149 fprintf (dump_file, "Past summary:\n");
3150 optimization_summaries->get (fnode)->dump (dump_file);
3151 }
f6f704fd
JH
3152 past_flags.reserve_exact (summary->arg_flags.length ());
3153 past_flags.splice (summary->arg_flags);
3154 past_retslot_flags = summary->retslot_flags;
3155 past_static_chain_flags = summary->static_chain_flags;
3156 past_flags_known = true;
71dbabcc 3157 }
616ca102 3158 optimization_summaries->remove (fnode);
71dbabcc 3159 }
616ca102 3160 summary = optimization_summaries->get_create (fnode);
71dbabcc
JH
3161 gcc_checking_assert (nolto && !lto);
3162 }
8a2fd716 3163 /* In IPA mode we analyze every function precisely once. Assert that. */
71dbabcc
JH
3164 else
3165 {
3166 if (nolto)
3167 {
3168 if (!summaries)
3169 summaries = modref_summaries::create_ggc (symtab);
3170 else
616ca102
ML
3171 summaries->remove (fnode);
3172 summary = summaries->get_create (fnode);
71dbabcc
JH
3173 }
3174 if (lto)
3175 {
3176 if (!summaries_lto)
3177 summaries_lto = modref_summaries_lto::create_ggc (symtab);
3178 else
616ca102
ML
3179 summaries_lto->remove (fnode);
3180 summary_lto = summaries_lto->get_create (fnode);
71dbabcc 3181 }
6cef01c3
JH
3182 if (!fnspec_summaries)
3183 fnspec_summaries = new fnspec_summaries_t (symtab);
85ebbabd
JH
3184 if (!escape_summaries)
3185 escape_summaries = new escape_summaries_t (symtab);
71dbabcc
JH
3186 }
3187
d119f34c
JH
3188
3189 /* Create and initialize summary for F.
3190 Note that summaries may be already allocated from previous
3191 run of the pass. */
3192 if (nolto)
3193 {
3194 gcc_assert (!summary->loads);
8632f8c6 3195 summary->loads = modref_records::create_ggc ();
d119f34c 3196 gcc_assert (!summary->stores);
8632f8c6 3197 summary->stores = modref_records::create_ggc ();
617695cd 3198 summary->writes_errno = false;
992644c3 3199 summary->side_effects = false;
a34edf9a
JH
3200 summary->nondeterministic = false;
3201 summary->calls_interposable = false;
d119f34c
JH
3202 }
3203 if (lto)
3204 {
71dbabcc 3205 gcc_assert (!summary_lto->loads);
8632f8c6 3206 summary_lto->loads = modref_records_lto::create_ggc ();
71dbabcc 3207 gcc_assert (!summary_lto->stores);
8632f8c6 3208 summary_lto->stores = modref_records_lto::create_ggc ();
6cef01c3 3209 summary_lto->writes_errno = false;
992644c3 3210 summary_lto->side_effects = false;
a34edf9a
JH
3211 summary_lto->nondeterministic = false;
3212 summary_lto->calls_interposable = false;
d119f34c 3213 }
520d5ad3 3214
7798ae1a
JH
3215 analyze_parms (summary, summary_lto, ipa,
3216 past_flags, past_retslot_flags, past_static_chain_flags);
520d5ad3 3217
09a4ffb7
JH
3218 {
3219 modref_access_analysis analyzer (ipa, summary, summary_lto);
3220 analyzer.analyze ();
3221 }
494bdadf
JH
3222
3223 if (!ipa && flag_ipa_pure_const)
3224 {
1b62cddc 3225 if (!summary->stores->every_base && !summary->stores->bases
a34edf9a 3226 && !summary->nondeterministic)
494bdadf 3227 {
a34edf9a
JH
3228 if (!summary->loads->every_base && !summary->loads->bases
3229 && !summary->calls_interposable)
616ca102
ML
3230 fixup_cfg = ipa_make_function_const (fnode,
3231 summary->side_effects, true);
494bdadf 3232 else
616ca102
ML
3233 fixup_cfg = ipa_make_function_pure (fnode,
3234 summary->side_effects, true);
494bdadf
JH
3235 }
3236 }
09a4ffb7 3237 int ecf_flags = flags_from_decl_or_type (current_function_decl);
71dbabcc
JH
3238 if (summary && !summary->useful_p (ecf_flags))
3239 {
3240 if (!ipa)
3241 optimization_summaries->remove (fnode);
3242 else
3243 summaries->remove (fnode);
3244 summary = NULL;
3245 }
e0040bc3 3246 if (summary)
5aa91072 3247 summary->finalize (current_function_decl);
71dbabcc
JH
3248 if (summary_lto && !summary_lto->useful_p (ecf_flags))
3249 {
3250 summaries_lto->remove (fnode);
3251 summary_lto = NULL;
3252 }
992644c3 3253
85ebbabd
JH
3254 if (ipa && !summary && !summary_lto)
3255 remove_modref_edge_summaries (fnode);
d119f34c
JH
3256
3257 if (dump_file)
3258 {
3259 fprintf (dump_file, " - modref done with result: tracked.\n");
71dbabcc
JH
3260 if (summary)
3261 summary->dump (dump_file);
3262 if (summary_lto)
3263 summary_lto->dump (dump_file);
85ebbabd 3264 dump_modref_edge_summaries (dump_file, fnode, 2);
f6f704fd
JH
3265 /* To simplify debugging, compare IPA and local solutions. */
3266 if (past_flags_known && summary)
3267 {
3268 size_t len = summary->arg_flags.length ();
3269
3270 if (past_flags.length () > len)
3271 len = past_flags.length ();
3272 for (size_t i = 0; i < len; i++)
3273 {
3274 int old_flags = i < past_flags.length () ? past_flags[i] : 0;
3275 int new_flags = i < summary->arg_flags.length ()
3276 ? summary->arg_flags[i] : 0;
3277 old_flags = remove_useless_eaf_flags
3278 (old_flags, flags_from_decl_or_type (current_function_decl),
3279 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3280 if (old_flags != new_flags)
3281 {
7798ae1a
JH
3282 if ((old_flags & ~new_flags) == 0
3283 || (new_flags & EAF_UNUSED))
f6f704fd
JH
3284 fprintf (dump_file, " Flags for param %i improved:",
3285 (int)i);
f6f704fd 3286 else
7798ae1a 3287 gcc_unreachable ();
f6f704fd
JH
3288 dump_eaf_flags (dump_file, old_flags, false);
3289 fprintf (dump_file, " -> ");
3290 dump_eaf_flags (dump_file, new_flags, true);
3291 }
3292 }
3293 past_retslot_flags = remove_useless_eaf_flags
3294 (past_retslot_flags,
3295 flags_from_decl_or_type (current_function_decl),
3296 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3297 if (past_retslot_flags != summary->retslot_flags)
3298 {
7798ae1a
JH
3299 if ((past_retslot_flags & ~summary->retslot_flags) == 0
3300 || (summary->retslot_flags & EAF_UNUSED))
f6f704fd 3301 fprintf (dump_file, " Flags for retslot improved:");
f6f704fd 3302 else
7798ae1a 3303 gcc_unreachable ();
f6f704fd
JH
3304 dump_eaf_flags (dump_file, past_retslot_flags, false);
3305 fprintf (dump_file, " -> ");
3306 dump_eaf_flags (dump_file, summary->retslot_flags, true);
3307 }
3308 past_static_chain_flags = remove_useless_eaf_flags
3309 (past_static_chain_flags,
3310 flags_from_decl_or_type (current_function_decl),
3311 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3312 if (past_static_chain_flags != summary->static_chain_flags)
3313 {
7798ae1a
JH
3314 if ((past_static_chain_flags & ~summary->static_chain_flags) == 0
3315 || (summary->static_chain_flags & EAF_UNUSED))
f6f704fd 3316 fprintf (dump_file, " Flags for static chain improved:");
f6f704fd 3317 else
7798ae1a 3318 gcc_unreachable ();
f6f704fd
JH
3319 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3320 fprintf (dump_file, " -> ");
3321 dump_eaf_flags (dump_file, summary->static_chain_flags, true);
3322 }
3323 }
3324 else if (past_flags_known && !summary)
3325 {
3326 for (size_t i = 0; i < past_flags.length (); i++)
3327 {
3328 int old_flags = past_flags[i];
3329 old_flags = remove_useless_eaf_flags
3330 (old_flags, flags_from_decl_or_type (current_function_decl),
3331 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3332 if (old_flags)
3333 {
3334 fprintf (dump_file, " Flags for param %i worsened:",
3335 (int)i);
3336 dump_eaf_flags (dump_file, old_flags, false);
3337 fprintf (dump_file, " -> \n");
3338 }
3339 }
3340 past_retslot_flags = remove_useless_eaf_flags
3341 (past_retslot_flags,
3342 flags_from_decl_or_type (current_function_decl),
3343 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3344 if (past_retslot_flags)
3345 {
3346 fprintf (dump_file, " Flags for retslot worsened:");
3347 dump_eaf_flags (dump_file, past_retslot_flags, false);
3348 fprintf (dump_file, " ->\n");
3349 }
3350 past_static_chain_flags = remove_useless_eaf_flags
3351 (past_static_chain_flags,
3352 flags_from_decl_or_type (current_function_decl),
3353 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3354 if (past_static_chain_flags)
3355 {
3356 fprintf (dump_file, " Flags for static chain worsened:");
3357 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3358 fprintf (dump_file, " ->\n");
3359 }
3360 }
d119f34c 3361 }
494bdadf 3362 return fixup_cfg;
d119f34c
JH
3363}
3364
3365/* Callback for generate_summary. */
3366
3367static void
3368modref_generate (void)
3369{
3370 struct cgraph_node *node;
3371 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3372 {
3373 function *f = DECL_STRUCT_FUNCTION (node->decl);
3374 if (!f)
3375 continue;
3376 push_cfun (f);
2cadaa1f 3377 analyze_function (true);
d119f34c
JH
3378 pop_cfun ();
3379 }
3380}
3381
18f0873d
JH
3382} /* ANON namespace. */
3383
d70ef656
JH
3384/* Debugging helper. */
3385
3386void
3387debug_eaf_flags (int flags)
3388{
3389 dump_eaf_flags (stderr, flags, true);
3390}
3391
d119f34c
JH
3392/* Called when a new function is inserted to callgraph late. */
3393
3394void
3395modref_summaries::insert (struct cgraph_node *node, modref_summary *)
3396{
56cb815b
JH
3397 /* Local passes ought to be executed by the pass manager. */
3398 if (this == optimization_summaries)
71dbabcc
JH
3399 {
3400 optimization_summaries->remove (node);
56cb815b
JH
3401 return;
3402 }
1a90e99f
JH
3403 if (!DECL_STRUCT_FUNCTION (node->decl)
3404 || !opt_for_fn (node->decl, flag_ipa_modref))
56cb815b 3405 {
71dbabcc 3406 summaries->remove (node);
56cb815b 3407 return;
71dbabcc
JH
3408 }
3409 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2cadaa1f 3410 analyze_function (true);
71dbabcc
JH
3411 pop_cfun ();
3412}
3413
3414/* Called when a new function is inserted to callgraph late. */
3415
3416void
3417modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
3418{
3419 /* We do not support adding new functions when IPA information is already
3420 propagated. This is done only by SIMD cloning, which is not very
3421 critical. */
3422 if (!DECL_STRUCT_FUNCTION (node->decl)
1a90e99f 3423 || !opt_for_fn (node->decl, flag_ipa_modref)
71dbabcc
JH
3424 || propagated)
3425 {
3426 summaries_lto->remove (node);
3427 return;
3428 }
d119f34c 3429 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2cadaa1f 3430 analyze_function (true);
d119f34c
JH
3431 pop_cfun ();
3432}
3433
3434/* Called when a new clone is inserted into the callgraph late. */
3435
3436void
56cb815b 3437modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
d119f34c
JH
3438 modref_summary *src_data,
3439 modref_summary *dst_data)
3440{
8a2fd716 3441 /* Do not duplicate optimization summaries; we do not handle parameter
56cb815b
JH
3442 transforms on them. */
3443 if (this == optimization_summaries)
d119f34c 3444 {
56cb815b
JH
3445 optimization_summaries->remove (dst);
3446 return;
d119f34c 3447 }
8632f8c6 3448 dst_data->stores = modref_records::create_ggc ();
56cb815b 3449 dst_data->stores->copy_from (src_data->stores);
8632f8c6 3450 dst_data->loads = modref_records::create_ggc ();
56cb815b 3451 dst_data->loads->copy_from (src_data->loads);
64f3e71c
JH
3452 dst_data->kills.reserve_exact (src_data->kills.length ());
3453 dst_data->kills.splice (src_data->kills);
617695cd 3454 dst_data->writes_errno = src_data->writes_errno;
992644c3 3455 dst_data->side_effects = src_data->side_effects;
a34edf9a
JH
3456 dst_data->nondeterministic = src_data->nondeterministic;
3457 dst_data->calls_interposable = src_data->calls_interposable;
5962efe9
JH
3458 if (src_data->arg_flags.length ())
3459 dst_data->arg_flags = src_data->arg_flags.copy ();
b8ef019a 3460 dst_data->retslot_flags = src_data->retslot_flags;
a70c0512 3461 dst_data->static_chain_flags = src_data->static_chain_flags;
71dbabcc
JH
3462}
3463
3464/* Called when a new clone is inserted into the callgraph late. */
3465
3466void
3467modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
3468 modref_summary_lto *src_data,
3469 modref_summary_lto *dst_data)
3470{
8a2fd716 3471 /* Be sure that no further cloning happens after ipa-modref. If it does
fe90c504
JH
3472 we will need to update signatures for possible param changes. */
3473 gcc_checking_assert (!((modref_summaries_lto *)summaries_lto)->propagated);
8632f8c6 3474 dst_data->stores = modref_records_lto::create_ggc ();
56cb815b 3475 dst_data->stores->copy_from (src_data->stores);
8632f8c6 3476 dst_data->loads = modref_records_lto::create_ggc ();
56cb815b 3477 dst_data->loads->copy_from (src_data->loads);
74509b96
JH
3478 dst_data->kills.reserve_exact (src_data->kills.length ());
3479 dst_data->kills.splice (src_data->kills);
6cef01c3 3480 dst_data->writes_errno = src_data->writes_errno;
992644c3 3481 dst_data->side_effects = src_data->side_effects;
a34edf9a
JH
3482 dst_data->nondeterministic = src_data->nondeterministic;
3483 dst_data->calls_interposable = src_data->calls_interposable;
5962efe9
JH
3484 if (src_data->arg_flags.length ())
3485 dst_data->arg_flags = src_data->arg_flags.copy ();
b8ef019a 3486 dst_data->retslot_flags = src_data->retslot_flags;
a70c0512 3487 dst_data->static_chain_flags = src_data->static_chain_flags;
d119f34c
JH
3488}
3489
3490namespace
3491{
3492/* Definition of the modref pass on GIMPLE. */
3493const pass_data pass_data_modref = {
3494 GIMPLE_PASS,
3495 "modref",
3496 OPTGROUP_IPA,
3497 TV_TREE_MODREF,
3498 (PROP_cfg | PROP_ssa),
3499 0,
3500 0,
3501 0,
3502 0,
3503};
3504
3505class pass_modref : public gimple_opt_pass
3506{
3507 public:
3508 pass_modref (gcc::context *ctxt)
3509 : gimple_opt_pass (pass_data_modref, ctxt) {}
3510
d119f34c 3511 /* opt_pass methods: */
725793af 3512 opt_pass *clone () final override
d119f34c
JH
3513 {
3514 return new pass_modref (m_ctxt);
3515 }
725793af 3516 bool gate (function *) final override
d119f34c
JH
3517 {
3518 return flag_ipa_modref;
3519 }
725793af 3520 unsigned int execute (function *) final override;
d119f34c
JH
3521};
3522
3523/* Encode TT to the output block OB using the summary streaming API. */
3524
3525static void
3526write_modref_records (modref_records_lto *tt, struct output_block *ob)
3527{
d119f34c
JH
3528 streamer_write_uhwi (ob, tt->every_base);
3529 streamer_write_uhwi (ob, vec_safe_length (tt->bases));
74509b96 3530 for (auto base_node : tt->bases)
d119f34c
JH
3531 {
3532 stream_write_tree (ob, base_node->base, true);
3533
3534 streamer_write_uhwi (ob, base_node->every_ref);
3535 streamer_write_uhwi (ob, vec_safe_length (base_node->refs));
c33f4742 3536
74509b96 3537 for (auto ref_node : base_node->refs)
d119f34c
JH
3538 {
3539 stream_write_tree (ob, ref_node->ref, true);
c33f4742
JH
3540 streamer_write_uhwi (ob, ref_node->every_access);
3541 streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));
3542
74509b96
JH
3543 for (auto access_node : ref_node->accesses)
3544 access_node.stream_out (ob);
d119f34c
JH
3545 }
3546 }
3547}
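
/* Illustrative sketch (not part of GCC): write_modref_records above emits a
   count-prefixed, depth-first stream: the number of bases, then for each
   base its type, the number of refs, and for each ref the number of
   accesses followed by the accesses themselves (the every_* collapse flags
   are omitted here for brevity).  The toy round trip below mirrors that
   shape with the LTO streamer replaced by a plain vector of integers; all
   toy_* names are hypothetical.  */
#if 0
#include <vector>
#include <cstdint>
#include <cassert>

struct toy_ref  { uint64_t ref;  std::vector<uint64_t> accesses; };
struct toy_base { uint64_t base; std::vector<toy_ref> refs; };

/* Writer: one count per level, children immediately after their parent.  */
static void
toy_write (const std::vector<toy_base> &bases, std::vector<uint64_t> &out)
{
  out.push_back (bases.size ());
  for (const toy_base &b : bases)
    {
      out.push_back (b.base);
      out.push_back (b.refs.size ());
      for (const toy_ref &r : b.refs)
        {
          out.push_back (r.ref);
          out.push_back (r.accesses.size ());
          for (uint64_t a : r.accesses)
            out.push_back (a);
        }
    }
}

/* Reader: consumes exactly the fields the writer produced, in order.  */
static std::vector<toy_base>
toy_read (const std::vector<uint64_t> &in)
{
  size_t pos = 0;
  std::vector<toy_base> bases (in[pos++]);
  for (toy_base &b : bases)
    {
      b.base = in[pos++];
      b.refs.resize (in[pos++]);
      for (toy_ref &r : b.refs)
        {
          r.ref = in[pos++];
          r.accesses.resize (in[pos++]);
          for (uint64_t &a : r.accesses)
            a = in[pos++];
        }
    }
  assert (pos == in.size ());
  return bases;
}
#endif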
3548
3549/* Read a modref_tree from the input block IB using the data from DATA_IN.
3550 This assumes that the tree was encoded using write_modref_records.
3551 Either nolto_ret or lto_ret is initialized by the tree depending on whether
46a27415 3552 LTO streaming is expected or not. */
d119f34c 3553
18f0873d 3554static void
8632f8c6
JH
3555read_modref_records (tree decl,
3556 lto_input_block *ib, struct data_in *data_in,
d119f34c
JH
3557 modref_records **nolto_ret,
3558 modref_records_lto **lto_ret)
3559{
8632f8c6
JH
3560 size_t max_bases = opt_for_fn (decl, param_modref_max_bases);
3561 size_t max_refs = opt_for_fn (decl, param_modref_max_refs);
3562 size_t max_accesses = opt_for_fn (decl, param_modref_max_accesses);
d119f34c 3563
71dbabcc 3564 if (lto_ret)
8632f8c6 3565 *lto_ret = modref_records_lto::create_ggc ();
71dbabcc 3566 if (nolto_ret)
8632f8c6 3567 *nolto_ret = modref_records::create_ggc ();
71dbabcc 3568 gcc_checking_assert (lto_ret || nolto_ret);
d119f34c
JH
3569
3570 size_t every_base = streamer_read_uhwi (ib);
3571 size_t nbase = streamer_read_uhwi (ib);
3572
3573 gcc_assert (!every_base || nbase == 0);
3574 if (every_base)
3575 {
71dbabcc 3576 if (nolto_ret)
d119f34c 3577 (*nolto_ret)->collapse ();
71dbabcc 3578 if (lto_ret)
d119f34c
JH
3579 (*lto_ret)->collapse ();
3580 }
3581 for (size_t i = 0; i < nbase; i++)
3582 {
3583 tree base_tree = stream_read_tree (ib, data_in);
3584 modref_base_node <alias_set_type> *nolto_base_node = NULL;
3585 modref_base_node <tree> *lto_base_node = NULL;
3586
3587 /* At stream in time we have LTO alias info. Check if we streamed in
3588 something obviously unnecessary. Do not glob types by alias sets;
3589 it is not 100% clear that ltrans types will get merged the same way.
3590 Types may get refined based on ODR type conflicts. */
3591 if (base_tree && !get_alias_set (base_tree))
3592 {
3593 if (dump_file)
3594 {
3595 fprintf (dump_file, "Streamed in alias set 0 type ");
3596 print_generic_expr (dump_file, base_tree);
3597 fprintf (dump_file, "\n");
3598 }
3599 base_tree = NULL;
3600 }
3601
71dbabcc 3602 if (nolto_ret)
d119f34c
JH
3603 nolto_base_node = (*nolto_ret)->insert_base (base_tree
3604 ? get_alias_set (base_tree)
8632f8c6 3605 : 0, 0, INT_MAX);
71dbabcc 3606 if (lto_ret)
8632f8c6 3607 lto_base_node = (*lto_ret)->insert_base (base_tree, 0, max_bases);
d119f34c
JH
3608 size_t every_ref = streamer_read_uhwi (ib);
3609 size_t nref = streamer_read_uhwi (ib);
3610
3611 gcc_assert (!every_ref || nref == 0);
3612 if (every_ref)
3613 {
3614 if (nolto_base_node)
3615 nolto_base_node->collapse ();
3616 if (lto_base_node)
3617 lto_base_node->collapse ();
3618 }
3619 for (size_t j = 0; j < nref; j++)
3620 {
3621 tree ref_tree = stream_read_tree (ib, data_in);
3622
3623 if (ref_tree && !get_alias_set (ref_tree))
3624 {
3625 if (dump_file)
3626 {
3627 fprintf (dump_file, "Streamed in alias set 0 type ");
3628 print_generic_expr (dump_file, ref_tree);
3629 fprintf (dump_file, "\n");
3630 }
c33f4742 3631 ref_tree = NULL;
d119f34c
JH
3632 }
3633
c33f4742
JH
3634 modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
3635 modref_ref_node <tree> *lto_ref_node = NULL;
3636
d119f34c 3637 if (nolto_base_node)
c33f4742
JH
3638 nolto_ref_node
3639 = nolto_base_node->insert_ref (ref_tree
3640 ? get_alias_set (ref_tree) : 0,
3641 max_refs);
d119f34c 3642 if (lto_base_node)
c33f4742
JH
3643 lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);
3644
3645 size_t every_access = streamer_read_uhwi (ib);
3646 size_t naccesses = streamer_read_uhwi (ib);
3647
425369bf
JH
3648 if (nolto_ref_node && every_access)
3649 nolto_ref_node->collapse ();
3650 if (lto_ref_node && every_access)
3651 lto_ref_node->collapse ();
c33f4742
JH
3652
3653 for (size_t k = 0; k < naccesses; k++)
3654 {
74509b96 3655 modref_access_node a = modref_access_node::stream_in (ib);
c33f4742 3656 if (nolto_ref_node)
5c85f295 3657 nolto_ref_node->insert_access (a, max_accesses, false);
c33f4742 3658 if (lto_ref_node)
5c85f295 3659 lto_ref_node->insert_access (a, max_accesses, false);
c33f4742 3660 }
d119f34c
JH
3661 }
3662 }
71dbabcc 3663 if (lto_ret)
c33f4742 3664 (*lto_ret)->cleanup ();
71dbabcc 3665 if (nolto_ret)
c33f4742 3666 (*nolto_ret)->cleanup ();
d119f34c
JH
3667}
3668
85ebbabd
JH
3669/* Write ESUM to BP. */
3670
3671static void
3672modref_write_escape_summary (struct bitpack_d *bp, escape_summary *esum)
3673{
3674 if (!esum)
3675 {
3676 bp_pack_var_len_unsigned (bp, 0);
3677 return;
3678 }
3679 bp_pack_var_len_unsigned (bp, esum->esc.length ());
3680 unsigned int i;
3681 escape_entry *ee;
3682 FOR_EACH_VEC_ELT (esum->esc, i, ee)
3683 {
b8ef019a 3684 bp_pack_var_len_int (bp, ee->parm_index);
85ebbabd
JH
3685 bp_pack_var_len_unsigned (bp, ee->arg);
3686 bp_pack_var_len_unsigned (bp, ee->min_flags);
3687 bp_pack_value (bp, ee->direct, 1);
3688 }
3689}
3690
3691/* Read escape summary for E from BP. */
3692
3693static void
3694modref_read_escape_summary (struct bitpack_d *bp, cgraph_edge *e)
3695{
3696 unsigned int n = bp_unpack_var_len_unsigned (bp);
3697 if (!n)
3698 return;
3699 escape_summary *esum = escape_summaries->get_create (e);
3700 esum->esc.reserve_exact (n);
3701 for (unsigned int i = 0; i < n; i++)
3702 {
3703 escape_entry ee;
b8ef019a 3704 ee.parm_index = bp_unpack_var_len_int (bp);
85ebbabd
JH
3705 ee.arg = bp_unpack_var_len_unsigned (bp);
3706 ee.min_flags = bp_unpack_var_len_unsigned (bp);
3707 ee.direct = bp_unpack_value (bp, 1);
3708 esum->esc.quick_push (ee);
3709 }
3710}
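
/* Illustrative sketch (not part of GCC): the two functions above rely on the
   writer and the reader agreeing field for field: a count, then for every
   escape entry its parm_index, arg, min_flags and direct bit, with a zero
   count standing for "no summary".  The standalone toy below shows the same
   symmetric discipline with the bitpack replaced by a vector of integers;
   all toy_* names are hypothetical.  */
#if 0
#include <vector>
#include <cstdint>

struct toy_escape_entry
{
  int parm_index;
  unsigned arg;
  unsigned min_flags;
  bool direct;
};

static void
toy_write_escapes (const std::vector<toy_escape_entry> &esc,
                   std::vector<int64_t> &out)
{
  out.push_back (esc.size ());
  for (const toy_escape_entry &e : esc)
    {
      out.push_back (e.parm_index);
      out.push_back (e.arg);
      out.push_back (e.min_flags);
      out.push_back (e.direct);
    }
}

static std::vector<toy_escape_entry>
toy_read_escapes (const std::vector<int64_t> &in, size_t &pos)
{
  std::vector<toy_escape_entry> esc (in[pos++]);
  for (toy_escape_entry &e : esc)
    {
      e.parm_index = (int) in[pos++];
      e.arg = (unsigned) in[pos++];
      e.min_flags = (unsigned) in[pos++];
      e.direct = in[pos++] != 0;
    }
  return esc;
}
#endif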
3711
d119f34c
JH
3712/* Callback for write_summary. */
3713
3714static void
3715modref_write ()
3716{
3717 struct output_block *ob = create_output_block (LTO_section_ipa_modref);
3718 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3719 unsigned int count = 0;
3720 int i;
3721
71dbabcc 3722 if (!summaries_lto)
d119f34c
JH
3723 {
3724 streamer_write_uhwi (ob, 0);
3725 streamer_write_char_stream (ob->main_stream, 0);
3726 produce_asm (ob, NULL);
3727 destroy_output_block (ob);
3728 return;
3729 }
3730
3731 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3732 {
3733 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3734 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
71dbabcc 3735 modref_summary_lto *r;
d119f34c
JH
3736
3737 if (cnode && cnode->definition && !cnode->alias
71dbabcc
JH
3738 && (r = summaries_lto->get (cnode))
3739 && r->useful_p (flags_from_decl_or_type (cnode->decl)))
d119f34c
JH
3740 count++;
3741 }
3742 streamer_write_uhwi (ob, count);
3743
3744 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3745 {
3746 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3747 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
3748
3749 if (cnode && cnode->definition && !cnode->alias)
3750 {
71dbabcc 3751 modref_summary_lto *r = summaries_lto->get (cnode);
d119f34c 3752
71dbabcc 3753 if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
d119f34c
JH
3754 continue;
3755
3756 streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));
3757
85ebbabd
JH
3758 streamer_write_uhwi (ob, r->arg_flags.length ());
3759 for (unsigned int i = 0; i < r->arg_flags.length (); i++)
8da8ed43 3760 streamer_write_uhwi (ob, r->arg_flags[i]);
b8ef019a 3761 streamer_write_uhwi (ob, r->retslot_flags);
a70c0512 3762 streamer_write_uhwi (ob, r->static_chain_flags);
85ebbabd 3763
56cb815b
JH
3764 write_modref_records (r->loads, ob);
3765 write_modref_records (r->stores, ob);
74509b96
JH
3766 streamer_write_uhwi (ob, r->kills.length ());
3767 for (auto kill : r->kills)
3768 kill.stream_out (ob);
6cef01c3
JH
3769
3770 struct bitpack_d bp = bitpack_create (ob->main_stream);
3771 bp_pack_value (&bp, r->writes_errno, 1);
992644c3 3772 bp_pack_value (&bp, r->side_effects, 1);
a34edf9a
JH
3773 bp_pack_value (&bp, r->nondeterministic, 1);
3774 bp_pack_value (&bp, r->calls_interposable, 1);
6cef01c3
JH
3775 if (!flag_wpa)
3776 {
3777 for (cgraph_edge *e = cnode->indirect_calls;
3778 e; e = e->next_callee)
3779 {
3780 class fnspec_summary *sum = fnspec_summaries->get (e);
3781 bp_pack_value (&bp, sum != NULL, 1);
3782 if (sum)
3783 bp_pack_string (ob, &bp, sum->fnspec, true);
85ebbabd
JH
3784 class escape_summary *esum = escape_summaries->get (e);
3785 modref_write_escape_summary (&bp, esum);
6cef01c3
JH
3786 }
3787 for (cgraph_edge *e = cnode->callees; e; e = e->next_callee)
3788 {
3789 class fnspec_summary *sum = fnspec_summaries->get (e);
3790 bp_pack_value (&bp, sum != NULL, 1);
3791 if (sum)
3792 bp_pack_string (ob, &bp, sum->fnspec, true);
85ebbabd
JH
3793 class escape_summary *esum = escape_summaries->get (e);
3794 modref_write_escape_summary (&bp, esum);
6cef01c3
JH
3795 }
3796 }
3797 streamer_write_bitpack (&bp);
d119f34c
JH
3798 }
3799 }
3800 streamer_write_char_stream (ob->main_stream, 0);
3801 produce_asm (ob, NULL);
3802 destroy_output_block (ob);
3803}
3804
3805static void
3806read_section (struct lto_file_decl_data *file_data, const char *data,
3807 size_t len)
3808{
3809 const struct lto_function_header *header
3810 = (const struct lto_function_header *) data;
3811 const int cfg_offset = sizeof (struct lto_function_header);
3812 const int main_offset = cfg_offset + header->cfg_size;
3813 const int string_offset = main_offset + header->main_size;
3814 struct data_in *data_in;
3815 unsigned int i;
3816 unsigned int f_count;
3817
3818 lto_input_block ib ((const char *) data + main_offset, header->main_size,
3819 file_data->mode_table);
3820
3821 data_in
3822 = lto_data_in_create (file_data, (const char *) data + string_offset,
3823 header->string_size, vNULL);
3824 f_count = streamer_read_uhwi (&ib);
3825 for (i = 0; i < f_count; i++)
3826 {
3827 struct cgraph_node *node;
3828 lto_symtab_encoder_t encoder;
3829
3830 unsigned int index = streamer_read_uhwi (&ib);
3831 encoder = file_data->symtab_node_encoder;
3832 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
3833 index));
3834
71dbabcc
JH
3835 modref_summary *modref_sum = summaries
3836 ? summaries->get_create (node) : NULL;
3837 modref_summary_lto *modref_sum_lto = summaries_lto
3838 ? summaries_lto->get_create (node)
3839 : NULL;
71dbabcc
JH
3840 if (optimization_summaries)
3841 modref_sum = optimization_summaries->get_create (node);
3842
ea937e7d 3843 if (modref_sum)
992644c3
JH
3844 {
3845 modref_sum->writes_errno = false;
3846 modref_sum->side_effects = false;
a34edf9a
JH
3847 modref_sum->nondeterministic = false;
3848 modref_sum->calls_interposable = false;
992644c3 3849 }
6cef01c3 3850 if (modref_sum_lto)
992644c3
JH
3851 {
3852 modref_sum_lto->writes_errno = false;
3853 modref_sum_lto->side_effects = false;
a34edf9a
JH
3854 modref_sum_lto->nondeterministic = false;
3855 modref_sum_lto->calls_interposable = false;
992644c3 3856 }
ea937e7d 3857
71dbabcc
JH
3858 gcc_assert (!modref_sum || (!modref_sum->loads
3859 && !modref_sum->stores));
3860 gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads
3861 && !modref_sum_lto->stores));
85ebbabd
JH
3862 unsigned int args = streamer_read_uhwi (&ib);
3863 if (args && modref_sum)
3864 modref_sum->arg_flags.reserve_exact (args);
3865 if (args && modref_sum_lto)
3866 modref_sum_lto->arg_flags.reserve_exact (args);
3867 for (unsigned int i = 0; i < args; i++)
3868 {
8da8ed43 3869 eaf_flags_t flags = streamer_read_uhwi (&ib);
85ebbabd
JH
3870 if (modref_sum)
3871 modref_sum->arg_flags.quick_push (flags);
3872 if (modref_sum_lto)
3873 modref_sum_lto->arg_flags.quick_push (flags);
3874 }
b8ef019a
JH
3875 eaf_flags_t flags = streamer_read_uhwi (&ib);
3876 if (modref_sum)
3877 modref_sum->retslot_flags = flags;
3878 if (modref_sum_lto)
3879 modref_sum_lto->retslot_flags = flags;
a70c0512
JH
3880
3881 flags = streamer_read_uhwi (&ib);
3882 if (modref_sum)
3883 modref_sum->static_chain_flags = flags;
3884 if (modref_sum_lto)
3885 modref_sum_lto->static_chain_flags = flags;
3886
8632f8c6 3887 read_modref_records (node->decl, &ib, data_in,
56cb815b
JH
3888 modref_sum ? &modref_sum->loads : NULL,
3889 modref_sum_lto ? &modref_sum_lto->loads : NULL);
8632f8c6 3890 read_modref_records (node->decl, &ib, data_in,
56cb815b
JH
3891 modref_sum ? &modref_sum->stores : NULL,
3892 modref_sum_lto ? &modref_sum_lto->stores : NULL);
74509b96
JH
3893 int j = streamer_read_uhwi (&ib);
3894 if (j && modref_sum)
3895 modref_sum->kills.reserve_exact (j);
3896 if (j && modref_sum_lto)
3897 modref_sum_lto->kills.reserve_exact (j);
3898 for (int k = 0; k < j; k++)
3899 {
3900 modref_access_node a = modref_access_node::stream_in (&ib);
3901
3902 if (modref_sum)
3903 modref_sum->kills.quick_push (a);
3904 if (modref_sum_lto)
3905 modref_sum_lto->kills.quick_push (a);
3906 }
6cef01c3
JH
3907 struct bitpack_d bp = streamer_read_bitpack (&ib);
3908 if (bp_unpack_value (&bp, 1))
3909 {
3910 if (modref_sum)
3911 modref_sum->writes_errno = true;
3912 if (modref_sum_lto)
3913 modref_sum_lto->writes_errno = true;
3914 }
992644c3
JH
3915 if (bp_unpack_value (&bp, 1))
3916 {
3917 if (modref_sum)
3918 modref_sum->side_effects = true;
3919 if (modref_sum_lto)
3920 modref_sum_lto->side_effects = true;
3921 }
a34edf9a
JH
3922 if (bp_unpack_value (&bp, 1))
3923 {
3924 if (modref_sum)
3925 modref_sum->nondeterministic = true;
3926 if (modref_sum_lto)
3927 modref_sum_lto->nondeterministic = true;
3928 }
3929 if (bp_unpack_value (&bp, 1))
3930 {
3931 if (modref_sum)
3932 modref_sum->calls_interposable = true;
3933 if (modref_sum_lto)
3934 modref_sum_lto->calls_interposable = true;
3935 }
6cef01c3
JH
3936 if (!flag_ltrans)
3937 {
3938 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
3939 {
3940 if (bp_unpack_value (&bp, 1))
3941 {
3942 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3943 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3944 }
85ebbabd 3945 modref_read_escape_summary (&bp, e);
6cef01c3
JH
3946 }
3947 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
3948 {
3949 if (bp_unpack_value (&bp, 1))
3950 {
3951 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3952 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3953 }
85ebbabd 3954 modref_read_escape_summary (&bp, e);
6cef01c3
JH
3955 }
3956 }
e0040bc3 3957 if (flag_ltrans)
5aa91072 3958 modref_sum->finalize (node->decl);
d119f34c
JH
3959 if (dump_file)
3960 {
3961 fprintf (dump_file, "Read modref for %s\n",
3962 node->dump_name ());
71dbabcc
JH
3963 if (modref_sum)
3964 modref_sum->dump (dump_file);
3965 if (modref_sum_lto)
3966 modref_sum_lto->dump (dump_file);
85ebbabd 3967 dump_modref_edge_summaries (dump_file, node, 4);
d119f34c 3968 }
d119f34c
JH
3969 }
3970
3971 lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
3972 len);
3973 lto_data_in_delete (data_in);
3974}
3975
3976/* Callback for read_summary. */
3977
3978static void
3979modref_read (void)
3980{
3981 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3982 struct lto_file_decl_data *file_data;
3983 unsigned int j = 0;
3984
71dbabcc
JH
3985 gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto);
3986 if (flag_ltrans)
3987 optimization_summaries = modref_summaries::create_ggc (symtab);
3988 else
3989 {
3990 if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
3991 summaries_lto = modref_summaries_lto::create_ggc (symtab);
3992 if (!flag_wpa
3993 || (flag_incremental_link == INCREMENTAL_LINK_LTO
3994 && flag_fat_lto_objects))
3995 summaries = modref_summaries::create_ggc (symtab);
6cef01c3
JH
3996 if (!fnspec_summaries)
3997 fnspec_summaries = new fnspec_summaries_t (symtab);
85ebbabd
JH
3998 if (!escape_summaries)
3999 escape_summaries = new escape_summaries_t (symtab);
71dbabcc 4000 }
d119f34c
JH
4001
4002 while ((file_data = file_data_vec[j++]))
4003 {
4004 size_t len;
4005 const char *data = lto_get_summary_section_data (file_data,
4006 LTO_section_ipa_modref,
4007 &len);
4008 if (data)
4009 read_section (file_data, data, len);
4010 else
4011 /* Fatal error here. We do not want to support compiling ltrans units
4012 with a different version of the compiler or different flags than the WPA
4013 unit, so this should never happen. */
4014 fatal_error (input_location,
4015 "IPA modref summary is missing in input file");
4016 }
4017}
4018
85ebbabd
JH
4019/* Recompute arg_flags for param adjustments in INFO. */
4020
4021static void
8da8ed43 4022remap_arg_flags (auto_vec <eaf_flags_t> &arg_flags, clone_info *info)
85ebbabd 4023{
8da8ed43 4024 auto_vec<eaf_flags_t> old = arg_flags.copy ();
85ebbabd
JH
4025 int max = -1;
4026 size_t i;
4027 ipa_adjusted_param *p;
4028
4029 arg_flags.release ();
4030
4031 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
4032 {
4033 int o = info->param_adjustments->get_original_index (i);
4034 if (o >= 0 && (int)old.length () > o && old[o])
4035 max = i;
4036 }
5962efe9 4037 if (max >= 0)
85ebbabd
JH
4038 arg_flags.safe_grow_cleared (max + 1, true);
4039 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
4040 {
4041 int o = info->param_adjustments->get_original_index (i);
4042 if (o >= 0 && (int)old.length () > o && old[o])
4043 arg_flags[i] = old[o];
4044 }
4045}
4046
02c80893 4047/* Update kills according to the parm map MAP. */
74509b96
JH
4048
4049static void
4050remap_kills (vec <modref_access_node> &kills, const vec <int> &map)
4051{
4052 for (size_t i = 0; i < kills.length ();)
4053 if (kills[i].parm_index >= 0)
4054 {
4055 if (kills[i].parm_index < (int)map.length ()
4056 && map[kills[i].parm_index] != MODREF_UNKNOWN_PARM)
4057 {
4058 kills[i].parm_index = map[kills[i].parm_index];
4059 i++;
4060 }
4061 else
4062 kills.unordered_remove (i);
4063 }
4064 else
4065 i++;
4066}
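
/* Illustrative sketch (not part of GCC): suppose a clone of foo (a, b, c)
   drops parameter 1, so the clone's parameter 0 is the old 0 and its
   parameter 1 is the old 2.  remap_arg_flags then turns old flags {A, B, C}
   into {A, C}, and remap_kills drops kills on the removed parameter while
   renumbering kills on old parameter 2 to new parameter 1.  The standalone
   helper below shows the flag reshuffle under that assumption; all toy_*
   names are hypothetical.  */
#if 0
#include <vector>

/* NEW_TO_OLD[i] gives the original index of the clone's parameter i, or -1
   if it has no original counterpart.  */
static std::vector<int>
toy_remap_arg_flags (const std::vector<int> &old_flags,
                     const std::vector<int> &new_to_old)
{
  std::vector<int> res (new_to_old.size (), 0);
  for (size_t i = 0; i < new_to_old.size (); i++)
    {
      int o = new_to_old[i];
      if (o >= 0 && (size_t) o < old_flags.size ())
        res[i] = old_flags[o];
    }
  return res;
}
#endif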
4067
c8fd2be1
JH
4068/* If signature changed, update the summary. */
4069
fe90c504
JH
4070static void
4071update_signature (struct cgraph_node *node)
c8fd2be1 4072{
ae7a23a3
JH
4073 clone_info *info = clone_info::get (node);
4074 if (!info || !info->param_adjustments)
fe90c504
JH
4075 return;
4076
4077 modref_summary *r = optimization_summaries
4078 ? optimization_summaries->get (node) : NULL;
4079 modref_summary_lto *r_lto = summaries_lto
4080 ? summaries_lto->get (node) : NULL;
4081 if (!r && !r_lto)
4082 return;
c8fd2be1
JH
4083 if (dump_file)
4084 {
4085 fprintf (dump_file, "Updating summary for %s from:\n",
4086 node->dump_name ());
85ebbabd
JH
4087 if (r)
4088 r->dump (dump_file);
4089 if (r_lto)
4090 r_lto->dump (dump_file);
c8fd2be1
JH
4091 }
4092
4093 size_t i, max = 0;
4094 ipa_adjusted_param *p;
4095
ae7a23a3 4096 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
c8fd2be1 4097 {
ae7a23a3 4098 int idx = info->param_adjustments->get_original_index (i);
c8fd2be1
JH
4099 if (idx > (int)max)
4100 max = idx;
4101 }
4102
4103 auto_vec <int, 32> map;
4104
5d2cedaa 4105 map.reserve (max + 1);
c8fd2be1 4106 for (i = 0; i <= max; i++)
992644c3 4107 map.quick_push (MODREF_UNKNOWN_PARM);
ae7a23a3 4108 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
c8fd2be1 4109 {
ae7a23a3 4110 int idx = info->param_adjustments->get_original_index (i);
c8fd2be1 4111 if (idx >= 0)
2f61125f 4112 map[idx] = i;
c8fd2be1 4113 }
fe90c504
JH
4114 if (r)
4115 {
4116 r->loads->remap_params (&map);
4117 r->stores->remap_params (&map);
74509b96 4118 remap_kills (r->kills, map);
85ebbabd
JH
4119 if (r->arg_flags.length ())
4120 remap_arg_flags (r->arg_flags, info);
fe90c504
JH
4121 }
4122 if (r_lto)
4123 {
4124 r_lto->loads->remap_params (&map);
4125 r_lto->stores->remap_params (&map);
74509b96 4126 remap_kills (r_lto->kills, map);
85ebbabd
JH
4127 if (r_lto->arg_flags.length ())
4128 remap_arg_flags (r_lto->arg_flags, info);
fe90c504 4129 }
c8fd2be1
JH
4130 if (dump_file)
4131 {
4132 fprintf (dump_file, "to:\n");
fe90c504 4133 if (r)
6cef01c3 4134 r->dump (dump_file);
fe90c504 4135 if (r_lto)
6cef01c3 4136 r_lto->dump (dump_file);
c8fd2be1 4137 }
e0040bc3 4138 if (r)
5aa91072 4139 r->finalize (node->decl);
fe90c504 4140 return;
c8fd2be1
JH
4141}
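
/* Illustrative sketch (not part of GCC): update_signature above first
   inverts the clone's adjusted-parameter list into a map indexed by the
   original parameter number.  If the clone keeps original parameters 0 and
   2 and drops 1, the adjusted list maps new 0 -> old 0 and new 1 -> old 2,
   and the inverse map becomes old -> new = {0, unknown, 1}; loads, stores,
   kills and argument flags are then pushed through it.  A standalone
   version of that map construction, with hypothetical names:  */
#if 0
#include <vector>

static std::vector<int>
toy_build_remap (const std::vector<int> &new_to_old, int unknown)
{
  int max_old = -1;
  for (int o : new_to_old)
    if (o > max_old)
      max_old = o;
  std::vector<int> old_to_new (max_old + 1, unknown);
  for (size_t i = 0; i < new_to_old.size (); i++)
    if (new_to_old[i] >= 0)
      old_to_new[new_to_old[i]] = (int) i;
  return old_to_new;
}
#endif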
4142
d119f34c
JH
4143/* Definition of the modref IPA pass. */
4144const pass_data pass_data_ipa_modref =
4145{
4146 IPA_PASS, /* type */
4147 "modref", /* name */
4148 OPTGROUP_IPA, /* optinfo_flags */
4149 TV_IPA_MODREF, /* tv_id */
4150 0, /* properties_required */
4151 0, /* properties_provided */
4152 0, /* properties_destroyed */
4153 0, /* todo_flags_start */
4154 ( TODO_dump_symtab ), /* todo_flags_finish */
4155};
4156
4157class pass_ipa_modref : public ipa_opt_pass_d
4158{
4159public:
4160 pass_ipa_modref (gcc::context *ctxt)
4161 : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
4162 modref_generate, /* generate_summary */
4163 modref_write, /* write_summary */
4164 modref_read, /* read_summary */
4165 modref_write, /* write_optimization_summary */
4166 modref_read, /* read_optimization_summary */
4167 NULL, /* stmt_fixup */
4168 0, /* function_transform_todo_flags_start */
fe90c504 4169 NULL, /* function_transform */
d119f34c
JH
4170 NULL) /* variable_transform */
4171 {}
4172
4173 /* opt_pass methods: */
725793af
DM
4174 opt_pass *clone () final override { return new pass_ipa_modref (m_ctxt); }
4175 bool gate (function *) final override
d119f34c
JH
4176 {
4177 return true;
4178 }
725793af 4179 unsigned int execute (function *) final override;
d119f34c
JH
4180
4181};
4182
4183}
4184
2cadaa1f 4185unsigned int pass_modref::execute (function *)
d119f34c 4186{
2cadaa1f 4187 if (analyze_function (false))
494bdadf 4188 return execute_fixup_cfg ();
d119f34c
JH
4189 return 0;
4190}
4191
4192gimple_opt_pass *
4193make_pass_modref (gcc::context *ctxt)
4194{
4195 return new pass_modref (ctxt);
4196}
4197
4198ipa_opt_pass_d *
4199make_pass_ipa_modref (gcc::context *ctxt)
4200{
4201 return new pass_ipa_modref (ctxt);
4202}
4203
18f0873d
JH
4204namespace {
4205
d119f34c
JH
4206/* Skip edges from and to nodes that have no usable modref summary.
4207 Ignore symbols that are not available. */
4208
4209static bool
4210ignore_edge (struct cgraph_edge *e)
4211{
87d75a11
JH
4212 /* We merge summaries of inline clones into summaries of functions they
4213 are inlined to. For that reason the complete function bodies must
4214 act as unit. */
4215 if (!e->inline_failed)
4216 return false;
d119f34c 4217 enum availability avail;
c87ff875 4218 cgraph_node *callee = e->callee->ultimate_alias_target
d119f34c
JH
4219 (&avail, e->caller);
4220
4221 return (avail <= AVAIL_INTERPOSABLE
56cb815b 4222 || ((!optimization_summaries || !optimization_summaries->get (callee))
494bdadf 4223 && (!summaries_lto || !summaries_lto->get (callee))));
d119f34c
JH
4224}
4225
8a2fd716 4226/* Compute parm_map for CALLEE_EDGE. */
d119f34c 4227
6cef01c3 4228static bool
c34db4b6 4229compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
ada353b8
JH
4230{
4231 class ipa_edge_args *args;
4232 if (ipa_node_params_sum
4233 && !callee_edge->call_stmt_cannot_inline_p
a4a3cdd0 4234 && (args = ipa_edge_args_sum->get (callee_edge)) != NULL)
ada353b8
JH
4235 {
4236 int i, count = ipa_get_cs_argument_count (args);
4237 class ipa_node_params *caller_parms_info, *callee_pi;
4238 class ipa_call_summary *es
4239 = ipa_call_summaries->get (callee_edge);
4240 cgraph_node *callee
c87ff875 4241 = callee_edge->callee->ultimate_alias_target
ada353b8
JH
4242 (NULL, callee_edge->caller);
4243
a4a3cdd0
MJ
4244 caller_parms_info
4245 = ipa_node_params_sum->get (callee_edge->caller->inlined_to
4246 ? callee_edge->caller->inlined_to
4247 : callee_edge->caller);
4248 callee_pi = ipa_node_params_sum->get (callee);
ada353b8 4249
520d5ad3 4250 (*parm_map).safe_grow_cleared (count, true);
ada353b8
JH
4251
4252 for (i = 0; i < count; i++)
4253 {
4254 if (es && es->param[i].points_to_local_or_readonly_memory)
4255 {
1f3a3363 4256 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
ada353b8
JH
4257 continue;
4258 }
4259
4260 struct ipa_jump_func *jf
4261 = ipa_get_ith_jump_func (args, i);
899c10c9 4262 if (jf && callee_pi)
ada353b8
JH
4263 {
4264 tree cst = ipa_value_from_jfunc (caller_parms_info,
4265 jf,
4266 ipa_get_type
4267 (callee_pi, i));
4268 if (cst && points_to_local_or_readonly_memory_p (cst))
4269 {
1f3a3363 4270 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
ada353b8
JH
4271 continue;
4272 }
4273 }
4274 if (jf && jf->type == IPA_JF_PASS_THROUGH)
4275 {
c34db4b6 4276 (*parm_map)[i].parm_index
56cb815b 4277 = ipa_get_jf_pass_through_formal_id (jf);
4d90edb9
JH
4278 if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
4279 {
4280 (*parm_map)[i].parm_offset_known = true;
4281 (*parm_map)[i].parm_offset = 0;
4282 }
4283 else if (ipa_get_jf_pass_through_operation (jf)
4284 == POINTER_PLUS_EXPR
4285 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
4286 &(*parm_map)[i].parm_offset))
4287 (*parm_map)[i].parm_offset_known = true;
4288 else
4289 (*parm_map)[i].parm_offset_known = false;
ada353b8
JH
4290 continue;
4291 }
4292 if (jf && jf->type == IPA_JF_ANCESTOR)
c34db4b6
JH
4293 {
4294 (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
4295 (*parm_map)[i].parm_offset_known = true;
c8fd2be1
JH
4296 gcc_checking_assert
4297 (!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)));
4298 (*parm_map)[i].parm_offset
4299 = ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT;
85ebbabd 4300 }
ada353b8 4301 else
c34db4b6 4302 (*parm_map)[i].parm_index = -1;
ada353b8
JH
4303 }
4304 if (dump_file)
4305 {
4306 fprintf (dump_file, " Parm map: ");
4307 for (i = 0; i < count; i++)
c34db4b6 4308 fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
ada353b8
JH
4309 fprintf (dump_file, "\n");
4310 }
6cef01c3 4311 return true;
ada353b8 4312 }
6cef01c3 4313 return false;
ada353b8
JH
4314}
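
/* Illustrative sketch (not part of GCC): a parm_map answers, for every
   actual argument of the call, which formal parameter of the caller it is
   derived from and at what byte offset.  Assuming foo (char *p, char *q)
   contains the call bar (p, q + 4, &local), the jump functions would give
   roughly

     parm_map[0] = { parm_index 0, offset 0 }    pass-through of p
     parm_map[1] = { parm_index 1, offset 4 }    POINTER_PLUS_EXPR on q
     parm_map[2] = MODREF_LOCAL_MEMORY_PARM      &local never escapes foo

   so an access bar attributes to its parameter 2 can be ignored in foo and
   one on parameter 1 becomes an access to foo's parameter 1 shifted by 4
   bytes.  A minimal translation step under those assumptions (standalone,
   hypothetical types):  */
#if 0
#include <vector>

struct toy_parm_map { int parm_index; bool offset_known; long offset; };

static bool
toy_translate (int callee_parm, long callee_offset,
               const std::vector<toy_parm_map> &parm_map,
               int &caller_parm, long &caller_offset)
{
  if (callee_parm < 0 || (size_t) callee_parm >= parm_map.size ())
    return false;                   /* Unknown: the caller must give up.  */
  const toy_parm_map &m = parm_map[callee_parm];
  if (m.parm_index < 0 || !m.offset_known)
    return false;
  caller_parm = m.parm_index;
  caller_offset = callee_offset + m.offset;
  return true;
}
#endif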
4315
85ebbabd
JH
4316/* Map used to translate escape infos. */
4317
4318struct escape_map
4319{
4320 int parm_index;
4321 bool direct;
4322};
4323
b8ef019a 4324/* Update escape map for E. */
85ebbabd
JH
4325
4326static void
4327update_escape_summary_1 (cgraph_edge *e,
9851a163
JH
4328 vec <vec <escape_map>> &map,
4329 bool ignore_stores)
85ebbabd
JH
4330{
4331 escape_summary *sum = escape_summaries->get (e);
4332 if (!sum)
4333 return;
4334 auto_vec <escape_entry> old = sum->esc.copy ();
4335 sum->esc.release ();
4336
4337 unsigned int i;
4338 escape_entry *ee;
4339 FOR_EACH_VEC_ELT (old, i, ee)
4340 {
4341 unsigned int j;
4342 struct escape_map *em;
b8ef019a
JH
4343 /* TODO: We do not have jump functions for return slots, so we
4344 never propagate them to outer function. */
4345 if (ee->parm_index >= (int)map.length ()
4346 || ee->parm_index < 0)
85ebbabd
JH
4347 continue;
4348 FOR_EACH_VEC_ELT (map[ee->parm_index], j, em)
4349 {
9851a163
JH
4350 int min_flags = ee->min_flags;
4351 if (ee->direct && !em->direct)
4352 min_flags = deref_flags (min_flags, ignore_stores);
85ebbabd 4353 struct escape_entry entry = {em->parm_index, ee->arg,
a70faf6e 4354 min_flags,
85ebbabd
JH
4355 ee->direct & em->direct};
4356 sum->esc.safe_push (entry);
4357 }
4358 }
4359 if (!sum->esc.length ())
4360 escape_summaries->remove (e);
4361}
4362
02c80893 4363/* Update escape map for NODE. */
85ebbabd
JH
4364
4365static void
4366update_escape_summary (cgraph_node *node,
9851a163
JH
4367 vec <vec <escape_map>> &map,
4368 bool ignore_stores)
85ebbabd
JH
4369{
4370 if (!escape_summaries)
4371 return;
4372 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
9851a163 4373 update_escape_summary_1 (e, map, ignore_stores);
85ebbabd
JH
4374 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
4375 {
4376 if (!e->inline_failed)
9851a163 4377 update_escape_summary (e->callee, map, ignore_stores);
85ebbabd 4378 else
9851a163 4379 update_escape_summary_1 (e, map, ignore_stores);
85ebbabd
JH
4380 }
4381}
4382
6cef01c3
JH
4383/* Get parameter type from DECL. This is only safe for special cases
4384 like builtins for which we create fnspecs, because the type match is checked
4385 at fnspec creation time. */
d119f34c 4386
6cef01c3
JH
4387static tree
4388get_parm_type (tree decl, unsigned int i)
ada353b8 4389{
6cef01c3 4390 tree t = TYPE_ARG_TYPES (TREE_TYPE (decl));
ada353b8 4391
6cef01c3
JH
4392 for (unsigned int p = 0; p < i; p++)
4393 t = TREE_CHAIN (t);
4394 return TREE_VALUE (t);
4395}
4396
4397/* Return access mode for argument I of call E with FNSPEC. */
4398
4399static modref_access_node
4400get_access_for_fnspec (cgraph_edge *e, attr_fnspec &fnspec,
4401 unsigned int i, modref_parm_map &map)
4402{
4403 tree size = NULL_TREE;
4404 unsigned int size_arg;
4405
4406 if (!fnspec.arg_specified_p (i))
4407 ;
4408 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
ada353b8 4409 {
6cef01c3
JH
4410 cgraph_node *node = e->caller->inlined_to
4411 ? e->caller->inlined_to : e->caller;
a4a3cdd0
MJ
4412 ipa_node_params *caller_parms_info = ipa_node_params_sum->get (node);
4413 ipa_edge_args *args = ipa_edge_args_sum->get (e);
6cef01c3
JH
4414 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, size_arg);
4415
4416 if (jf)
4417 size = ipa_value_from_jfunc (caller_parms_info, jf,
4418 get_parm_type (e->callee->decl, size_arg));
ada353b8 4419 }
6cef01c3
JH
4420 else if (fnspec.arg_access_size_given_by_type_p (i))
4421 size = TYPE_SIZE_UNIT (get_parm_type (e->callee->decl, i));
4422 modref_access_node a = {0, -1, -1,
4423 map.parm_offset, map.parm_index,
5c85f295 4424 map.parm_offset_known, 0};
6cef01c3
JH
4425 poly_int64 size_hwi;
4426 if (size
4427 && poly_int_tree_p (size, &size_hwi)
4428 && coeffs_in_range_p (size_hwi, 0,
4429 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
ada353b8 4430 {
6cef01c3
JH
4431 a.size = -1;
4432 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
ada353b8 4433 }
6cef01c3
JH
4434 return a;
4435}
4436
09a4ffb7
JH
4437 /* Collapse loads and return true if something changed. */
4438static bool
4439collapse_loads (modref_summary *cur_summary,
4440 modref_summary_lto *cur_summary_lto)
4441{
4442 bool changed = false;
4443
4444 if (cur_summary && !cur_summary->loads->every_base)
4445 {
4446 cur_summary->loads->collapse ();
4447 changed = true;
4448 }
4449 if (cur_summary_lto
4450 && !cur_summary_lto->loads->every_base)
4451 {
4452 cur_summary_lto->loads->collapse ();
4453 changed = true;
4454 }
4455 return changed;
4456}
4457
4458/* Collapse stores and return true if something changed. */
4459
4460static bool
4461collapse_stores (modref_summary *cur_summary,
4462 modref_summary_lto *cur_summary_lto)
4463{
4464 bool changed = false;
4465
4466 if (cur_summary && !cur_summary->stores->every_base)
4467 {
4468 cur_summary->stores->collapse ();
4469 changed = true;
4470 }
4471 if (cur_summary_lto
4472 && !cur_summary_lto->stores->every_base)
4473 {
4474 cur_summary_lto->stores->collapse ();
4475 changed = true;
4476 }
4477 return changed;
4478}
4479
6cef01c3
JH
4480/* Call E in NODE with ECF_FLAGS has no summary; update CUR_SUMMARY and
4481 CUR_SUMMARY_LTO accordingly. Return true if something changed. */
4482
4483static bool
4484propagate_unknown_call (cgraph_node *node,
4485 cgraph_edge *e, int ecf_flags,
85ebbabd 4486 modref_summary *cur_summary,
8d3abf42
JH
4487 modref_summary_lto *cur_summary_lto,
4488 bool nontrivial_scc)
6cef01c3
JH
4489{
4490 bool changed = false;
6cef01c3
JH
4491 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4492 auto_vec <modref_parm_map, 32> parm_map;
992644c3
JH
4493 bool looping;
4494
4495 if (e->callee
4496 && builtin_safe_for_const_function_p (&looping, e->callee->decl))
4497 {
8d3abf42 4498 if (looping && cur_summary && !cur_summary->side_effects)
992644c3
JH
4499 {
4500 cur_summary->side_effects = true;
4501 changed = true;
4502 }
8d3abf42 4503 if (looping && cur_summary_lto && !cur_summary_lto->side_effects)
992644c3
JH
4504 {
4505 cur_summary_lto->side_effects = true;
4506 changed = true;
4507 }
4508 return changed;
4509 }
4510
8d3abf42
JH
4511 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
4512 || (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
4513 || nontrivial_scc)
992644c3
JH
4514 {
4515 if (cur_summary && !cur_summary->side_effects)
4516 {
4517 cur_summary->side_effects = true;
4518 changed = true;
4519 }
4520 if (cur_summary_lto && !cur_summary_lto->side_effects)
4521 {
4522 cur_summary_lto->side_effects = true;
4523 changed = true;
4524 }
a34edf9a
JH
4525 if (cur_summary && !cur_summary->nondeterministic
4526 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4527 {
4528 cur_summary->nondeterministic = true;
4529 changed = true;
4530 }
4531 if (cur_summary_lto && !cur_summary_lto->nondeterministic
4532 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4533 {
4534 cur_summary_lto->nondeterministic = true;
4535 changed = true;
4536 }
992644c3 4537 }
8d3abf42
JH
4538 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
4539 return changed;
992644c3 4540
6cef01c3
JH
4541 if (fnspec_sum
4542 && compute_parm_map (e, &parm_map))
4543 {
4544 attr_fnspec fnspec (fnspec_sum->fnspec);
4545
4546 gcc_checking_assert (fnspec.known_p ());
4547 if (fnspec.global_memory_read_p ())
4548 collapse_loads (cur_summary, cur_summary_lto);
4549 else
4550 {
4551 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
4552 for (unsigned i = 0; i < parm_map.length () && t;
4553 i++, t = TREE_CHAIN (t))
4554 if (!POINTER_TYPE_P (TREE_VALUE (t)))
4555 ;
4556 else if (!fnspec.arg_specified_p (i)
4557 || fnspec.arg_maybe_read_p (i))
4558 {
4559 modref_parm_map map = parm_map[i];
1f3a3363 4560 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
6cef01c3 4561 continue;
1f3a3363 4562 if (map.parm_index == MODREF_UNKNOWN_PARM)
6cef01c3
JH
4563 {
4564 collapse_loads (cur_summary, cur_summary_lto);
4565 break;
4566 }
4567 if (cur_summary)
4568 changed |= cur_summary->loads->insert
8632f8c6
JH
4569 (node->decl, 0, 0,
4570 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4571 if (cur_summary_lto)
4572 changed |= cur_summary_lto->loads->insert
8632f8c6
JH
4573 (node->decl, 0, 0,
4574 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4575 }
4576 }
4577 if (ignore_stores_p (node->decl, ecf_flags))
4578 ;
4579 else if (fnspec.global_memory_written_p ())
4580 collapse_stores (cur_summary, cur_summary_lto);
4581 else
4582 {
4583 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
4584 for (unsigned i = 0; i < parm_map.length () && t;
4585 i++, t = TREE_CHAIN (t))
4586 if (!POINTER_TYPE_P (TREE_VALUE (t)))
4587 ;
4588 else if (!fnspec.arg_specified_p (i)
4589 || fnspec.arg_maybe_written_p (i))
4590 {
4591 modref_parm_map map = parm_map[i];
1f3a3363 4592 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
6cef01c3 4593 continue;
1f3a3363 4594 if (map.parm_index == MODREF_UNKNOWN_PARM)
6cef01c3
JH
4595 {
4596 collapse_stores (cur_summary, cur_summary_lto);
4597 break;
4598 }
4599 if (cur_summary)
4600 changed |= cur_summary->stores->insert
8632f8c6
JH
4601 (node->decl, 0, 0,
4602 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4603 if (cur_summary_lto)
4604 changed |= cur_summary_lto->stores->insert
8632f8c6
JH
4605 (node->decl, 0, 0,
4606 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4607 }
4608 }
4609 if (fnspec.errno_maybe_written_p () && flag_errno_math)
4610 {
4611 if (cur_summary && !cur_summary->writes_errno)
4612 {
4613 cur_summary->writes_errno = true;
4614 changed = true;
4615 }
4616 if (cur_summary_lto && !cur_summary_lto->writes_errno)
4617 {
4618 cur_summary_lto->writes_errno = true;
4619 changed = true;
4620 }
4621 }
4622 return changed;
4623 }
85ebbabd
JH
4624 if (dump_file)
4625 fprintf (dump_file, " collapsing loads\n");
4626 changed |= collapse_loads (cur_summary, cur_summary_lto);
4627 if (!ignore_stores_p (node->decl, ecf_flags))
6cef01c3
JH
4628 {
4629 if (dump_file)
85ebbabd
JH
4630 fprintf (dump_file, " collapsing stores\n");
4631 changed |= collapse_stores (cur_summary, cur_summary_lto);
6cef01c3 4632 }
85ebbabd 4633 return changed;
ada353b8 4634}
d119f34c 4635
02c80893 4636/* Maybe remove summaries of NODE pointed to by CUR_SUMMARY_PTR
85ebbabd
JH
4637 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
4638
4639static void
4640remove_useless_summaries (cgraph_node *node,
4641 modref_summary **cur_summary_ptr,
4642 modref_summary_lto **cur_summary_lto_ptr,
4643 int ecf_flags)
4644{
4645 if (*cur_summary_ptr && !(*cur_summary_ptr)->useful_p (ecf_flags, false))
4646 {
4647 optimization_summaries->remove (node);
4648 *cur_summary_ptr = NULL;
4649 }
4650 if (*cur_summary_lto_ptr
4651 && !(*cur_summary_lto_ptr)->useful_p (ecf_flags, false))
4652 {
4653 summaries_lto->remove (node);
4654 *cur_summary_lto_ptr = NULL;
4655 }
4656}
4657
4658/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4659 and propagate loads/stores. */
ada353b8 4660
494bdadf 4661static bool
ada353b8
JH
4662modref_propagate_in_scc (cgraph_node *component_node)
4663{
4664 bool changed = true;
5c85f295 4665 bool first = true;
ada353b8
JH
4666 int iteration = 0;
4667
4668 while (changed)
4669 {
8d3abf42
JH
4670 bool nontrivial_scc
4671 = ((struct ipa_dfs_info *) component_node->aux)->next_cycle;
ada353b8
JH
4672 changed = false;
4673 for (struct cgraph_node *cur = component_node; cur;
d119f34c
JH
4674 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4675 {
ada353b8 4676 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
71dbabcc
JH
4677 modref_summary *cur_summary = optimization_summaries
4678 ? optimization_summaries->get (node)
4679 : NULL;
4680 modref_summary_lto *cur_summary_lto = summaries_lto
4681 ? summaries_lto->get (node)
4682 : NULL;
4683
4684 if (!cur_summary && !cur_summary_lto)
ada353b8
JH
4685 continue;
4686
85ebbabd
JH
4687 int cur_ecf_flags = flags_from_decl_or_type (node->decl);
4688
ada353b8
JH
4689 if (dump_file)
4690 fprintf (dump_file, " Processing %s%s%s\n",
4691 cur->dump_name (),
4692 TREE_READONLY (cur->decl) ? " (const)" : "",
4693 DECL_PURE_P (cur->decl) ? " (pure)" : "");
d119f34c 4694
d119f34c
JH
4695 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
4696 {
6cef01c3 4697 if (dump_file)
8d3abf42 4698 fprintf (dump_file, " Indirect call\n");
85ebbabd 4699 if (propagate_unknown_call
6cef01c3 4700 (node, e, e->indirect_info->ecf_flags,
8d3abf42
JH
4701 cur_summary, cur_summary_lto,
4702 nontrivial_scc))
85ebbabd
JH
4703 {
4704 changed = true;
4705 remove_useless_summaries (node, &cur_summary,
4706 &cur_summary_lto,
4707 cur_ecf_flags);
4708 if (!cur_summary && !cur_summary_lto)
4709 break;
4710 }
d119f34c
JH
4711 }
4712
71dbabcc 4713 if (!cur_summary && !cur_summary_lto)
ada353b8
JH
4714 continue;
4715
d119f34c
JH
4716 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
4717 callee_edge = callee_edge->next_callee)
4718 {
4719 int flags = flags_from_decl_or_type (callee_edge->callee->decl);
71dbabcc
JH
4720 modref_summary *callee_summary = NULL;
4721 modref_summary_lto *callee_summary_lto = NULL;
d119f34c
JH
4722 struct cgraph_node *callee;
4723
8d3abf42
JH
4724 if (!callee_edge->inline_failed
4725 || ((flags & (ECF_CONST | ECF_NOVOPS))
4726 && !(flags & ECF_LOOPING_CONST_OR_PURE)))
d119f34c
JH
4727 continue;
4728
d119f34c
JH
4729 /* Get the callee and its summary. */
4730 enum availability avail;
c87ff875 4731 callee = callee_edge->callee->ultimate_alias_target
d119f34c
JH
4732 (&avail, cur);
4733
ada353b8
JH
4734 /* It is not necessary to re-process calls outside of the
4735 SCC component. */
4736 if (iteration > 0
4737 && (!callee->aux
4738 || ((struct ipa_dfs_info *)cur->aux)->scc_no
4739 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
4740 continue;
4741
4742 if (dump_file)
4743 fprintf (dump_file, " Call to %s\n",
4744 callee_edge->callee->dump_name ());
d119f34c
JH
4745
4746 bool ignore_stores = ignore_stores_p (cur->decl, flags);
4747
71dbabcc 4748 if (avail <= AVAIL_INTERPOSABLE)
d119f34c 4749 {
6cef01c3
JH
4750 if (dump_file)
4751 fprintf (dump_file, " Call target interposable"
4752 " or not available\n");
4753 changed |= propagate_unknown_call
4754 (node, callee_edge, flags,
8d3abf42
JH
4755 cur_summary, cur_summary_lto,
4756 nontrivial_scc);
6cef01c3
JH
4757 if (!cur_summary && !cur_summary_lto)
4758 break;
4759 continue;
71dbabcc
JH
4760 }
4761
4762 /* We don't know anything about CALLEE, hence we cannot tell
4763 anything about the entire component. */
4764
4765 if (cur_summary
4766 && !(callee_summary = optimization_summaries->get (callee)))
4767 {
6cef01c3
JH
4768 if (dump_file)
4769 fprintf (dump_file, " No call target summary\n");
4770 changed |= propagate_unknown_call
4771 (node, callee_edge, flags,
8d3abf42
JH
4772 cur_summary, NULL,
4773 nontrivial_scc);
71dbabcc
JH
4774 }
4775 if (cur_summary_lto
4776 && !(callee_summary_lto = summaries_lto->get (callee)))
4777 {
6cef01c3
JH
4778 if (dump_file)
4779 fprintf (dump_file, " No call target summary\n");
4780 changed |= propagate_unknown_call
4781 (node, callee_edge, flags,
8d3abf42
JH
4782 NULL, cur_summary_lto,
4783 nontrivial_scc);
d119f34c
JH
4784 }
4785
8d3abf42
JH
4786 if (callee_summary && !cur_summary->side_effects
4787 && (callee_summary->side_effects
4788 || callee_edge->recursive_p ()))
4789 {
4790 cur_summary->side_effects = true;
4791 changed = true;
4792 }
4793 if (callee_summary_lto && !cur_summary_lto->side_effects
4794 && (callee_summary_lto->side_effects
4795 || callee_edge->recursive_p ()))
4796 {
4797 cur_summary_lto->side_effects = true;
4798 changed = true;
4799 }
a34edf9a
JH
4800 if (callee_summary && !cur_summary->nondeterministic
4801 && callee_summary->nondeterministic
4802 && !ignore_nondeterminism_p (cur->decl, flags))
4803 {
4804 cur_summary->nondeterministic = true;
4805 changed = true;
4806 }
4807 if (callee_summary_lto && !cur_summary_lto->nondeterministic
4808 && callee_summary_lto->nondeterministic
4809 && !ignore_nondeterminism_p (cur->decl, flags))
4810 {
4811 cur_summary_lto->nondeterministic = true;
4812 changed = true;
4813 }
8d3abf42
JH
4814 if (flags & (ECF_CONST | ECF_NOVOPS))
4815 continue;
4816
ada353b8
JH
4817 /* We cannot safely optimize based on the summary of a callee if it
4818 does not always bind to the current def: it is possible that a
4819 memory load was optimized out earlier, which may not happen in
4820 the interposed variant. */
4821 if (!callee_edge->binds_to_current_def_p ())
4822 {
a34edf9a
JH
4823 if (cur_summary && !cur_summary->calls_interposable)
4824 {
4825 cur_summary->calls_interposable = true;
4826 changed = true;
4827 }
4828 if (cur_summary_lto && !cur_summary_lto->calls_interposable)
4829 {
4830 cur_summary_lto->calls_interposable = true;
4831 changed = true;
4832 }
ada353b8
JH
4833 if (dump_file)
4834 fprintf (dump_file, " May not bind local;"
4835 " collapsing loads\n");
4836 }
4837
4838
c34db4b6 4839 auto_vec <modref_parm_map, 32> parm_map;
1f3a3363
JH
4840 modref_parm_map chain_map;
4841 /* TODO: Once we get jump functions for static chains we could
4842 compute this. */
4843 chain_map.parm_index = MODREF_UNKNOWN_PARM;
ada353b8
JH
4844
4845 compute_parm_map (callee_edge, &parm_map);
c33f4742 4846
d119f34c 4847 /* Merge in callee's information. */
71dbabcc
JH
4848 if (callee_summary)
4849 {
56cb815b 4850 changed |= cur_summary->loads->merge
8632f8c6
JH
4851 (node->decl, callee_summary->loads,
4852 &parm_map, &chain_map, !first);
56cb815b 4853 if (!ignore_stores)
6cef01c3
JH
4854 {
4855 changed |= cur_summary->stores->merge
8632f8c6
JH
4856 (node->decl, callee_summary->stores,
4857 &parm_map, &chain_map, !first);
6cef01c3
JH
4858 if (!cur_summary->writes_errno
4859 && callee_summary->writes_errno)
4860 {
4861 cur_summary->writes_errno = true;
4862 changed = true;
4863 }
4864 }
71dbabcc
JH
4865 }
4866 if (callee_summary_lto)
4867 {
56cb815b 4868 changed |= cur_summary_lto->loads->merge
8632f8c6
JH
4869 (node->decl, callee_summary_lto->loads,
4870 &parm_map, &chain_map, !first);
56cb815b 4871 if (!ignore_stores)
6cef01c3
JH
4872 {
4873 changed |= cur_summary_lto->stores->merge
8632f8c6
JH
4874 (node->decl, callee_summary_lto->stores,
4875 &parm_map, &chain_map, !first);
6cef01c3
JH
4876 if (!cur_summary_lto->writes_errno
4877 && callee_summary_lto->writes_errno)
4878 {
4879 cur_summary_lto->writes_errno = true;
4880 changed = true;
4881 }
4882 }
71dbabcc 4883 }
85ebbabd
JH
4884 if (changed)
4885 remove_useless_summaries (node, &cur_summary,
4886 &cur_summary_lto,
4887 cur_ecf_flags);
4888 if (!cur_summary && !cur_summary_lto)
4889 break;
ada353b8 4890 if (dump_file && changed)
71dbabcc
JH
4891 {
4892 if (cur_summary)
4893 cur_summary->dump (dump_file);
4894 if (cur_summary_lto)
4895 cur_summary_lto->dump (dump_file);
85ebbabd 4896 dump_modref_edge_summaries (dump_file, node, 4);
71dbabcc 4897 }
d119f34c
JH
4898 }
4899 }
ada353b8 4900 iteration++;
5c85f295 4901 first = false;
ada353b8 4902 }
ada353b8 4903 if (dump_file)
85ebbabd
JH
4904 fprintf (dump_file,
4905 "Propagation finished in %i iterations\n", iteration);
494bdadf
JH
4906 bool pureconst = false;
4907 for (struct cgraph_node *cur = component_node; cur;
4908 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4909 if (!cur->inlined_to && opt_for_fn (cur->decl, flag_ipa_pure_const))
4910 {
4911 modref_summary *summary = optimization_summaries
4912 ? optimization_summaries->get (cur)
4913 : NULL;
4914 modref_summary_lto *summary_lto = summaries_lto
4915 ? summaries_lto->get (cur)
4916 : NULL;
1b62cddc 4917 if (summary && !summary->stores->every_base && !summary->stores->bases
a34edf9a 4918 && !summary->nondeterministic)
494bdadf 4919 {
a34edf9a
JH
4920 if (!summary->loads->every_base && !summary->loads->bases
4921 && !summary->calls_interposable)
494bdadf
JH
4922 pureconst |= ipa_make_function_const
4923 (cur, summary->side_effects, false);
4924 else
4925 pureconst |= ipa_make_function_pure
4926 (cur, summary->side_effects, false);
4927 }
4928 if (summary_lto && !summary_lto->stores->every_base
a34edf9a 4929 && !summary_lto->stores->bases && !summary_lto->nondeterministic)
494bdadf 4930 {
a34edf9a
JH
4931 if (!summary_lto->loads->every_base && !summary_lto->loads->bases
4932 && !summary_lto->calls_interposable)
494bdadf
JH
4933 pureconst |= ipa_make_function_const
4934 (cur, summary_lto->side_effects, false);
4935 else
4936 pureconst |= ipa_make_function_pure
4937 (cur, summary_lto->side_effects, false);
4938 }
4939 }
4940 return pureconst;
85ebbabd
JH
4941}
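
/* Illustrative sketch (not part of GCC): the propagation above is a classic
   fixed-point iteration over one strongly connected component.  Every
   member merges its callees' summaries into its own until a full sweep
   changes nothing; since a merge can only add information or collapse a
   summary entirely, the loop terminates.  A generic standalone version of
   that loop shape, with hypothetical names:  */
#if 0
#include <vector>
#include <functional>

template <typename Node>
static int
toy_scc_fixed_point (std::vector<Node *> &scc,
                     const std::function<bool (Node *)> &process)
{
  int iteration = 0;
  bool changed = true;
  while (changed)
    {
      changed = false;
      for (Node *n : scc)
        /* PROCESS merges callee info into N and reports whether N grew.  */
        changed |= process (n);
      iteration++;
    }
  return iteration;
}
#endif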
4942
4943/* Dump results of propagation in SCC rooted in COMPONENT_NODE. */
4944
4945static void
4946modref_propagate_dump_scc (cgraph_node *component_node)
4947{
4948 for (struct cgraph_node *cur = component_node; cur;
4949 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4950 if (!cur->inlined_to)
4951 {
4952 modref_summary *cur_summary = optimization_summaries
4953 ? optimization_summaries->get (cur)
4954 : NULL;
4955 modref_summary_lto *cur_summary_lto = summaries_lto
4956 ? summaries_lto->get (cur)
4957 : NULL;
4958
4959 fprintf (dump_file, "Propagated modref for %s%s%s\n",
4960 cur->dump_name (),
4961 TREE_READONLY (cur->decl) ? " (const)" : "",
4962 DECL_PURE_P (cur->decl) ? " (pure)" : "");
4963 if (optimization_summaries)
4964 {
4965 if (cur_summary)
4966 cur_summary->dump (dump_file);
4967 else
4968 fprintf (dump_file, " Not tracked\n");
4969 }
4970 if (summaries_lto)
4971 {
4972 if (cur_summary_lto)
4973 cur_summary_lto->dump (dump_file);
4974 else
4975 fprintf (dump_file, " Not tracked (lto)\n");
4976 }
4977 }
4978}
4979
16e85390
JH
4980/* Determine EAF flags known for call E with CALLEE_ECF_FLAGS and ARG. */
4981
4982int
4983implicit_eaf_flags_for_edge_and_arg (cgraph_edge *e, int callee_ecf_flags,
4984 bool ignore_stores, int arg)
4985{
4986 /* Returning the value is already accounted to at local propagation. */
4987 int implicit_flags = EAF_NOT_RETURNED_DIRECTLY
4988 | EAF_NOT_RETURNED_INDIRECTLY;
4989 if (ignore_stores)
4990 implicit_flags |= ignore_stores_eaf_flags;
4991 if (callee_ecf_flags & ECF_PURE)
4992 implicit_flags |= implicit_pure_eaf_flags;
4993 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
4994 implicit_flags |= implicit_const_eaf_flags;
4995 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4996 if (fnspec_sum)
4997 {
4998 attr_fnspec fnspec (fnspec_sum->fnspec);
4999 implicit_flags |= fnspec.arg_eaf_flags (arg);
5000 }
5001 return implicit_flags;
5002}
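
/* Illustrative sketch (not part of GCC): the helper above composes a mask
   of EAF flags that hold no matter what the callee body does, purely from
   call-site properties (stores ignored, ECF_PURE, ECF_CONST, an attached
   fnspec).  The real EAF_* values live in tree-core.h; the fragment below
   only shows the same composition pattern with made-up flag values.  */
#if 0
enum toy_flags
{
  TOY_NOT_RETURNED      = 1 << 0,
  TOY_NO_DIRECT_CLOBBER = 1 << 1,
  TOY_NO_DIRECT_READ    = 1 << 2
};

static int
toy_implicit_flags (bool ignore_stores, bool is_pure, bool is_const)
{
  int flags = TOY_NOT_RETURNED;       /* Known here regardless of callee.  */
  if (ignore_stores || is_pure || is_const)
    flags |= TOY_NO_DIRECT_CLOBBER;   /* Callee cannot write through it.  */
  if (is_const)
    flags |= TOY_NO_DIRECT_READ;      /* Const functions read no memory.  */
  return flags;
}
#endif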

/* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
   and SUMMARY_LTO to CUR_SUMMARY_LTO.
   Return true if something changed.  */

static bool
modref_merge_call_site_flags (escape_summary *sum,
			      modref_summary *cur_summary,
			      modref_summary_lto *cur_summary_lto,
			      modref_summary *summary,
			      modref_summary_lto *summary_lto,
			      tree caller,
			      cgraph_edge *e,
			      int caller_ecf_flags,
			      int callee_ecf_flags,
			      bool binds_to_current_def)
{
  escape_entry *ee;
  unsigned int i;
  bool changed = false;
  bool ignore_stores = ignore_stores_p (caller, callee_ecf_flags);

  /* Return early if we have no useful info to propagate.  */
  if ((!cur_summary
       || (!cur_summary->arg_flags.length ()
	   && !cur_summary->static_chain_flags
	   && !cur_summary->retslot_flags))
      && (!cur_summary_lto
	  || (!cur_summary_lto->arg_flags.length ()
	      && !cur_summary_lto->static_chain_flags
	      && !cur_summary_lto->retslot_flags)))
    return false;

  FOR_EACH_VEC_ELT (sum->esc, i, ee)
    {
      int flags = 0;
      int flags_lto = 0;
      int implicit_flags = implicit_eaf_flags_for_edge_and_arg
				(e, callee_ecf_flags, ignore_stores, ee->arg);

      if (summary && ee->arg < summary->arg_flags.length ())
	flags = summary->arg_flags[ee->arg];
      if (summary_lto
	  && ee->arg < summary_lto->arg_flags.length ())
	flags_lto = summary_lto->arg_flags[ee->arg];
      if (!ee->direct)
	{
	  flags = deref_flags (flags, ignore_stores);
	  flags_lto = deref_flags (flags_lto, ignore_stores);
	}
      if (ignore_stores)
	implicit_flags |= ignore_stores_eaf_flags;
      if (callee_ecf_flags & ECF_PURE)
	implicit_flags |= implicit_pure_eaf_flags;
      if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
	implicit_flags |= implicit_const_eaf_flags;
      class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
      if (fnspec_sum)
	{
	  attr_fnspec fnspec (fnspec_sum->fnspec);
	  implicit_flags |= fnspec.arg_eaf_flags (ee->arg);
	}
      if (!ee->direct)
	implicit_flags = deref_flags (implicit_flags, ignore_stores);
      flags |= implicit_flags;
      flags_lto |= implicit_flags;
      if (!binds_to_current_def && (flags || flags_lto))
	{
	  flags = interposable_eaf_flags (flags, implicit_flags);
	  flags_lto = interposable_eaf_flags (flags_lto, implicit_flags);
	}
      if (!(flags & EAF_UNUSED)
	  && cur_summary
	  && ee->parm_index < (int)cur_summary->arg_flags.length ())
	{
	  eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
			   ? cur_summary->retslot_flags
			   : ee->parm_index == MODREF_STATIC_CHAIN_PARM
			     ? cur_summary->static_chain_flags
			     : cur_summary->arg_flags[ee->parm_index];
	  if ((f & flags) != f)
	    {
	      f = remove_useless_eaf_flags
			 (f & flags, caller_ecf_flags,
			  VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
	      changed = true;
	    }
	}
      if (!(flags_lto & EAF_UNUSED)
	  && cur_summary_lto
	  && ee->parm_index < (int)cur_summary_lto->arg_flags.length ())
	{
	  eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
			   ? cur_summary_lto->retslot_flags
			   : ee->parm_index == MODREF_STATIC_CHAIN_PARM
			     ? cur_summary_lto->static_chain_flags
			     : cur_summary_lto->arg_flags[ee->parm_index];
	  if ((f & flags_lto) != f)
	    {
	      f = remove_useless_eaf_flags
			 (f & flags_lto, caller_ecf_flags,
			  VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
	      changed = true;
	    }
	}
    }
  return changed;
}
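
/* Note on termination (illustrative example; the argument index is
   hypothetical): the merge above only ever narrows flag sets, since
   "f = remove_useless_eaf_flags (f & flags, ...)" clears bits and never
   sets new ones.  For instance, if the caller currently records

     arg_flags[2] == EAF_NO_DIRECT_CLOBBER | EAF_UNUSED

   and the call site only supports EAF_NO_DIRECT_CLOBBER, the stored value
   shrinks to (at most) EAF_NO_DIRECT_CLOBBER and CHANGED is set; hence the
   fixpoint iteration in modref_propagate_flags_in_scc below must
   terminate.  */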

/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
   and propagate arg flags.  */

static void
modref_propagate_flags_in_scc (cgraph_node *component_node)
{
  bool changed = true;
  int iteration = 0;

  while (changed)
    {
      changed = false;
      for (struct cgraph_node *cur = component_node; cur;
	   cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
	{
	  cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
	  modref_summary *cur_summary = optimization_summaries
					? optimization_summaries->get (node)
					: NULL;
	  modref_summary_lto *cur_summary_lto = summaries_lto
						? summaries_lto->get (node)
						: NULL;

	  if (!cur_summary && !cur_summary_lto)
	    continue;
	  int caller_ecf_flags = flags_from_decl_or_type (cur->decl);

	  if (dump_file)
	    fprintf (dump_file, "  Processing %s%s%s\n",
		     cur->dump_name (),
		     TREE_READONLY (cur->decl) ? " (const)" : "",
		     DECL_PURE_P (cur->decl) ? " (pure)" : "");

	  for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
	    {
	      escape_summary *sum = escape_summaries->get (e);

	      if (!sum || (e->indirect_info->ecf_flags
			   & (ECF_CONST | ECF_NOVOPS)))
		continue;

	      changed |= modref_merge_call_site_flags
			    (sum, cur_summary, cur_summary_lto,
			     NULL, NULL,
			     node->decl,
			     e,
			     caller_ecf_flags,
			     e->indirect_info->ecf_flags,
			     false);
	    }

	  if (!cur_summary && !cur_summary_lto)
	    continue;

	  for (cgraph_edge *callee_edge = cur->callees; callee_edge;
	       callee_edge = callee_edge->next_callee)
	    {
	      int ecf_flags = flags_from_decl_or_type
				 (callee_edge->callee->decl);
	      modref_summary *callee_summary = NULL;
	      modref_summary_lto *callee_summary_lto = NULL;
	      struct cgraph_node *callee;

	      if (ecf_flags & (ECF_CONST | ECF_NOVOPS)
		  || !callee_edge->inline_failed)
		continue;

	      /* Get the callee and its summary.  */
	      enum availability avail;
	      callee = callee_edge->callee->ultimate_alias_target
			 (&avail, cur);

	      /* It is not necessary to re-process calls outside of the
		 SCC component.  */
	      if (iteration > 0
		  && (!callee->aux
		      || ((struct ipa_dfs_info *)cur->aux)->scc_no
			  != ((struct ipa_dfs_info *)callee->aux)->scc_no))
		continue;

	      escape_summary *sum = escape_summaries->get (callee_edge);
	      if (!sum)
		continue;

	      if (dump_file)
		fprintf (dump_file, "    Call to %s\n",
			 callee_edge->callee->dump_name ());

	      if (avail <= AVAIL_INTERPOSABLE
		  || callee_edge->call_stmt_cannot_inline_p)
		;
	      else
		{
		  if (cur_summary)
		    callee_summary = optimization_summaries->get (callee);
		  if (cur_summary_lto)
		    callee_summary_lto = summaries_lto->get (callee);
		}
	      changed |= modref_merge_call_site_flags
			    (sum, cur_summary, cur_summary_lto,
			     callee_summary, callee_summary_lto,
			     node->decl,
			     callee_edge,
			     caller_ecf_flags,
			     ecf_flags,
			     callee->binds_to_current_def_p ());
	      if (dump_file && changed)
		{
		  if (cur_summary)
		    cur_summary->dump (dump_file);
		  if (cur_summary_lto)
		    cur_summary_lto->dump (dump_file);
		}
	    }
	}
      iteration++;
    }
  if (dump_file)
    fprintf (dump_file,
	     "Propagation of flags finished in %i iterations\n", iteration);
}

} /* ANON namespace.  */

/* Call EDGE was inlined; merge summary from callee to the caller.  */

void
ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
{
  if (!summaries && !summaries_lto)
    return;

  struct cgraph_node *to = (edge->caller->inlined_to
			    ? edge->caller->inlined_to : edge->caller);
  class modref_summary *to_info = summaries ? summaries->get (to) : NULL;
  class modref_summary_lto *to_info_lto = summaries_lto
					  ? summaries_lto->get (to) : NULL;

  if (!to_info && !to_info_lto)
    {
      if (summaries)
	summaries->remove (edge->callee);
      if (summaries_lto)
	summaries_lto->remove (edge->callee);
      remove_modref_edge_summaries (edge->callee);
      return;
    }

  class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
				      : NULL;
  class modref_summary_lto *callee_info_lto
		 = summaries_lto ? summaries_lto->get (edge->callee) : NULL;
  int flags = flags_from_decl_or_type (edge->callee->decl);
  /* Combine in outer flags.  */
  cgraph_node *n;
  for (n = edge->caller; n->inlined_to; n = n->callers->caller)
    flags |= flags_from_decl_or_type (n->decl);
  flags |= flags_from_decl_or_type (n->decl);
  bool ignore_stores = ignore_stores_p (edge->caller->decl, flags);

  if (!callee_info && to_info)
    {
      if (!(flags & (ECF_CONST | ECF_NOVOPS)))
	to_info->loads->collapse ();
      if (!ignore_stores)
	to_info->stores->collapse ();
    }
  if (!callee_info_lto && to_info_lto)
    {
      if (!(flags & (ECF_CONST | ECF_NOVOPS)))
	to_info_lto->loads->collapse ();
      if (!ignore_stores)
	to_info_lto->stores->collapse ();
    }
  /* Merge side effects and non-determinism.
     PURE/CONST flags make functions deterministic, and if there is
     no LOOPING_CONST_OR_PURE they also have no side effects.  */
  if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
      || (flags & ECF_LOOPING_CONST_OR_PURE))
    {
      if (to_info)
	{
	  if (!callee_info || callee_info->side_effects)
	    to_info->side_effects = true;
	  if ((!callee_info || callee_info->nondeterministic)
	      && !ignore_nondeterminism_p (edge->caller->decl, flags))
	    to_info->nondeterministic = true;
	}
      if (to_info_lto)
	{
	  if (!callee_info_lto || callee_info_lto->side_effects)
	    to_info_lto->side_effects = true;
	  if ((!callee_info_lto || callee_info_lto->nondeterministic)
	      && !ignore_nondeterminism_p (edge->caller->decl, flags))
	    to_info_lto->nondeterministic = true;
	}
    }
  if (callee_info || callee_info_lto)
    {
      auto_vec <modref_parm_map, 32> parm_map;
      modref_parm_map chain_map;
      /* TODO: Once we get jump functions for static chains we could
	 compute parm_index.  */

      compute_parm_map (edge, &parm_map);

      if (!ignore_stores)
	{
	  if (to_info && callee_info)
	    to_info->stores->merge (to->decl, callee_info->stores, &parm_map,
				    &chain_map, false);
	  if (to_info_lto && callee_info_lto)
	    to_info_lto->stores->merge (to->decl, callee_info_lto->stores,
					&parm_map, &chain_map, false);
	}
      if (!(flags & (ECF_CONST | ECF_NOVOPS)))
	{
	  if (to_info && callee_info)
	    to_info->loads->merge (to->decl, callee_info->loads, &parm_map,
				   &chain_map, false);
	  if (to_info_lto && callee_info_lto)
	    to_info_lto->loads->merge (to->decl, callee_info_lto->loads,
				       &parm_map, &chain_map, false);
	}
    }

  /* Now merge escape summaries.
     For every escape to the callee we need to merge callee flags
     and remap callee's escapes.  */
  class escape_summary *sum = escape_summaries->get (edge);
  int max_escape = -1;
  escape_entry *ee;
  unsigned int i;

  if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
    FOR_EACH_VEC_ELT (sum->esc, i, ee)
      if ((int)ee->arg > max_escape)
	max_escape = ee->arg;

  auto_vec <vec <struct escape_map>, 32> emap (max_escape + 1);
  emap.safe_grow (max_escape + 1, true);
  for (i = 0; (int)i < max_escape + 1; i++)
    emap[i] = vNULL;

  if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
    FOR_EACH_VEC_ELT (sum->esc, i, ee)
      {
	bool needed = false;
	int implicit_flags = implicit_eaf_flags_for_edge_and_arg
				(edge, flags, ignore_stores,
				 ee->arg);
	if (!ee->direct)
	  implicit_flags = deref_flags (implicit_flags, ignore_stores);
	if (to_info && (int)to_info->arg_flags.length () > ee->parm_index)
	  {
	    int flags = callee_info
			&& callee_info->arg_flags.length () > ee->arg
			? callee_info->arg_flags[ee->arg] : 0;
	    if (!ee->direct)
	      flags = deref_flags (flags, ignore_stores);
	    flags |= ee->min_flags | implicit_flags;
	    eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
			     ? to_info->retslot_flags
			     : ee->parm_index == MODREF_STATIC_CHAIN_PARM
			       ? to_info->static_chain_flags
			       : to_info->arg_flags[ee->parm_index];
	    f &= flags;
	    if (f)
	      needed = true;
	  }
	if (to_info_lto
	    && (int)to_info_lto->arg_flags.length () > ee->parm_index)
	  {
	    int flags = callee_info_lto
			&& callee_info_lto->arg_flags.length () > ee->arg
			? callee_info_lto->arg_flags[ee->arg] : 0;
	    if (!ee->direct)
	      flags = deref_flags (flags, ignore_stores);
	    flags |= ee->min_flags | implicit_flags;
	    eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
			     ? to_info_lto->retslot_flags
			     : ee->parm_index == MODREF_STATIC_CHAIN_PARM
			       ? to_info_lto->static_chain_flags
			       : to_info_lto->arg_flags[ee->parm_index];
	    f &= flags;
	    if (f)
	      needed = true;
	  }
	struct escape_map entry = {ee->parm_index, ee->direct};
	if (needed)
	  emap[ee->arg].safe_push (entry);
      }
  update_escape_summary (edge->callee, emap, ignore_stores);
  for (i = 0; (int)i < max_escape + 1; i++)
    emap[i].release ();
  if (sum)
    escape_summaries->remove (edge);

  if (summaries)
    {
      if (to_info && !to_info->useful_p (flags))
	{
	  if (dump_file)
	    fprintf (dump_file, "Removed mod-ref summary for %s\n",
		     to->dump_name ());
	  summaries->remove (to);
	  to_info = NULL;
	}
      else if (to_info && dump_file)
	{
	  if (dump_file)
	    fprintf (dump_file, "Updated mod-ref summary for %s\n",
		     to->dump_name ());
	  to_info->dump (dump_file);
	}
      if (callee_info)
	summaries->remove (edge->callee);
    }
  if (summaries_lto)
    {
      if (to_info_lto && !to_info_lto->useful_p (flags))
	{
	  if (dump_file)
	    fprintf (dump_file, "Removed mod-ref summary for %s\n",
		     to->dump_name ());
	  summaries_lto->remove (to);
	  to_info_lto = NULL;
	}
      else if (to_info_lto && dump_file)
	{
	  if (dump_file)
	    fprintf (dump_file, "Updated mod-ref summary for %s\n",
		     to->dump_name ());
	  to_info_lto->dump (dump_file);
	}
      if (callee_info_lto)
	summaries_lto->remove (edge->callee);
    }
  if (!to_info && !to_info_lto)
    remove_modref_edge_summaries (to);
  return;
}
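
/* Usage sketch (assumption: the exact call site lives in the inliner, not
   in this file): after an edge has been inlined, the transformation code
   is expected to invoke roughly

     ipa_merge_modref_summary_after_inlining (edge);

   so that the caller's load/store trees, EAF flags and escape summaries
   conservatively absorb whatever the inlined body could do, before the
   callee's own summaries are discarded.  */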

/* Run the IPA pass.  This will take a function's summaries and calls and
   construct new summaries which represent a transitive closure, so that
   the summary of an analyzed function contains information about the loads
   and stores done by the function itself or by any function it calls.  */

unsigned int
pass_ipa_modref::execute (function *)
{
  if (!summaries && !summaries_lto)
    return 0;
  bool pureconst = false;

  if (optimization_summaries)
    ggc_delete (optimization_summaries);
  optimization_summaries = summaries;
  summaries = NULL;

  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
					 symtab->cgraph_count);
  int order_pos;
  order_pos = ipa_reduced_postorder (order, true, ignore_edge);
  int i;

  /* Iterate over all strongly connected components in post-order.  */
  for (i = 0; i < order_pos; i++)
    {
      /* Get the component's representative.  That's just any node in the
	 component from which we can traverse the entire component.  */
      struct cgraph_node *component_node = order[i];

      if (dump_file)
	fprintf (dump_file, "\n\nStart of SCC component\n");

      pureconst |= modref_propagate_in_scc (component_node);
      modref_propagate_flags_in_scc (component_node);
      if (optimization_summaries)
	for (struct cgraph_node *cur = component_node; cur;
	     cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
	  if (modref_summary *sum = optimization_summaries->get (cur))
	    sum->finalize (cur->decl);
      if (dump_file)
	modref_propagate_dump_scc (component_node);
    }
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    update_signature (node);
  if (summaries_lto)
    ((modref_summaries_lto *)summaries_lto)->propagated = true;
  ipa_free_postorder_info ();
  free (order);
  delete fnspec_summaries;
  fnspec_summaries = NULL;
  delete escape_summaries;
  escape_summaries = NULL;

  /* If we possibly made constructors const/pure, we may need to remove
     them.  */
  return pureconst ? TODO_remove_functions : 0;
}

/* Summaries must stay alive until end of compilation.  */

void
ipa_modref_cc_finalize ()
{
  if (optimization_summaries)
    ggc_delete (optimization_summaries);
  optimization_summaries = NULL;
  if (summaries_lto)
    ggc_delete (summaries_lto);
  summaries_lto = NULL;
  if (fnspec_summaries)
    delete fnspec_summaries;
  fnspec_summaries = NULL;
  if (escape_summaries)
    delete escape_summaries;
  escape_summaries = NULL;
}

#include "gt-ipa-modref.h"