/* Search for references that a function loads or stores.
   Copyright (C) 2020-2021 Free Software Foundation, Inc.
   Contributed by David Cepelik and Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Mod/ref pass records a summary about loads and stores performed by the
   function.  This is later used by alias analysis to disambiguate memory
   accesses across function calls.

   This file contains a tree pass and an IPA pass.  Both perform the same
   analysis; however, the tree pass is executed during early and late
   optimization passes to propagate info downwards in the compilation order,
   while the IPA pass propagates across the callgraph and is able to handle
   recursion and works on the whole program during link-time analysis.

   LTO mode differs from the local mode by not recording alias sets but types
   that are translated to alias sets later.  This is necessary in order to
   stream the information, because alias sets are rebuilt at stream-in time
   and may not correspond to ones seen during analysis.  For this reason part
   of the analysis is duplicated.

   The following information is computed
     1) load/store access tree described in ipa-modref-tree.h
	This is used by tree-ssa-alias to disambiguate load/stores
     2) EAF flags used by points-to analysis (in tree-ssa-structalias)
	and defined in tree-core.h.
   Both are stored to optimization_summaries.

   There are multiple summaries computed and used during the propagation:
     - summaries holds summaries from analysis to IPA propagation
       time.
     - summaries_lto is same as summaries but holds them in a format
       that can be streamed (as described above).
     - fnspec_summary holds fnspec strings for calls.  This is
       necessary because gimple_call_fnspec performs additional
       analysis besides looking at the callee fndecl.
     - escape_summary holds escape points for a given call edge.
       That is a vector recording what function parameters
       may escape to a function call (and with what parameter index).  */

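/* A motivating sketch (illustrative code, not part of this file): given a
   callee such as

     void inc_counter (int *cnt) { (*cnt)++; }

   whose modref summary records that it loads and stores only through
   parameter 0, a caller like

     int f (int *cnt, int *val) { *val = 1; inc_counter (cnt); return *val; }

   allows tree-ssa-alias to keep the value of *val across the call whenever
   CNT and VAL are known not to alias, instead of treating the call as
   clobbering all memory.  */
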
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "gimple-iterator.h"
#include "tree-dfa.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "gimple-pretty-print.h"
#include "gimple-walk.h"
#include "print-tree.h"
#include "tree-streamer.h"
#include "alias.h"
#include "calls.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "value-range.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "attr-fnspec.h"
#include "symtab-clones.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "attribs.h"
#include "tree-cfg.h"
#include "tree-eh.h"


namespace {

/* We record fnspec specifiers for call edges since they depend on the actual
   gimple statements.  */

class fnspec_summary
{
public:
  char *fnspec;

  fnspec_summary ()
  : fnspec (NULL)
  {
  }

  ~fnspec_summary ()
  {
    free (fnspec);
  }
};

/* Summary holding fnspec string for a given call.  */

class fnspec_summaries_t : public call_summary <fnspec_summary *>
{
public:
  fnspec_summaries_t (symbol_table *symtab)
  : call_summary <fnspec_summary *> (symtab) {}
  /* Hook that is called by summary when an edge is duplicated.  */
  virtual void duplicate (cgraph_edge *,
			  cgraph_edge *,
			  fnspec_summary *src,
			  fnspec_summary *dst)
  {
    dst->fnspec = xstrdup (src->fnspec);
  }
};

static fnspec_summaries_t *fnspec_summaries = NULL;

/* Escape summary holds a vector of param indexes that escape to
   a given call.  */
struct escape_entry
{
  /* Parameter that escapes at a given call.  */
  int parm_index;
  /* Argument it escapes to.  */
  unsigned int arg;
  /* Minimal flags known about the argument.  */
  eaf_flags_t min_flags;
  /* Does it escape directly or indirectly?  */
  bool direct;
};

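/* As an illustration (hypothetical caller, not analysis output): in

     void wrap (int *p) { use (p); }

   the call to use () would get a single escape_entry with parm_index 0
   (WRAP's parameter P), arg 0 (P is passed as the first argument), direct
   set, and min_flags holding the EAF flags already known for that argument
   of use ().  */
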
/* Dump EAF flags.  */

static void
dump_eaf_flags (FILE *out, int flags, bool newline = true)
{
  if (flags & EAF_UNUSED)
    fprintf (out, " unused");
  if (flags & EAF_NO_DIRECT_CLOBBER)
    fprintf (out, " no_direct_clobber");
  if (flags & EAF_NO_INDIRECT_CLOBBER)
    fprintf (out, " no_indirect_clobber");
  if (flags & EAF_NO_DIRECT_ESCAPE)
    fprintf (out, " no_direct_escape");
  if (flags & EAF_NO_INDIRECT_ESCAPE)
    fprintf (out, " no_indirect_escape");
  if (flags & EAF_NOT_RETURNED_DIRECTLY)
    fprintf (out, " not_returned_directly");
  if (flags & EAF_NOT_RETURNED_INDIRECTLY)
    fprintf (out, " not_returned_indirectly");
  if (flags & EAF_NO_DIRECT_READ)
    fprintf (out, " no_direct_read");
  if (flags & EAF_NO_INDIRECT_READ)
    fprintf (out, " no_indirect_read");
  if (newline)
    fprintf (out, "\n");
}

struct escape_summary
{
  auto_vec <escape_entry> esc;
  void dump (FILE *out)
  {
    for (unsigned int i = 0; i < esc.length (); i++)
      {
	fprintf (out, "   parm %i arg %i %s min:",
		 esc[i].parm_index,
		 esc[i].arg,
		 esc[i].direct ? "(direct)" : "(indirect)");
	dump_eaf_flags (out, esc[i].min_flags, false);
      }
    fprintf (out, "\n");
  }
};

class escape_summaries_t : public call_summary <escape_summary *>
{
public:
  escape_summaries_t (symbol_table *symtab)
  : call_summary <escape_summary *> (symtab) {}
  /* Hook that is called by summary when an edge is duplicated.  */
  virtual void duplicate (cgraph_edge *,
			  cgraph_edge *,
			  escape_summary *src,
			  escape_summary *dst)
  {
    dst->esc = src->esc.copy ();
  }
};

static escape_summaries_t *escape_summaries = NULL;

}  /* ANON namespace: GTY annotated summaries can not be anonymous.  */

d119f34c
JH
211/* Class (from which there is one global instance) that holds modref summaries
212 for all analyzed functions. */
6cef01c3 213
d119f34c
JH
214class GTY((user)) modref_summaries
215 : public fast_function_summary <modref_summary *, va_gc>
216{
217public:
218 modref_summaries (symbol_table *symtab)
219 : fast_function_summary <modref_summary *, va_gc> (symtab) {}
220 virtual void insert (cgraph_node *, modref_summary *state);
221 virtual void duplicate (cgraph_node *src_node,
222 cgraph_node *dst_node,
223 modref_summary *src_data,
224 modref_summary *dst_data);
c9da53d6
JH
225 static modref_summaries *create_ggc (symbol_table *symtab)
226 {
227 return new (ggc_alloc_no_dtor<modref_summaries> ())
228 modref_summaries (symtab);
229 }
d119f34c
JH
230};
231
71dbabcc
JH
232class modref_summary_lto;
233
234/* Class (from which there is one global instance) that holds modref summaries
235 for all analyzed functions. */
6cef01c3 236
71dbabcc
JH
237class GTY((user)) modref_summaries_lto
238 : public fast_function_summary <modref_summary_lto *, va_gc>
239{
240public:
241 modref_summaries_lto (symbol_table *symtab)
242 : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
243 propagated (false) {}
244 virtual void insert (cgraph_node *, modref_summary_lto *state);
245 virtual void duplicate (cgraph_node *src_node,
246 cgraph_node *dst_node,
247 modref_summary_lto *src_data,
248 modref_summary_lto *dst_data);
249 static modref_summaries_lto *create_ggc (symbol_table *symtab)
250 {
251 return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
252 modref_summaries_lto (symtab);
253 }
254 bool propagated;
255};
256
257/* Global variable holding all modref summaries
258 (from analysis to IPA propagation time). */
6cef01c3 259
71dbabcc
JH
260static GTY(()) fast_function_summary <modref_summary *, va_gc>
261 *summaries;
262
8a2fd716 263/* Global variable holding all modref optimization summaries
71dbabcc 264 (from IPA propagation time or used by local optimization pass). */
6cef01c3 265
71dbabcc
JH
266static GTY(()) fast_function_summary <modref_summary *, va_gc>
267 *optimization_summaries;
268
269/* LTO summaries hold info from analysis to LTO streaming or from LTO
270 stream-in through propagation to LTO stream-out. */
6cef01c3 271
71dbabcc
JH
272static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
273 *summaries_lto;
d119f34c
JH
274
275/* Summary for a single function which this pass produces. */
276
277modref_summary::modref_summary ()
a70c0512 278 : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
a34edf9a
JH
279 writes_errno (false), side_effects (false), nondeterministic (false),
280 calls_interposable (false), global_memory_read (false),
5aa91072 281 global_memory_written (false), try_dse (false)
d119f34c
JH
282{
283}
284
285modref_summary::~modref_summary ()
286{
287 if (loads)
288 ggc_delete (loads);
289 if (stores)
290 ggc_delete (stores);
d119f34c
JH
291}
292
/* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
   useful to track.  If RETURNS_VOID is true, also clear
   EAF_NOT_RETURNED_DIRECTLY and EAF_NOT_RETURNED_INDIRECTLY.  */
static int
remove_useless_eaf_flags (int eaf_flags, int ecf_flags, bool returns_void)
{
  if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
    eaf_flags &= ~implicit_const_eaf_flags;
  else if (ecf_flags & ECF_PURE)
    eaf_flags &= ~implicit_pure_eaf_flags;
  else if ((ecf_flags & ECF_NORETURN) || returns_void)
    eaf_flags &= ~(EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY);
  return eaf_flags;
}

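/* For instance (hedged example, the declaration is made up): for a callee
   declared

     __attribute__ ((const)) int hash (const char *key);

   ECF_CONST already implies that KEY is neither clobbered nor escapes, so
   the corresponding EAF bits carry no extra information and are dropped via
   implicit_const_eaf_flags, keeping summaries small and the dataflow
   finite.  */
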
85ebbabd
JH
308/* Return true if FLAGS holds some useful information. */
309
310static bool
8da8ed43 311eaf_flags_useful_p (vec <eaf_flags_t> &flags, int ecf_flags)
85ebbabd
JH
312{
313 for (unsigned i = 0; i < flags.length (); i++)
4341b1b1
JH
314 if (remove_useless_eaf_flags (flags[i], ecf_flags, false))
315 return true;
85ebbabd
JH
316 return false;
317}
318
319/* Return true if summary is potentially useful for optimization.
320 If CHECK_FLAGS is false assume that arg_flags are useful. */
67c935c8
JH
321
322bool
85ebbabd 323modref_summary::useful_p (int ecf_flags, bool check_flags)
67c935c8 324{
85ebbabd 325 if (arg_flags.length () && !check_flags)
520d5ad3 326 return true;
85ebbabd
JH
327 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
328 return true;
329 arg_flags.release ();
b8ef019a
JH
330 if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
331 return true;
a70c0512
JH
332 if (check_flags
333 && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
334 return true;
f6f704fd 335 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
a34edf9a
JH
336 return ((!side_effects || !nondeterministic)
337 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
71dbabcc 338 if (loads && !loads->every_base)
67c935c8 339 return true;
64f3e71c
JH
340 else
341 kills.release ();
67c935c8 342 if (ecf_flags & ECF_PURE)
a34edf9a
JH
343 return ((!side_effects || !nondeterministic)
344 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
71dbabcc 345 return stores && !stores->every_base;
67c935c8
JH
346}
347
/* Single function summary used for LTO.  */

typedef modref_tree <tree> modref_records_lto;
struct GTY(()) modref_summary_lto
{
  /* Loads and stores in the function, using types rather than alias sets.

     This is necessary to make the information streamable for LTO but is also
     more verbose and thus more likely to hit the limits.  */
  modref_records_lto *loads;
  modref_records_lto *stores;
  auto_vec<modref_access_node> GTY((skip)) kills;
  auto_vec<eaf_flags_t> GTY((skip)) arg_flags;
  eaf_flags_t retslot_flags;
  eaf_flags_t static_chain_flags;
  unsigned writes_errno : 1;
  unsigned side_effects : 1;
  unsigned nondeterministic : 1;
  unsigned calls_interposable : 1;

  modref_summary_lto ();
  ~modref_summary_lto ();
  void dump (FILE *);
  bool useful_p (int ecf_flags, bool check_flags = true);
};

374/* Summary for a single function which this pass produces. */
375
376modref_summary_lto::modref_summary_lto ()
a70c0512 377 : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
a34edf9a
JH
378 writes_errno (false), side_effects (false), nondeterministic (false),
379 calls_interposable (false)
71dbabcc
JH
380{
381}
382
383modref_summary_lto::~modref_summary_lto ()
384{
385 if (loads)
386 ggc_delete (loads);
387 if (stores)
388 ggc_delete (stores);
389}
390
391
85ebbabd
JH
392/* Return true if lto summary is potentially useful for optimization.
393 If CHECK_FLAGS is false assume that arg_flags are useful. */
67c935c8
JH
394
395bool
85ebbabd 396modref_summary_lto::useful_p (int ecf_flags, bool check_flags)
67c935c8 397{
85ebbabd
JH
398 if (arg_flags.length () && !check_flags)
399 return true;
400 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
401 return true;
402 arg_flags.release ();
b8ef019a
JH
403 if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
404 return true;
a70c0512
JH
405 if (check_flags
406 && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
407 return true;
f6f704fd 408 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
a34edf9a
JH
409 return ((!side_effects || !nondeterministic)
410 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
67c935c8
JH
411 if (loads && !loads->every_base)
412 return true;
74509b96
JH
413 else
414 kills.release ();
67c935c8 415 if (ecf_flags & ECF_PURE)
a34edf9a
JH
416 return ((!side_effects || !nondeterministic)
417 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
e24817aa 418 return stores && !stores->every_base;
67c935c8
JH
419}
420
d119f34c
JH
421/* Dump records TT to OUT. */
422
423static void
424dump_records (modref_records *tt, FILE *out)
425{
d119f34c
JH
426 if (tt->every_base)
427 {
428 fprintf (out, " Every base\n");
429 return;
430 }
431 size_t i;
432 modref_base_node <alias_set_type> *n;
433 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
434 {
435 fprintf (out, " Base %i: alias set %i\n", (int)i, n->base);
436 if (n->every_ref)
437 {
438 fprintf (out, " Every ref\n");
439 continue;
440 }
441 size_t j;
442 modref_ref_node <alias_set_type> *r;
443 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
444 {
445 fprintf (out, " Ref %i: alias set %i\n", (int)j, r->ref);
c33f4742
JH
446 if (r->every_access)
447 {
ada353b8 448 fprintf (out, " Every access\n");
c33f4742
JH
449 continue;
450 }
451 size_t k;
452 modref_access_node *a;
453 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
e30bf330
JH
454 {
455 fprintf (out, " access:");
456 a->dump (out);
457 }
d119f34c
JH
458 }
459 }
460}
461
462/* Dump records TT to OUT. */
463
464static void
465dump_lto_records (modref_records_lto *tt, FILE *out)
466{
d119f34c
JH
467 if (tt->every_base)
468 {
469 fprintf (out, " Every base\n");
470 return;
471 }
472 size_t i;
473 modref_base_node <tree> *n;
474 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
475 {
476 fprintf (out, " Base %i:", (int)i);
477 print_generic_expr (dump_file, n->base);
478 fprintf (out, " (alias set %i)\n",
9044db88 479 n->base ? get_alias_set (n->base) : 0);
d119f34c
JH
480 if (n->every_ref)
481 {
482 fprintf (out, " Every ref\n");
483 continue;
484 }
485 size_t j;
486 modref_ref_node <tree> *r;
487 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
488 {
489 fprintf (out, " Ref %i:", (int)j);
490 print_generic_expr (dump_file, r->ref);
491 fprintf (out, " (alias set %i)\n",
9044db88 492 r->ref ? get_alias_set (r->ref) : 0);
c33f4742
JH
493 if (r->every_access)
494 {
56cb815b 495 fprintf (out, " Every access\n");
c33f4742
JH
496 continue;
497 }
498 size_t k;
499 modref_access_node *a;
500 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
e30bf330
JH
501 {
502 fprintf (out, " access:");
503 a->dump (out);
504 }
d119f34c
JH
505 }
506 }
507}
508
85ebbabd 509/* Dump all escape points of NODE to OUT. */
520d5ad3
JH
510
511static void
85ebbabd 512dump_modref_edge_summaries (FILE *out, cgraph_node *node, int depth)
520d5ad3 513{
85ebbabd
JH
514 int i = 0;
515 if (!escape_summaries)
516 return;
517 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
518 {
519 class escape_summary *sum = escape_summaries->get (e);
520 if (sum)
521 {
522 fprintf (out, "%*sIndirect call %i in %s escapes:",
523 depth, "", i, node->dump_name ());
524 sum->dump (out);
525 }
526 i++;
527 }
528 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
529 {
530 if (!e->inline_failed)
531 dump_modref_edge_summaries (out, e->callee, depth + 1);
532 class escape_summary *sum = escape_summaries->get (e);
533 if (sum)
534 {
535 fprintf (out, "%*sCall %s->%s escapes:", depth, "",
536 node->dump_name (), e->callee->dump_name ());
537 sum->dump (out);
538 }
539 class fnspec_summary *fsum = fnspec_summaries->get (e);
540 if (fsum)
541 {
542 fprintf (out, "%*sCall %s->%s fnspec: %s\n", depth, "",
543 node->dump_name (), e->callee->dump_name (),
544 fsum->fnspec);
545 }
546 }
547}
548
549/* Remove all call edge summaries associated with NODE. */
550
551static void
552remove_modref_edge_summaries (cgraph_node *node)
553{
554 if (!escape_summaries)
555 return;
556 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
557 escape_summaries->remove (e);
558 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
559 {
560 if (!e->inline_failed)
561 remove_modref_edge_summaries (e->callee);
562 escape_summaries->remove (e);
563 fnspec_summaries->remove (e);
564 }
520d5ad3
JH
565}
566
d119f34c
JH
567/* Dump summary. */
568
569void
570modref_summary::dump (FILE *out)
571{
6cef01c3
JH
572 if (loads)
573 {
574 fprintf (out, " loads:\n");
575 dump_records (loads, out);
576 }
577 if (stores)
578 {
579 fprintf (out, " stores:\n");
580 dump_records (stores, out);
581 }
64f3e71c
JH
582 if (kills.length ())
583 {
584 fprintf (out, " kills:\n");
585 for (auto kill : kills)
586 {
587 fprintf (out, " ");
588 kill.dump (out);
589 }
590 }
617695cd
JH
591 if (writes_errno)
592 fprintf (out, " Writes errno\n");
992644c3
JH
593 if (side_effects)
594 fprintf (out, " Side effects\n");
a34edf9a
JH
595 if (nondeterministic)
596 fprintf (out, " Nondeterministic\n");
597 if (calls_interposable)
598 fprintf (out, " Calls interposable\n");
e0040bc3
JH
599 if (global_memory_read)
600 fprintf (out, " Global memory read\n");
601 if (global_memory_written)
602 fprintf (out, " Global memory written\n");
5aa91072
JH
603 if (try_dse)
604 fprintf (out, " Try dse\n");
520d5ad3
JH
605 if (arg_flags.length ())
606 {
607 for (unsigned int i = 0; i < arg_flags.length (); i++)
608 if (arg_flags[i])
609 {
610 fprintf (out, " parm %i flags:", i);
611 dump_eaf_flags (out, arg_flags[i]);
612 }
613 }
b8ef019a
JH
614 if (retslot_flags)
615 {
616 fprintf (out, " Retslot flags:");
617 dump_eaf_flags (out, retslot_flags);
618 }
a70c0512
JH
619 if (static_chain_flags)
620 {
621 fprintf (out, " Static chain flags:");
622 dump_eaf_flags (out, static_chain_flags);
623 }
71dbabcc
JH
624}
625
626/* Dump summary. */
627
628void
629modref_summary_lto::dump (FILE *out)
630{
56cb815b
JH
631 fprintf (out, " loads:\n");
632 dump_lto_records (loads, out);
633 fprintf (out, " stores:\n");
634 dump_lto_records (stores, out);
74509b96
JH
635 if (kills.length ())
636 {
637 fprintf (out, " kills:\n");
638 for (auto kill : kills)
639 {
640 fprintf (out, " ");
641 kill.dump (out);
642 }
643 }
6cef01c3
JH
644 if (writes_errno)
645 fprintf (out, " Writes errno\n");
992644c3
JH
646 if (side_effects)
647 fprintf (out, " Side effects\n");
a34edf9a
JH
648 if (nondeterministic)
649 fprintf (out, " Nondeterministic\n");
650 if (calls_interposable)
651 fprintf (out, " Calls interposable\n");
85ebbabd
JH
652 if (arg_flags.length ())
653 {
654 for (unsigned int i = 0; i < arg_flags.length (); i++)
655 if (arg_flags[i])
656 {
657 fprintf (out, " parm %i flags:", i);
658 dump_eaf_flags (out, arg_flags[i]);
659 }
660 }
b8ef019a
JH
661 if (retslot_flags)
662 {
663 fprintf (out, " Retslot flags:");
664 dump_eaf_flags (out, retslot_flags);
665 }
a70c0512
JH
666 if (static_chain_flags)
667 {
668 fprintf (out, " Static chain flags:");
669 dump_eaf_flags (out, static_chain_flags);
670 }
d119f34c
JH
671}
672
e0040bc3 673/* Called after summary is produced and before it is used by local analysis.
5aa91072
JH
674 Can be called multiple times in case summary needs to update signature.
675 FUN is decl of function summary is attached to. */
e0040bc3 676void
5aa91072 677modref_summary::finalize (tree fun)
e0040bc3
JH
678{
679 global_memory_read = !loads || loads->global_access_p ();
680 global_memory_written = !stores || stores->global_access_p ();
5aa91072
JH
681
682 /* We can do DSE if we know function has no side effects and
683 we can analyse all stores. Disable dse if there are too many
684 stores to try. */
685 if (side_effects || global_memory_written || writes_errno)
686 try_dse = false;
687 else
688 {
689 try_dse = true;
690 size_t i, j, k;
691 int num_tests = 0, max_tests
8632f8c6 692 = opt_for_fn (fun, param_modref_max_tests);
5aa91072
JH
693 modref_base_node <alias_set_type> *base_node;
694 modref_ref_node <alias_set_type> *ref_node;
695 modref_access_node *access_node;
696 FOR_EACH_VEC_SAFE_ELT (stores->bases, i, base_node)
697 {
698 if (base_node->every_ref)
699 {
700 try_dse = false;
701 break;
702 }
703 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
704 {
705 if (base_node->every_ref)
706 {
707 try_dse = false;
708 break;
709 }
710 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
711 if (num_tests++ > max_tests
712 || !access_node->parm_offset_known)
713 {
714 try_dse = false;
715 break;
716 }
717 if (!try_dse)
718 break;
719 }
720 if (!try_dse)
721 break;
722 }
723 }
6180f5c8
RB
724 if (loads->every_base)
725 load_accesses = 1;
726 else
727 {
728 load_accesses = 0;
729 for (auto base_node : loads->bases)
730 {
731 if (base_node->every_ref)
732 load_accesses++;
733 else
734 for (auto ref_node : base_node->refs)
735 if (ref_node->every_access)
736 load_accesses++;
737 else
738 load_accesses += ref_node->accesses->length ();
739 }
740 }
e0040bc3
JH
741}
742
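/* Rough illustration (not real output): a function such as

     void set_pair (struct pair *p) { p->a = 0; p->b = 1; }

   that stores only through its pointer argument at known offsets ends up
   with try_dse set, while a single store with unknown parm_offset, a write
   to global memory, errno, or exceeding param_modref_max_tests turns
   try_dse off again.  */
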
d119f34c
JH
743/* Get function summary for FUNC if it exists, return NULL otherwise. */
744
745modref_summary *
746get_modref_function_summary (cgraph_node *func)
747{
748 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
71dbabcc 749 if (!optimization_summaries)
d119f34c
JH
750 return NULL;
751
752 /* A single function body may be represented by multiple symbols with
753 different visibility. For example, if FUNC is an interposable alias,
754 we don't want to return anything, even if we have summary for the target
755 function. */
756 enum availability avail;
c87ff875 757 func = func->ultimate_alias_target
520d5ad3
JH
758 (&avail, current_function_decl ?
759 cgraph_node::get (current_function_decl) : NULL);
d119f34c
JH
760 if (avail <= AVAIL_INTERPOSABLE)
761 return NULL;
762
71dbabcc
JH
763 modref_summary *r = optimization_summaries->get (func);
764 return r;
d119f34c
JH
765}
766
6dc90c4d
JH
767/* Get function summary for CALL if it exists, return NULL otherwise.
768 If non-null set interposed to indicate whether function may not
769 bind to current def. In this case sometimes loads from function
770 needs to be ignored. */
771
772modref_summary *
773get_modref_function_summary (gcall *call, bool *interposed)
774{
775 tree callee = gimple_call_fndecl (call);
776 if (!callee)
777 return NULL;
778 struct cgraph_node *node = cgraph_node::get (callee);
779 if (!node)
780 return NULL;
781 modref_summary *r = get_modref_function_summary (node);
782 if (interposed && r)
783 *interposed = r->calls_interposable
8632f8c6 784 || !node->binds_to_current_def_p ();
6dc90c4d
JH
785 return r;
786}
787
788
18f0873d
JH
789namespace {
790
09a4ffb7
JH
/* Return true if ECF flags say that nondeterminism can be ignored.  */
792
793static bool
794ignore_nondeterminism_p (tree caller, int flags)
795{
796 if (flags & (ECF_CONST | ECF_PURE))
797 return true;
798 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
799 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
800 return true;
801 return false;
802}
803
/* Return true if ECF flags say that the return value can be ignored.  */
805
806static bool
807ignore_retval_p (tree caller, int flags)
808{
809 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
810 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
811 return true;
812 return false;
813}
814
/* Return true if ECF flags say that stores can be ignored.  */
816
817static bool
818ignore_stores_p (tree caller, int flags)
819{
820 if (flags & (ECF_PURE | ECF_CONST | ECF_NOVOPS))
821 return true;
822 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
823 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
824 return true;
825 return false;
826}
827
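/* Example of the ECF_NORETURN | ECF_NOTHROW case (illustrative declaration):

     __attribute__ ((noreturn, nothrow)) void fatal_error_exit (void);

   stores performed by such a callee can never be observed by code that
   follows the call in this function, so the caller's summary may ignore
   them.  */
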
/* Determine parm_map for OP, which is supposed to be a pointer.  */
09a4ffb7
JH
829
830modref_parm_map
0f5afb62 831parm_map_for_ptr (tree op)
09a4ffb7
JH
832{
833 bool offset_known;
834 poly_int64 offset;
835 struct modref_parm_map parm_map;
0f5afb62 836 gcall *call;
09a4ffb7
JH
837
838 parm_map.parm_offset_known = false;
839 parm_map.parm_offset = 0;
840
841 offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
842 if (TREE_CODE (op) == SSA_NAME
843 && SSA_NAME_IS_DEFAULT_DEF (op)
844 && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
845 {
846 int index = 0;
0f5afb62
JH
847
848 if (cfun->static_chain_decl
849 && op == ssa_default_def (cfun, cfun->static_chain_decl))
850 index = MODREF_STATIC_CHAIN_PARM;
851 else
852 for (tree t = DECL_ARGUMENTS (current_function_decl);
853 t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
09a4ffb7 854 index++;
09a4ffb7
JH
855 parm_map.parm_index = index;
856 parm_map.parm_offset_known = offset_known;
857 parm_map.parm_offset = offset;
858 }
859 else if (points_to_local_or_readonly_memory_p (op))
860 parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
0f5afb62
JH
861 /* Memory allocated in the function is not visible to caller before the
862 call and thus we do not need to record it as load/stores/kills. */
863 else if (TREE_CODE (op) == SSA_NAME
864 && (call = dyn_cast<gcall *>(SSA_NAME_DEF_STMT (op))) != NULL
865 && gimple_call_flags (call) & ECF_MALLOC)
866 parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
09a4ffb7
JH
867 else
868 parm_map.parm_index = MODREF_UNKNOWN_PARM;
869 return parm_map;
870}
871
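/* Sketch of the resulting mapping (made-up function): for

     void f (int *a, int *b) { g (b + 2); }

   the argument B + 2 is mapped to parm_index 1 (B is the second PARM_DECL)
   with parm_offset_known set and parm_offset equal to the byte offset of
   B + 2 from B (8 with 4-byte int), while a pointer returned by an
   ECF_MALLOC call inside F maps to MODREF_LOCAL_MEMORY_PARM.  */
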
3305135c
JH
872/* Return true if ARG with EAF flags FLAGS can not make any caller's parameter
873 used (if LOAD is true we check loads, otherwise stores). */
874
875static bool
876verify_arg (tree arg, int flags, bool load)
877{
878 if (flags & EAF_UNUSED)
879 return true;
880 if (load && (flags & EAF_NO_DIRECT_READ))
881 return true;
882 if (!load
883 && (flags & (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
884 == (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
885 return true;
886 if (is_gimple_constant (arg))
887 return true;
888 if (DECL_P (arg) && TREE_READONLY (arg))
889 return true;
890 if (TREE_CODE (arg) == ADDR_EXPR)
891 {
892 tree t = get_base_address (TREE_OPERAND (arg, 0));
893 if (is_gimple_constant (t))
894 return true;
895 if (DECL_P (t)
896 && (TREE_READONLY (t) || TREE_CODE (t) == FUNCTION_DECL))
897 return true;
898 }
899 return false;
900}
901
902/* Return true if STMT may access memory that is pointed to by parameters
903 of caller and which is not seen as an escape by PTA.
904 CALLEE_ECF_FLAGS are ECF flags of callee. If LOAD is true then by access
905 we mean load, otherwise we mean store. */
906
907static bool
908may_access_nonescaping_parm_p (gcall *call, int callee_ecf_flags, bool load)
909{
910 int implicit_flags = 0;
911
912 if (ignore_stores_p (current_function_decl, callee_ecf_flags))
913 implicit_flags |= ignore_stores_eaf_flags;
914 if (callee_ecf_flags & ECF_PURE)
915 implicit_flags |= implicit_pure_eaf_flags;
916 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
917 implicit_flags |= implicit_const_eaf_flags;
918 if (gimple_call_chain (call)
919 && !verify_arg (gimple_call_chain (call),
920 gimple_call_static_chain_flags (call) | implicit_flags,
921 load))
922 return true;
923 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
924 if (!verify_arg (gimple_call_arg (call, i),
925 gimple_call_arg_flags (call, i) | implicit_flags,
926 load))
927 return true;
928 return false;
929}
930
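/* Intuition (illustrative call only): for

     bar (5, "literal", &readonly_global)

   every argument is a constant or a read-only object, so verify_arg accepts
   them all and this function returns false; passing the address of a
   writable local or of parameter-reachable memory instead makes it return
   true.  */
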
931
09a4ffb7
JH
932/* Analyze memory accesses (loads, stores and kills) performed
933 by the function. Set also side_effects, calls_interposable
934 and nondeterminism flags. */
935
936class modref_access_analysis
937{
938public:
939 modref_access_analysis (bool ipa, modref_summary *summary,
940 modref_summary_lto *summary_lto)
941 : m_summary (summary), m_summary_lto (summary_lto), m_ipa (ipa)
942 {
943 }
944 void analyze ();
945private:
946 bool set_side_effects ();
947 bool set_nondeterministic ();
948 static modref_access_node get_access (ao_ref *ref);
949 static void record_access (modref_records *, ao_ref *, modref_access_node &);
950 static void record_access_lto (modref_records_lto *, ao_ref *,
951 modref_access_node &a);
952 bool record_access_p (tree);
953 bool record_unknown_load ();
954 bool record_unknown_store ();
3305135c
JH
955 bool record_global_memory_load ();
956 bool record_global_memory_store ();
09a4ffb7
JH
957 bool merge_call_side_effects (gimple *, modref_summary *,
958 cgraph_node *, bool);
959 modref_access_node get_access_for_fnspec (gcall *, attr_fnspec &,
960 unsigned int, modref_parm_map &);
961 void process_fnspec (gcall *);
962 void analyze_call (gcall *);
963 static bool analyze_load (gimple *, tree, tree, void *);
964 static bool analyze_store (gimple *, tree, tree, void *);
965 void analyze_stmt (gimple *, bool);
966 void propagate ();
967
  /* Summary being computed.
     We work either with m_summary or m_summary_lto.  Never on both.  */
  modref_summary *m_summary;
  modref_summary_lto *m_summary_lto;
  /* Recursive calls need simplistic dataflow after analysis finished.
     Collect all calls into this vector during analysis and later process
     them in propagate.  */
975 auto_vec <gimple *, 32> m_recursive_calls;
976 /* ECF flags of function being analysed. */
977 int m_ecf_flags;
978 /* True if IPA propagation will be done later. */
979 bool m_ipa;
980 /* Set true if statement currently analysed is known to be
981 executed each time function is called. */
982 bool m_always_executed;
983};
984
/* Set side_effects flag and return true if something changed.  */
986
987bool
988modref_access_analysis::set_side_effects ()
989{
990 bool changed = false;
991
992 if (m_summary && !m_summary->side_effects)
993 {
994 m_summary->side_effects = true;
995 changed = true;
996 }
997 if (m_summary_lto && !m_summary_lto->side_effects)
998 {
999 m_summary_lto->side_effects = true;
1000 changed = true;
1001 }
1002 return changed;
1003}
1004
/* Set nondeterministic flag and return true if something changed.  */
1006
1007bool
1008modref_access_analysis::set_nondeterministic ()
1009{
1010 bool changed = false;
1011
1012 if (m_summary && !m_summary->nondeterministic)
1013 {
1014 m_summary->side_effects = m_summary->nondeterministic = true;
1015 changed = true;
1016 }
1017 if (m_summary_lto && !m_summary_lto->nondeterministic)
1018 {
1019 m_summary_lto->side_effects = m_summary_lto->nondeterministic = true;
1020 changed = true;
1021 }
1022 return changed;
1023}
1024
c33f4742 1025/* Construct modref_access_node from REF. */
09a4ffb7
JH
1026
1027modref_access_node
1028modref_access_analysis::get_access (ao_ref *ref)
c33f4742 1029{
c33f4742
JH
1030 tree base;
1031
c34db4b6
JH
1032 base = ao_ref_base (ref);
1033 modref_access_node a = {ref->offset, ref->size, ref->max_size,
1f3a3363 1034 0, MODREF_UNKNOWN_PARM, false, 0};
c33f4742
JH
1035 if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
1036 {
2bdf324f 1037 tree memref = base;
0f5afb62 1038 modref_parm_map m = parm_map_for_ptr (TREE_OPERAND (base, 0));
1f3a3363 1039
0f5afb62
JH
1040 a.parm_index = m.parm_index;
1041 if (a.parm_index != MODREF_UNKNOWN_PARM && TREE_CODE (memref) == MEM_REF)
1f3a3363
JH
1042 {
1043 a.parm_offset_known
1044 = wi::to_poly_wide (TREE_OPERAND
1045 (memref, 1)).to_shwi (&a.parm_offset);
0f5afb62
JH
1046 if (a.parm_offset_known && m.parm_offset_known)
1047 a.parm_offset += m.parm_offset;
1048 else
1049 a.parm_offset_known = false;
c33f4742 1050 }
c33f4742
JH
1051 }
1052 else
1f3a3363 1053 a.parm_index = MODREF_UNKNOWN_PARM;
c33f4742
JH
1054 return a;
1055}
1056
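/* Sketch with made-up numbers: for a reference like MEM[p_1 + 16] where P_1
   maps to parm_index 0 with a known parm_offset of 4, the returned access
   node has parm_index 0 and parm_offset 20; the MEM_REF offset is folded
   into the offset from the incoming parameter only when both offsets are
   known, otherwise parm_offset_known is cleared.  */
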
d119f34c
JH
1057/* Record access into the modref_records data structure. */
1058
09a4ffb7
JH
1059void
1060modref_access_analysis::record_access (modref_records *tt,
1061 ao_ref *ref,
1062 modref_access_node &a)
d119f34c 1063{
16c84809
JH
1064 alias_set_type base_set = !flag_strict_aliasing
1065 || !flag_ipa_strict_aliasing ? 0
d119f34c 1066 : ao_ref_base_alias_set (ref);
16c84809
JH
1067 alias_set_type ref_set = !flag_strict_aliasing
1068 || !flag_ipa_strict_aliasing ? 0
d119f34c
JH
1069 : (ao_ref_alias_set (ref));
1070 if (dump_file)
1071 {
4898e958
JH
1072 fprintf (dump_file, " - Recording base_set=%i ref_set=%i ",
1073 base_set, ref_set);
e30bf330 1074 a.dump (dump_file);
d119f34c 1075 }
8632f8c6 1076 tt->insert (current_function_decl, base_set, ref_set, a, false);
d119f34c
JH
1077}
1078
/* IPA version of record_access.  */
1080
09a4ffb7
JH
1081void
1082modref_access_analysis::record_access_lto (modref_records_lto *tt, ao_ref *ref,
1083 modref_access_node &a)
d119f34c
JH
1084{
1085 /* get_alias_set sometimes use different type to compute the alias set
1086 than TREE_TYPE (base). Do same adjustments. */
1087 tree base_type = NULL_TREE, ref_type = NULL_TREE;
16c84809 1088 if (flag_strict_aliasing && flag_ipa_strict_aliasing)
d119f34c
JH
1089 {
1090 tree base;
1091
1092 base = ref->ref;
1093 while (handled_component_p (base))
1094 base = TREE_OPERAND (base, 0);
1095
1096 base_type = reference_alias_ptr_type_1 (&base);
1097
1098 if (!base_type)
1099 base_type = TREE_TYPE (base);
1100 else
1101 base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
1102 ? NULL_TREE : TREE_TYPE (base_type);
1103
1104 tree ref_expr = ref->ref;
1105 ref_type = reference_alias_ptr_type_1 (&ref_expr);
1106
1107 if (!ref_type)
1108 ref_type = TREE_TYPE (ref_expr);
1109 else
1110 ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
1111 ? NULL_TREE : TREE_TYPE (ref_type);
1112
1113 /* Sanity check that we are in sync with what get_alias_set does. */
1114 gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
1115 || get_alias_set (base_type)
1116 == ao_ref_base_alias_set (ref));
1117 gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
1118 || get_alias_set (ref_type)
1119 == ao_ref_alias_set (ref));
1120
1121 /* Do not bother to record types that have no meaningful alias set.
1122 Also skip variably modified types since these go to local streams. */
1123 if (base_type && (!get_alias_set (base_type)
1124 || variably_modified_type_p (base_type, NULL_TREE)))
1125 base_type = NULL_TREE;
1126 if (ref_type && (!get_alias_set (ref_type)
1127 || variably_modified_type_p (ref_type, NULL_TREE)))
1128 ref_type = NULL_TREE;
1129 }
1130 if (dump_file)
1131 {
1132 fprintf (dump_file, " - Recording base type:");
1133 print_generic_expr (dump_file, base_type);
1134 fprintf (dump_file, " (alias set %i) ref type:",
1135 base_type ? get_alias_set (base_type) : 0);
1136 print_generic_expr (dump_file, ref_type);
4898e958
JH
1137 fprintf (dump_file, " (alias set %i) ",
1138 ref_type ? get_alias_set (ref_type) : 0);
e30bf330 1139 a.dump (dump_file);
d119f34c
JH
1140 }
1141
8632f8c6 1142 tt->insert (current_function_decl, base_type, ref_type, a, false);
d119f34c
JH
1143}
1144
1145/* Returns true if and only if we should store the access to EXPR.
1146 Some accesses, e.g. loads from automatic variables, are not interesting. */
1147
09a4ffb7
JH
1148bool
1149modref_access_analysis::record_access_p (tree expr)
d119f34c 1150{
09a4ffb7
JH
1151 if (TREE_THIS_VOLATILE (expr))
1152 {
1153 if (dump_file)
1154 fprintf (dump_file, " (volatile; marking nondeterministic) ");
1155 set_nondeterministic ();
1156 }
1157 if (cfun->can_throw_non_call_exceptions
1158 && tree_could_throw_p (expr))
1159 {
1160 if (dump_file)
1161 fprintf (dump_file, " (can throw; marking side effects) ");
1162 set_side_effects ();
1163 }
1164
e977dd5e 1165 if (refs_local_or_readonly_memory_p (expr))
d119f34c
JH
1166 {
1167 if (dump_file)
e977dd5e 1168 fprintf (dump_file, " - Read-only or local, ignoring.\n");
d119f34c
JH
1169 return false;
1170 }
d119f34c
JH
1171 return true;
1172}
1173
09a4ffb7 1174/* Collapse loads and return true if something changed. */
85ebbabd 1175
09a4ffb7
JH
1176bool
1177modref_access_analysis::record_unknown_load ()
85ebbabd 1178{
09a4ffb7 1179 bool changed = false;
d119f34c 1180
09a4ffb7
JH
1181 if (m_summary && !m_summary->loads->every_base)
1182 {
1183 m_summary->loads->collapse ();
1184 changed = true;
1185 }
1186 if (m_summary_lto && !m_summary_lto->loads->every_base)
1187 {
1188 m_summary_lto->loads->collapse ();
1189 changed = true;
1190 }
1191 return changed;
d119f34c
JH
1192}
1193
/* Collapse stores and return true if something changed.  */
617695cd 1195
09a4ffb7
JH
1196bool
1197modref_access_analysis::record_unknown_store ()
617695cd 1198{
09a4ffb7 1199 bool changed = false;
ea937e7d 1200
09a4ffb7 1201 if (m_summary && !m_summary->stores->every_base)
617695cd 1202 {
09a4ffb7
JH
1203 m_summary->stores->collapse ();
1204 changed = true;
617695cd 1205 }
09a4ffb7
JH
1206 if (m_summary_lto && !m_summary_lto->stores->every_base)
1207 {
1208 m_summary_lto->stores->collapse ();
1209 changed = true;
1210 }
1211 return changed;
617695cd
JH
1212}
1213
3305135c
JH
/* Record unknown load from global memory.  */
1215
1216bool
1217modref_access_analysis::record_global_memory_load ()
1218{
1219 bool changed = false;
1220 modref_access_node a = {0, -1, -1,
1221 0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
1222
1223 if (m_summary && !m_summary->loads->every_base)
1224 changed |= m_summary->loads->insert (current_function_decl, 0, 0, a, false);
1225 if (m_summary_lto && !m_summary_lto->loads->every_base)
1226 changed |= m_summary_lto->loads->insert (current_function_decl,
1227 0, 0, a, false);
1228 return changed;
1229}
1230
/* Record unknown store to global memory.  */
1232
1233bool
1234modref_access_analysis::record_global_memory_store ()
1235{
1236 bool changed = false;
1237 modref_access_node a = {0, -1, -1,
1238 0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
1239
1240 if (m_summary && !m_summary->stores->every_base)
1241 changed |= m_summary->stores->insert (current_function_decl,
1242 0, 0, a, false);
1243 if (m_summary_lto && !m_summary_lto->stores->every_base)
1244 changed |= m_summary_lto->stores->insert (current_function_decl,
1245 0, 0, a, false);
1246 return changed;
1247}
1248
09a4ffb7
JH
1249/* Merge side effects of call STMT to function with CALLEE_SUMMARY.
1250 Return true if something changed.
5c85f295
JH
1251 If IGNORE_STORES is true, do not merge stores.
1252 If RECORD_ADJUSTMENTS is true cap number of adjustments to
1253 a given access to make dataflow finite. */
ada353b8
JH
1254
1255bool
09a4ffb7
JH
1256modref_access_analysis::merge_call_side_effects
1257 (gimple *stmt, modref_summary *callee_summary,
1258 cgraph_node *callee_node, bool record_adjustments)
ada353b8 1259{
3305135c
JH
1260 gcall *call = as_a <gcall *> (stmt);
1261 int flags = gimple_call_flags (call);
8d3abf42 1262
  /* Nothing to do for non-looping const functions.  */
64f3e71c
JH
1264 if ((flags & (ECF_CONST | ECF_NOVOPS))
1265 && !(flags & ECF_LOOPING_CONST_OR_PURE))
09a4ffb7
JH
1266 return false;
1267
1268 bool changed = false;
64f3e71c 1269
ce2dbf94
JH
1270 if (dump_file)
1271 fprintf (dump_file, " - Merging side effects of %s\n",
1272 callee_node->dump_name ());
1273
09a4ffb7
JH
1274 /* Merge side effects and non-determinism.
     PURE/CONST flags make functions deterministic and if there is
     no LOOPING_CONST_OR_PURE they also have no side effects.  */
a34edf9a
JH
1277 if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
1278 || (flags & ECF_LOOPING_CONST_OR_PURE))
8d3abf42 1279 {
09a4ffb7 1280 if (!m_summary->side_effects && callee_summary->side_effects)
a34edf9a
JH
1281 {
1282 if (dump_file)
1283 fprintf (dump_file, " - merging side effects.\n");
09a4ffb7 1284 m_summary->side_effects = true;
a34edf9a
JH
1285 changed = true;
1286 }
09a4ffb7 1287 if (!m_summary->nondeterministic && callee_summary->nondeterministic
a34edf9a
JH
1288 && !ignore_nondeterminism_p (current_function_decl, flags))
1289 {
1290 if (dump_file)
1291 fprintf (dump_file, " - merging nondeterministic.\n");
09a4ffb7 1292 m_summary->nondeterministic = true;
a34edf9a
JH
1293 changed = true;
1294 }
1295 }
8d3abf42 1296
09a4ffb7 1297 /* For const functions we are done. */
8d3abf42
JH
1298 if (flags & (ECF_CONST | ECF_NOVOPS))
1299 return changed;
ada353b8 1300
09a4ffb7
JH
1301 /* Merge calls_interposable flags. */
1302 if (!m_summary->calls_interposable && callee_summary->calls_interposable)
a34edf9a
JH
1303 {
1304 if (dump_file)
1305 fprintf (dump_file, " - merging calls interposable.\n");
09a4ffb7 1306 m_summary->calls_interposable = true;
a34edf9a
JH
1307 changed = true;
1308 }
1309
09a4ffb7 1310 if (!callee_node->binds_to_current_def_p () && !m_summary->calls_interposable)
617695cd
JH
1311 {
1312 if (dump_file)
a34edf9a 1313 fprintf (dump_file, " - May be interposed.\n");
09a4ffb7 1314 m_summary->calls_interposable = true;
a34edf9a 1315 changed = true;
617695cd
JH
1316 }
1317
09a4ffb7
JH
1318 /* Now merge the actual load, store and kill vectors. For this we need
1319 to compute map translating new parameters to old. */
0b874e0f 1320 if (dump_file)
ce2dbf94 1321 fprintf (dump_file, " Parm map:");
0b874e0f 1322
09a4ffb7 1323 auto_vec <modref_parm_map, 32> parm_map;
3305135c
JH
1324 parm_map.safe_grow_cleared (gimple_call_num_args (call), true);
1325 for (unsigned i = 0; i < gimple_call_num_args (call); i++)
ada353b8 1326 {
3305135c 1327 parm_map[i] = parm_map_for_ptr (gimple_call_arg (call, i));
56cb815b 1328 if (dump_file)
c7b6a758
JH
1329 {
1330 fprintf (dump_file, " %i", parm_map[i].parm_index);
1331 if (parm_map[i].parm_offset_known)
1332 {
1333 fprintf (dump_file, " offset:");
1334 print_dec ((poly_int64_pod)parm_map[i].parm_offset,
1335 dump_file, SIGNED);
1336 }
1337 }
ada353b8 1338 }
09a4ffb7
JH
1339
1340 modref_parm_map chain_map;
3305135c 1341 if (gimple_call_chain (call))
1f3a3363 1342 {
3305135c 1343 chain_map = parm_map_for_ptr (gimple_call_chain (call));
1f3a3363
JH
1344 if (dump_file)
1345 {
1346 fprintf (dump_file, "static chain %i", chain_map.parm_index);
1347 if (chain_map.parm_offset_known)
1348 {
1349 fprintf (dump_file, " offset:");
1350 print_dec ((poly_int64_pod)chain_map.parm_offset,
1351 dump_file, SIGNED);
1352 }
1353 }
1354 }
56cb815b
JH
1355 if (dump_file)
1356 fprintf (dump_file, "\n");
ada353b8 1357
09a4ffb7
JH
  /* Kills can be merged in only if we know the function is going to be
     always executed.  */
1360 if (m_always_executed
e69b7c57
JH
1361 && callee_summary->kills.length ()
1362 && (!cfun->can_throw_non_call_exceptions
3305135c 1363 || !stmt_could_throw_p (cfun, call)))
e69b7c57
JH
1364 {
1365 /* Watch for self recursive updates. */
1366 auto_vec<modref_access_node, 32> saved_kills;
1367
1368 saved_kills.reserve_exact (callee_summary->kills.length ());
1369 saved_kills.splice (callee_summary->kills);
1370 for (auto kill : saved_kills)
1371 {
1372 if (kill.parm_index >= (int)parm_map.length ())
1373 continue;
1374 modref_parm_map &m
1375 = kill.parm_index == MODREF_STATIC_CHAIN_PARM
1376 ? chain_map
1377 : parm_map[kill.parm_index];
1378 if (m.parm_index == MODREF_LOCAL_MEMORY_PARM
1379 || m.parm_index == MODREF_UNKNOWN_PARM
1380 || m.parm_index == MODREF_RETSLOT_PARM
1381 || !m.parm_offset_known)
1382 continue;
1383 modref_access_node n = kill;
1384 n.parm_index = m.parm_index;
1385 n.parm_offset += m.parm_offset;
09a4ffb7 1386 if (modref_access_node::insert_kill (m_summary->kills, n,
e69b7c57
JH
1387 record_adjustments))
1388 changed = true;
1389 }
1390 }
1391
09a4ffb7 1392 /* Merge in loads. */
8632f8c6
JH
1393 changed |= m_summary->loads->merge (current_function_decl,
1394 callee_summary->loads,
1395 &parm_map, &chain_map,
3305135c
JH
1396 record_adjustments,
1397 !may_access_nonescaping_parm_p
1398 (call, flags, true));
09a4ffb7
JH
1399 /* Merge in stores. */
1400 if (!ignore_stores_p (current_function_decl, flags))
617695cd 1401 {
8632f8c6
JH
1402 changed |= m_summary->stores->merge (current_function_decl,
1403 callee_summary->stores,
09a4ffb7 1404 &parm_map, &chain_map,
3305135c
JH
1405 record_adjustments,
1406 !may_access_nonescaping_parm_p
1407 (call, flags, false));
09a4ffb7 1408 if (!m_summary->writes_errno
617695cd
JH
1409 && callee_summary->writes_errno)
1410 {
09a4ffb7 1411 m_summary->writes_errno = true;
617695cd
JH
1412 changed = true;
1413 }
1414 }
ada353b8
JH
1415 return changed;
1416}
1417
617695cd
JH
1418/* Return access mode for argument I of call STMT with FNSPEC. */
1419
09a4ffb7
JH
1420modref_access_node
1421modref_access_analysis::get_access_for_fnspec (gcall *call, attr_fnspec &fnspec,
1422 unsigned int i,
1423 modref_parm_map &map)
617695cd
JH
1424{
1425 tree size = NULL_TREE;
1426 unsigned int size_arg;
1427
1428 if (!fnspec.arg_specified_p (i))
1429 ;
1430 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
1431 size = gimple_call_arg (call, size_arg);
1432 else if (fnspec.arg_access_size_given_by_type_p (i))
1433 {
1434 tree callee = gimple_call_fndecl (call);
1435 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
1436
1437 for (unsigned int p = 0; p < i; p++)
1438 t = TREE_CHAIN (t);
1439 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
1440 }
1441 modref_access_node a = {0, -1, -1,
1442 map.parm_offset, map.parm_index,
5c85f295 1443 map.parm_offset_known, 0};
617695cd
JH
1444 poly_int64 size_hwi;
1445 if (size
1446 && poly_int_tree_p (size, &size_hwi)
1447 && coeffs_in_range_p (size_hwi, 0,
1448 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
1449 {
1450 a.size = -1;
1451 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
1452 }
1453 return a;
1454}
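/* Hypothetical illustration: for a memset-like call where the fnspec says
   the maximum access size of argument 0 is given by argument 2, the access
   recorded for argument 0 starts at that argument's parm_offset, keeps size
   unknown (-1) and takes max_size from the constant value of argument 2
   when it fits; otherwise max_size stays unknown too.  */
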
617695cd
JH
/* Apply side effects of call STMT to the summary being computed using the
   call's fnspec string, if any.  */
1458
09a4ffb7
JH
1459void
1460modref_access_analysis::process_fnspec (gcall *call)
617695cd 1461{
992644c3
JH
1462 int flags = gimple_call_flags (call);
1463
09a4ffb7
JH
  /* PURE/CONST flags make functions deterministic and if there is
     no LOOPING_CONST_OR_PURE they also have no side effects.  */
8d3abf42 1466 if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
992644c3
JH
1467 || (flags & ECF_LOOPING_CONST_OR_PURE)
1468 || (cfun->can_throw_non_call_exceptions
1469 && stmt_could_throw_p (cfun, call)))
1470 {
09a4ffb7
JH
1471 set_side_effects ();
1472 if (!ignore_nondeterminism_p (current_function_decl, flags))
1473 set_nondeterministic ();
992644c3 1474 }
09a4ffb7
JH
1475
1476 /* For const functions we are done. */
8d3abf42 1477 if (flags & (ECF_CONST | ECF_NOVOPS))
09a4ffb7
JH
1478 return;
1479
1480 attr_fnspec fnspec = gimple_call_fnspec (call);
  /* If there is no fnspec we know nothing about loads & stores.  */
617695cd
JH
1482 if (!fnspec.known_p ())
1483 {
1484 if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1485 fprintf (dump_file, " Builtin with no fnspec: %s\n",
1486 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call))));
09a4ffb7 1487 if (!ignore_stores_p (current_function_decl, flags))
3305135c
JH
1488 {
1489 if (!may_access_nonescaping_parm_p (call, flags, false))
1490 record_global_memory_store ();
1491 else
1492 record_unknown_store ();
1493 if (!may_access_nonescaping_parm_p (call, flags, true))
1494 record_global_memory_load ();
1495 else
1496 record_unknown_load ();
1497 }
1498 else
1499 {
1500 if (!may_access_nonescaping_parm_p (call, flags, true))
1501 record_global_memory_load ();
1502 else
1503 record_unknown_load ();
1504 }
09a4ffb7 1505 return;
617695cd 1506 }
09a4ffb7 1507 /* Process fnspec. */
617695cd 1508 if (fnspec.global_memory_read_p ())
3305135c
JH
1509 {
1510 if (may_access_nonescaping_parm_p (call, flags, true))
1511 record_unknown_load ();
1512 else
1513 record_global_memory_load ();
1514 }
617695cd
JH
1515 else
1516 {
1517 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
1518 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
1519 ;
1520 else if (!fnspec.arg_specified_p (i)
1521 || fnspec.arg_maybe_read_p (i))
1522 {
0f5afb62 1523 modref_parm_map map = parm_map_for_ptr
1f3a3363 1524 (gimple_call_arg (call, i));
617695cd 1525
1f3a3363 1526 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
617695cd 1527 continue;
1f3a3363 1528 if (map.parm_index == MODREF_UNKNOWN_PARM)
617695cd 1529 {
09a4ffb7 1530 record_unknown_load ();
617695cd
JH
1531 break;
1532 }
09a4ffb7 1533 modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
0f5afb62
JH
1534 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1535 continue;
09a4ffb7 1536 if (m_summary)
8632f8c6 1537 m_summary->loads->insert (current_function_decl, 0, 0, a, false);
09a4ffb7 1538 if (m_summary_lto)
8632f8c6
JH
1539 m_summary_lto->loads->insert (current_function_decl, 0, 0, a,
1540 false);
617695cd
JH
1541 }
1542 }
09a4ffb7
JH
1543 if (ignore_stores_p (current_function_decl, flags))
1544 return;
617695cd 1545 if (fnspec.global_memory_written_p ())
3305135c
JH
1546 {
1547 if (may_access_nonescaping_parm_p (call, flags, false))
1548 record_unknown_store ();
1549 else
1550 record_global_memory_store ();
1551 }
617695cd
JH
1552 else
1553 {
1554 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
1555 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
1556 ;
1557 else if (!fnspec.arg_specified_p (i)
1558 || fnspec.arg_maybe_written_p (i))
1559 {
0f5afb62 1560 modref_parm_map map = parm_map_for_ptr
1f3a3363 1561 (gimple_call_arg (call, i));
617695cd 1562
1f3a3363 1563 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
617695cd 1564 continue;
1f3a3363 1565 if (map.parm_index == MODREF_UNKNOWN_PARM)
617695cd 1566 {
09a4ffb7 1567 record_unknown_store ();
617695cd
JH
1568 break;
1569 }
09a4ffb7 1570 modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
0f5afb62
JH
1571 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1572 continue;
09a4ffb7 1573 if (m_summary)
8632f8c6 1574 m_summary->stores->insert (current_function_decl, 0, 0, a, false);
09a4ffb7 1575 if (m_summary_lto)
8632f8c6
JH
1576 m_summary_lto->stores->insert (current_function_decl,
1577 0, 0, a, false);
617695cd
JH
1578 }
1579 if (fnspec.errno_maybe_written_p () && flag_errno_math)
6cef01c3 1580 {
09a4ffb7
JH
1581 if (m_summary)
1582 m_summary->writes_errno = true;
1583 if (m_summary_lto)
1584 m_summary_lto->writes_errno = true;
6cef01c3 1585 }
617695cd 1586 }
617695cd
JH
1587}
1588
ada353b8
JH
/* Analyze function call STMT.  Remember recursive calls
   in m_recursive_calls for later processing in propagate.  */
d119f34c 1591
09a4ffb7
JH
1592void
1593modref_access_analysis::analyze_call (gcall *stmt)
d119f34c
JH
1594{
1595 /* Check flags on the function call. In certain cases, analysis can be
1596 simplified. */
1597 int flags = gimple_call_flags (stmt);
09a4ffb7 1598
3305135c
JH
1599 if (dump_file)
1600 {
1601 fprintf (dump_file, " - Analyzing call:");
1602 print_gimple_stmt (dump_file, stmt, 0);
1603 }
1604
8d3abf42
JH
1605 if ((flags & (ECF_CONST | ECF_NOVOPS))
1606 && !(flags & ECF_LOOPING_CONST_OR_PURE))
d119f34c
JH
1607 {
1608 if (dump_file)
1609 fprintf (dump_file,
1610 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
1611 "except for args.\n");
09a4ffb7 1612 return;
d119f34c
JH
1613 }
1614
d119f34c
JH
1615 /* Next, we try to get the callee's function declaration. The goal is to
1616 merge their summary with ours. */
1617 tree callee = gimple_call_fndecl (stmt);
1618
1619 /* Check if this is an indirect call. */
1620 if (!callee)
1621 {
d119f34c 1622 if (dump_file)
6cef01c3
JH
1623 fprintf (dump_file, gimple_call_internal_p (stmt)
1624 ? " - Internal call" : " - Indirect call.\n");
09a4ffb7
JH
1625 process_fnspec (stmt);
1626 return;
d119f34c 1627 }
6cef01c3 1628 /* We only need to handle internal calls in IPA mode. */
09a4ffb7 1629 gcc_checking_assert (!m_summary_lto && !m_ipa);
d119f34c
JH
1630
1631 struct cgraph_node *callee_node = cgraph_node::get_create (callee);
1632
d119f34c
JH
1633 /* If this is a recursive call, the target summary is the same as ours, so
1634 there's nothing to do. */
1635 if (recursive_call_p (current_function_decl, callee))
1636 {
09a4ffb7
JH
1637 m_recursive_calls.safe_push (stmt);
1638 set_side_effects ();
d119f34c
JH
1639 if (dump_file)
1640 fprintf (dump_file, " - Skipping recursive call.\n");
09a4ffb7 1641 return;
d119f34c
JH
1642 }
1643
1644 gcc_assert (callee_node != NULL);
1645
1646 /* Get the function symbol and its availability. */
1647 enum availability avail;
1648 callee_node = callee_node->function_symbol (&avail);
992644c3
JH
1649 bool looping;
1650 if (builtin_safe_for_const_function_p (&looping, callee))
1651 {
1652 if (looping)
09a4ffb7 1653 set_side_effects ();
992644c3 1654 if (dump_file)
09a4ffb7
JH
1655 fprintf (dump_file, " - Builtin is safe for const.\n");
1656 return;
992644c3 1657 }
d119f34c
JH
1658 if (avail <= AVAIL_INTERPOSABLE)
1659 {
d119f34c 1660 if (dump_file)
09a4ffb7
JH
1661 fprintf (dump_file,
1662 " - Function availability <= AVAIL_INTERPOSABLE.\n");
1663 process_fnspec (stmt);
1664 return;
d119f34c
JH
1665 }
1666
1667 /* Get callee's modref summary. As above, if there's no summary, we either
1668 have to give up or, if stores are ignored, we can just purge loads. */
71dbabcc 1669 modref_summary *callee_summary = optimization_summaries->get (callee_node);
d119f34c
JH
1670 if (!callee_summary)
1671 {
d119f34c
JH
1672 if (dump_file)
1673 fprintf (dump_file, " - No modref summary available for callee.\n");
09a4ffb7
JH
1674 process_fnspec (stmt);
1675 return;
d119f34c
JH
1676 }
1677
09a4ffb7 1678 merge_call_side_effects (stmt, callee_summary, callee_node, false);
d119f34c 1679
09a4ffb7 1680 return;
d119f34c
JH
1681}
1682
1683/* Helper for analyze_stmt. */
1684
09a4ffb7
JH
1685bool
1686modref_access_analysis::analyze_load (gimple *, tree, tree op, void *data)
d119f34c 1687{
09a4ffb7 1688 modref_access_analysis *t = (modref_access_analysis *)data;
d119f34c
JH
1689
1690 if (dump_file)
1691 {
1692 fprintf (dump_file, " - Analyzing load: ");
1693 print_generic_expr (dump_file, op);
1694 fprintf (dump_file, "\n");
1695 }
1696
09a4ffb7 1697 if (!t->record_access_p (op))
d119f34c
JH
1698 return false;
1699
1700 ao_ref r;
1701 ao_ref_init (&r, op);
64f3e71c 1702 modref_access_node a = get_access (&r);
0f5afb62
JH
1703 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1704 return false;
d119f34c 1705
09a4ffb7
JH
1706 if (t->m_summary)
1707 t->record_access (t->m_summary->loads, &r, a);
1708 if (t->m_summary_lto)
1709 t->record_access_lto (t->m_summary_lto->loads, &r, a);
d119f34c
JH
1710 return false;
1711}
1712
1713/* Helper for analyze_stmt. */
1714
09a4ffb7
JH
1715bool
1716modref_access_analysis::analyze_store (gimple *stmt, tree, tree op, void *data)
d119f34c 1717{
09a4ffb7 1718 modref_access_analysis *t = (modref_access_analysis *)data;
d119f34c
JH
1719
1720 if (dump_file)
1721 {
1722 fprintf (dump_file, " - Analyzing store: ");
1723 print_generic_expr (dump_file, op);
1724 fprintf (dump_file, "\n");
1725 }
1726
09a4ffb7 1727 if (!t->record_access_p (op))
d119f34c
JH
1728 return false;
1729
1730 ao_ref r;
1731 ao_ref_init (&r, op);
64f3e71c 1732 modref_access_node a = get_access (&r);
0f5afb62
JH
1733 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1734 return false;
d119f34c 1735
09a4ffb7
JH
1736 if (t->m_summary)
1737 t->record_access (t->m_summary->stores, &r, a);
1738 if (t->m_summary_lto)
1739 t->record_access_lto (t->m_summary_lto->stores, &r, a);
1740 if (t->m_always_executed
64f3e71c
JH
1741 && a.useful_for_kill_p ()
1742 && (!cfun->can_throw_non_call_exceptions
1743 || !stmt_could_throw_p (cfun, stmt)))
1744 {
1745 if (dump_file)
1746 fprintf (dump_file, " - Recording kill\n");
09a4ffb7
JH
1747 if (t->m_summary)
1748 modref_access_node::insert_kill (t->m_summary->kills, a, false);
1749 if (t->m_summary_lto)
1750 modref_access_node::insert_kill (t->m_summary_lto->kills, a, false);
64f3e71c 1751 }
d119f34c
JH
1752 return false;
1753}
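/* For example, a store "*parm_1 = 0" that is always executed and cannot throw
   is recorded twice: as a regular store access and, since such an access is
   useful_for_kill_p, as a kill, which tells users of the summary that the
   previous contents of that memory are overwritten on every execution of the
   function.  */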
1754
1755/* Analyze statement STMT.
1756 ALWAYS_EXECUTED is true if the statement is executed on every invocation of the function. */
1757
09a4ffb7
JH
1758void
1759modref_access_analysis::analyze_stmt (gimple *stmt, bool always_executed)
d119f34c 1760{
09a4ffb7 1761 m_always_executed = always_executed;
8a2fd716
JJ
1762 /* In general we cannot ignore clobbers because they are barriers for code
1763 motion; however, after inlining it is safe to do so because local optimization
3991912e 1764 passes do not consider clobbers from other functions.
8a2fd716 1765 Similar logic is in ipa-pure-const.c. */
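/* (A typical case is the end-of-scope clobber "x ={v} {CLOBBER};" emitted for
   a local aggregate: after inlining it can, when always executed, be treated
   as a kill of the underlying memory rather than as a barrier.)  */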
09a4ffb7 1766 if ((m_ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
64f3e71c 1767 {
74509b96 1768 if (always_executed && record_access_p (gimple_assign_lhs (stmt)))
64f3e71c
JH
1769 {
1770 ao_ref r;
1771 ao_ref_init (&r, gimple_assign_lhs (stmt));
1772 modref_access_node a = get_access (&r);
1773 if (a.useful_for_kill_p ())
1774 {
1775 if (dump_file)
1776 fprintf (dump_file, " - Recording kill\n");
09a4ffb7
JH
1777 if (m_summary)
1778 modref_access_node::insert_kill (m_summary->kills, a, false);
1779 if (m_summary_lto)
1780 modref_access_node::insert_kill (m_summary_lto->kills,
1781 a, false);
64f3e71c
JH
1782 }
1783 }
09a4ffb7 1784 return;
64f3e71c 1785 }
3991912e 1786
d119f34c 1787 /* Analyze all loads and stores in STMT. */
09a4ffb7 1788 walk_stmt_load_store_ops (stmt, this,
d119f34c 1789 analyze_load, analyze_store);
d119f34c
JH
1790
1791 switch (gimple_code (stmt))
1792 {
1793 case GIMPLE_ASM:
a34edf9a 1794 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
09a4ffb7 1795 set_nondeterministic ();
a34edf9a
JH
1796 if (cfun->can_throw_non_call_exceptions
1797 && stmt_could_throw_p (cfun, stmt))
09a4ffb7 1798 set_side_effects ();
d119f34c
JH
1799 /* If the ASM statement neither reads nor writes memory, there's nothing
1800 to do. Otherwise just give up. */
1801 if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
09a4ffb7 1802 return;
d119f34c
JH
1803 if (dump_file)
1804 fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
1805 "which clobbers memory.\n");
09a4ffb7
JH
1806 record_unknown_load ();
1807 record_unknown_store ();
1808 return;
d119f34c 1809 case GIMPLE_CALL:
09a4ffb7
JH
1810 if (!m_ipa || gimple_call_internal_p (stmt))
1811 analyze_call (as_a <gcall *> (stmt));
6cef01c3 1812 else
09a4ffb7
JH
1813 {
1814 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
1815
1816 if (fnspec.known_p ()
1817 && (!fnspec.global_memory_read_p ()
1818 || !fnspec.global_memory_written_p ()))
1819 {
1820 cgraph_edge *e = cgraph_node::get
1821 (current_function_decl)->get_edge (stmt);
1822 if (e->callee)
1823 {
1824 fnspec_summaries->get_create (e)->fnspec
1825 = xstrdup (fnspec.get_str ());
1826 if (dump_file)
1827 fprintf (dump_file, " Recorded fnspec %s\n",
1828 fnspec.get_str ());
1829 }
1830 }
1831 }
1832 return;
d119f34c 1833 default:
992644c3
JH
1834 if (cfun->can_throw_non_call_exceptions
1835 && stmt_could_throw_p (cfun, stmt))
09a4ffb7
JH
1836 set_side_effects ();
1837 return;
d119f34c
JH
1838 }
1839}
1840
09a4ffb7 1841/* Propagate loads/stores across recursive calls. */
71dbabcc 1842
09a4ffb7
JH
1843void
1844modref_access_analysis::propagate ()
71dbabcc 1845{
09a4ffb7
JH
1846 if (m_ipa && m_summary)
1847 return;
1848
1849 bool changed = true;
1850 bool first = true;
71dbabcc 1851 cgraph_node *fnode = cgraph_node::get (current_function_decl);
09a4ffb7
JH
1852
1853 m_always_executed = false;
1854 while (changed && m_summary->useful_p (m_ecf_flags, false))
71dbabcc 1855 {
09a4ffb7
JH
1856 changed = false;
1857 for (unsigned i = 0; i < m_recursive_calls.length (); i++)
1858 {
1859 changed |= merge_call_side_effects (m_recursive_calls[i], m_summary,
1860 fnode, !first);
1861 }
1862 first = false;
1863 }
1864}
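/* E.g. for a directly self-recursive function the effects of the recursive
   call are the function's own summary, so the summary is merged into itself
   at each recorded recursive call site until it stops changing (or stops
   being useful).  */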
1865
1866/* Analyze function. */
1867
1868void
1869modref_access_analysis::analyze ()
1870{
1871 m_ecf_flags = flags_from_decl_or_type (current_function_decl);
1872 bool summary_useful = true;
1873
1874 /* Analyze each statement in each basic block of the function. If the
1875 statement cannot be analyzed (for any reason), the entire function cannot
1876 be analyzed by modref. */
1877 basic_block bb;
1878 FOR_EACH_BB_FN (bb, cfun)
1879 {
1880 gimple_stmt_iterator si;
1881 bool always_executed
1882 = bb == single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1883
1884 for (si = gsi_start_nondebug_after_labels_bb (bb);
1885 !gsi_end_p (si); gsi_next_nondebug (&si))
1886 {
e93809f6
JH
1887 /* NULL memory accesses terminate the BB. These accesses are known
1888 to trigger undefined behaviour. gimple-ssa-isolate-paths turns them
1889 into volatile accesses and adds a builtin_trap call which would
1890 confuse us otherwise. */
1891 if (infer_nonnull_range_by_dereference (gsi_stmt (si),
1892 null_pointer_node))
1893 {
1894 if (dump_file)
1895 fprintf (dump_file, " - NULL memory access; terminating BB\n");
1896 if (flag_non_call_exceptions)
1897 set_side_effects ();
1898 break;
1899 }
09a4ffb7
JH
1900 analyze_stmt (gsi_stmt (si), always_executed);
1901
1902 /* Avoid doing useless work. */
1903 if ((!m_summary || !m_summary->useful_p (m_ecf_flags, false))
1904 && (!m_summary_lto
1905 || !m_summary_lto->useful_p (m_ecf_flags, false)))
1906 {
1907 summary_useful = false;
1908 break;
1909 }
1910 if (always_executed
1911 && stmt_can_throw_external (cfun, gsi_stmt (si)))
1912 always_executed = false;
1913 }
1914 if (!summary_useful)
1915 break;
1916 }
1917 /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
1918 This needs to be done after all other side effects are computed. */
1919 if (summary_useful)
1920 {
1921 if (!m_ipa)
1922 propagate ();
1923 if (m_summary && !m_summary->side_effects && !finite_function_p ())
1924 m_summary->side_effects = true;
1925 if (m_summary_lto && !m_summary_lto->side_effects
1926 && !finite_function_p ())
1927 m_summary_lto->side_effects = true;
71dbabcc 1928 }
71dbabcc
JH
1929}
1930
520d5ad3
JH
1931/* Return true if OP accesses memory pointed to by SSA_NAME. */
1932
1933bool
1934memory_access_to (tree op, tree ssa_name)
1935{
1936 tree base = get_base_address (op);
1937 if (!base)
1938 return false;
1939 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
1940 return false;
1941 return TREE_OPERAND (base, 0) == ssa_name;
1942}
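/* For instance, memory_access_to (*ptr_1, ptr_1) is true (the base is a
   MEM_REF whose pointer operand is ptr_1), while memory_access_to (ptr_1,
   ptr_1) or an access based on some other SSA name is not.  */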
1943
1944/* Consider statement val = *arg.
1945 Return EAF flags of ARG that can be determined from EAF flags of VAL
1946 (which are known to be FLAGS). If IGNORE_STORES is true we can ignore
1947 all stores to VAL, e.g. when handling a noreturn function. */
1948
1949static int
1950deref_flags (int flags, bool ignore_stores)
1951{
d70ef656
JH
1952 /* A dereference is also a direct read, but the dereferenced value does not
1953 yield any other direct use. */
1954 int ret = EAF_NO_DIRECT_CLOBBER | EAF_NO_DIRECT_ESCAPE
1955 | EAF_NOT_RETURNED_DIRECTLY;
4341b1b1
JH
1956 /* If argument is unused just account for
1957 the read involved in dereference. */
520d5ad3 1958 if (flags & EAF_UNUSED)
d70ef656
JH
1959 ret |= EAF_NO_INDIRECT_READ | EAF_NO_INDIRECT_CLOBBER
1960 | EAF_NO_INDIRECT_ESCAPE;
520d5ad3
JH
1961 else
1962 {
d70ef656
JH
1963 /* Direct or indirect accesses lead to indirect accesses. */
1964 if (((flags & EAF_NO_DIRECT_CLOBBER)
1965 && (flags & EAF_NO_INDIRECT_CLOBBER))
1966 || ignore_stores)
1967 ret |= EAF_NO_INDIRECT_CLOBBER;
1968 if (((flags & EAF_NO_DIRECT_ESCAPE)
1969 && (flags & EAF_NO_INDIRECT_ESCAPE))
1970 || ignore_stores)
1971 ret |= EAF_NO_INDIRECT_ESCAPE;
1972 if ((flags & EAF_NO_DIRECT_READ)
1973 && (flags & EAF_NO_INDIRECT_READ))
1974 ret |= EAF_NO_INDIRECT_READ;
1975 if ((flags & EAF_NOT_RETURNED_DIRECTLY)
1976 && (flags & EAF_NOT_RETURNED_INDIRECTLY))
1977 ret |= EAF_NOT_RETURNED_INDIRECTLY;
520d5ad3
JH
1978 }
1979 return ret;
1980}
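/* For example, if VAL is completely unused (EAF_UNUSED), the only effect of
   "val = *arg" is the read done by the dereference itself, so ARG gets
   EAF_NO_DIRECT_CLOBBER, EAF_NO_DIRECT_ESCAPE and EAF_NOT_RETURNED_DIRECTLY
   plus the no-indirect read/clobber/escape bits.  Conversely, if VAL may be
   clobbered, then memory reachable from ARG may be clobbered indirectly, so
   EAF_NO_INDIRECT_CLOBBER is kept only when VAL has both the direct and
   indirect no-clobber guarantees (or stores are ignored).  */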
1981
85ebbabd 1982
09a4ffb7
JH
1983/* Description of an escape point: a call which affects flags of a given
1984 SSA name. */
85ebbabd
JH
1985
1986struct escape_point
1987{
1988 /* Value escapes to this call. */
1989 gcall *call;
1990 /* Argument it escapes to. */
1991 int arg;
1992 /* Flags already known about the argument (this can save us from recording
1993 escape points if the local analysis already did a good job). */
8da8ed43 1994 eaf_flags_t min_flags;
85ebbabd
JH
1995 /* Does the value escape directly or indirectly? */
1996 bool direct;
1997};
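/* For instance, when analyzing "foo (p_1)" in IPA mode (a hypothetical call)
   where p_1 is derived from parameter 0, we cannot consult foo's summary yet;
   we record an escape point (call = the gcall, arg = 0, min_flags = flags
   already proven locally, direct = true) and let the IPA propagation combine
   it with foo's argument flags later.  */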
1998
09a4ffb7
JH
1999/* Lattice used during the EAF flags analysis dataflow. For a given SSA name
2000 we aim to compute its flags and escape points. We also use the lattice
2001 to dynamically build the dataflow graph to propagate on. */
2002
85ebbabd
JH
2003class modref_lattice
2004{
2005public:
2006 /* EAF flags of the SSA name. */
4341b1b1 2007 eaf_flags_t flags;
4898e958
JH
2008 /* Used during the DFS walk to mark names whose final value was determined
2009 without the need for dataflow. */
85ebbabd 2010 bool known;
4898e958 2011 /* Used during DFS walk to mark open vertices (for cycle detection). */
85ebbabd 2012 bool open;
4898e958
JH
2013 /* Set during the DFS walk for names that need dataflow propagation. */
2014 bool do_dataflow;
2015 /* Used during the iterative dataflow. */
2016 bool changed;
85ebbabd
JH
2017
2018 /* When doing IPA analysis we cannot merge in callee escape points;
2019 only remember them and do the merging at IPA propagation time. */
2020 vec <escape_point, va_heap, vl_ptr> escape_points;
2021
4898e958
JH
2022 /* Representation of a graph for dataflow. This graph is built on-demand
2023 using modref_eaf_analysis::analyze_ssa and later solved by
2024 modref_eaf_analysis::propagate.
2025 Each edge represents the fact that flags of current lattice should be
2026 propagated to lattice of SSA_NAME. */
2027 struct propagate_edge
2028 {
2029 int ssa_name;
2030 bool deref;
2031 };
2032 vec <propagate_edge, va_heap, vl_ptr> propagate_to;
2033
85ebbabd
JH
2034 void init ();
2035 void release ();
2036 bool merge (const modref_lattice &with);
2037 bool merge (int flags);
2038 bool merge_deref (const modref_lattice &with, bool ignore_stores);
2039 bool merge_direct_load ();
2040 bool merge_direct_store ();
2041 bool add_escape_point (gcall *call, int arg, int min_flags, bool direct);
2042 void dump (FILE *out, int indent = 0) const;
2043};
2044
2045/* Lattices are saved to vectors, so keep them PODs. */
2046void
2047modref_lattice::init ()
2048{
4341b1b1 2049 /* All flags we track. */
d70ef656
JH
2050 int f = EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER
2051 | EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE
2052 | EAF_NO_DIRECT_READ | EAF_NO_INDIRECT_READ
2053 | EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY
2054 | EAF_UNUSED;
4341b1b1
JH
2055 flags = f;
2056 /* Check that eaf_flags_t is wide enough to hold all flags. */
2057 gcc_checking_assert (f == flags);
85ebbabd
JH
2058 open = true;
2059 known = false;
2060}
2061
2062/* Release memory. */
2063void
2064modref_lattice::release ()
2065{
2066 escape_points.release ();
4898e958 2067 propagate_to.release ();
85ebbabd
JH
2068}
2069
2070/* Dump lattice to OUT; indent with INDENT spaces. */
2071
2072void
2073modref_lattice::dump (FILE *out, int indent) const
2074{
2075 dump_eaf_flags (out, flags);
2076 if (escape_points.length ())
2077 {
2078 fprintf (out, "%*sEscapes:\n", indent, "");
2079 for (unsigned int i = 0; i < escape_points.length (); i++)
2080 {
2081 fprintf (out, "%*s Arg %i (%s) min flags", indent, "",
2082 escape_points[i].arg,
2083 escape_points[i].direct ? "direct" : "indirect");
9851a163 2084 dump_eaf_flags (out, escape_points[i].min_flags, false);
85ebbabd
JH
2085 fprintf (out, " in call ");
2086 print_gimple_stmt (out, escape_points[i].call, 0);
2087 }
2088 }
2089}
2090
2091/* Add escape point CALL, ARG, MIN_FLAGS, DIRECT. Return false if such escape
2092 point already exists. */
2093
2094bool
2095modref_lattice::add_escape_point (gcall *call, int arg, int min_flags,
2096 bool direct)
2097{
2098 escape_point *ep;
2099 unsigned int i;
2100
2101 /* If we already determined flags to be bad enough,
4341b1b1
JH
2102 we do not need to record. */
2103 if ((flags & min_flags) == flags || (min_flags & EAF_UNUSED))
85ebbabd
JH
2104 return false;
2105
2106 FOR_EACH_VEC_ELT (escape_points, i, ep)
2107 if (ep->call == call && ep->arg == arg && ep->direct == direct)
2108 {
2109 if ((ep->min_flags & min_flags) == min_flags)
2110 return false;
2111 ep->min_flags &= min_flags;
2112 return true;
2113 }
2114 /* Give up if the limit on escape points is reached. */
2115 if ((int)escape_points.length () > param_modref_max_escape_points)
2116 {
2117 if (dump_file)
2118 fprintf (dump_file, "--param modref-max-escape-points limit reached\n");
2119 merge (0);
2120 return true;
2121 }
2122 escape_point new_ep = {call, arg, min_flags, direct};
2123 escape_points.safe_push (new_ep);
2124 return true;
2125}
2126
2127/* Merge in flags from F. */
2128bool
2129modref_lattice::merge (int f)
2130{
3350e59f
JH
2131 if (f & EAF_UNUSED)
2132 return false;
4526ec20
JH
2133 /* Check that the flags seem sane: if the function does not read the parameter
2134 it cannot access it indirectly. */
2135 gcc_checking_assert (!(f & EAF_NO_DIRECT_READ)
2136 || ((f & EAF_NO_INDIRECT_READ)
2137 && (f & EAF_NO_INDIRECT_CLOBBER)
2138 && (f & EAF_NO_INDIRECT_ESCAPE)
2139 && (f & EAF_NOT_RETURNED_INDIRECTLY)));
85ebbabd
JH
2140 if ((flags & f) != flags)
2141 {
2142 flags &= f;
4341b1b1
JH
2143 /* Prune obviously useless flags;
2144 we do not have ECF_FLAGS handy, which is not a big problem since
2145 we will do a final flags cleanup before producing the summary.
2146 Merging should be fast so it can work well with dataflow. */
2147 flags = remove_useless_eaf_flags (flags, 0, false);
85ebbabd
JH
2148 if (!flags)
2149 escape_points.release ();
2150 return true;
2151 }
2152 return false;
2153}
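/* For example, merging plain EAF_NO_DIRECT_CLOBBER into a lattice currently
   holding EAF_NO_DIRECT_CLOBBER | EAF_NO_DIRECT_ESCAPE narrows the lattice to
   EAF_NO_DIRECT_CLOBBER (modulo remove_useless_eaf_flags) and reports a
   change, while merging a superset of the current flags changes nothing.  */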
2154
2155/* Merge in WITH. Return true if anything changed. */
2156
2157bool
2158modref_lattice::merge (const modref_lattice &with)
2159{
2160 if (!with.known)
4898e958 2161 do_dataflow = true;
85ebbabd
JH
2162
2163 bool changed = merge (with.flags);
2164
2165 if (!flags)
2166 return changed;
2167 for (unsigned int i = 0; i < with.escape_points.length (); i++)
2168 changed |= add_escape_point (with.escape_points[i].call,
2169 with.escape_points[i].arg,
2170 with.escape_points[i].min_flags,
2171 with.escape_points[i].direct);
2172 return changed;
2173}
2174
2175/* Merge in deref of WITH. If IGNORE_STORES is true do not consider
2176 stores. Return true if anything changed. */
2177
2178bool
2179modref_lattice::merge_deref (const modref_lattice &with, bool ignore_stores)
2180{
2181 if (!with.known)
4898e958 2182 do_dataflow = true;
85ebbabd
JH
2183
2184 bool changed = merge (deref_flags (with.flags, ignore_stores));
2185
2186 if (!flags)
2187 return changed;
2188 for (unsigned int i = 0; i < with.escape_points.length (); i++)
9851a163
JH
2189 {
2190 int min_flags = with.escape_points[i].min_flags;
2191
2192 if (with.escape_points[i].direct)
2193 min_flags = deref_flags (min_flags, ignore_stores);
2194 else if (ignore_stores)
4341b1b1 2195 min_flags |= ignore_stores_eaf_flags;
9851a163
JH
2196 changed |= add_escape_point (with.escape_points[i].call,
2197 with.escape_points[i].arg,
2198 min_flags,
2199 false);
2200 }
85ebbabd
JH
2201 return changed;
2202}
2203
2204/* Merge in flags for direct load. */
2205
2206bool
2207modref_lattice::merge_direct_load ()
2208{
d70ef656 2209 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_READ));
85ebbabd
JH
2210}
2211
2212/* Merge in flags for direct store. */
2213
2214bool
2215modref_lattice::merge_direct_store ()
2216{
d70ef656 2217 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_CLOBBER));
85ebbabd
JH
2218}
2219
4898e958
JH
2220/* Analyzer of EAF flags.
2221 This is generally a dataflow problem over the SSA graph; however, we only
2222 care about flags of a few selected ssa names (arguments, return slot and
2223 static chain). So we first call analyze_ssa_name on all relevant names
2224 and perform a DFS walk to discover SSA names where flags need to be
2225 determined. For acyclic graphs we try to determine final flags during
2226 this walk. Once a cycle or the recursion depth limit is met, we enlist SSA
2227 names for dataflow, which is done by the propagate call.
2228
2229 After propagation the flags can be obtained using get_ssa_name_flags. */
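/* A minimal usage sketch, mirroring analyze_parms below:

     modref_eaf_analysis eaf_analysis (ipa);
     eaf_analysis.analyze_ssa_name (name);
     eaf_analysis.propagate ();
     int flags = eaf_analysis.get_ssa_name_flags (name);

   where NAME is e.g. the default definition of a parameter.  */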
18f0873d
JH
2230
2231class modref_eaf_analysis
2232{
2233public:
4898e958 2234 /* Mark NAME as relevant for analysis. */
18f0873d 2235 void analyze_ssa_name (tree name);
4898e958
JH
2236 /* Dataflow solver. */
2237 void propagate ();
18f0873d
JH
2238 /* Return flags computed earlier for NAME. */
2239 int get_ssa_name_flags (tree name)
2240 {
2241 int version = SSA_NAME_VERSION (name);
2242 gcc_checking_assert (m_lattice[version].known);
2243 return m_lattice[version].flags;
2244 }
2245 /* In IPA mode this will record all escape points
2246 determined for NAME to PARM_INDEX. FLAGS are the minimal
2247 flags known. */
2248 void record_escape_points (tree name, int parm_index, int flags);
2249 modref_eaf_analysis (bool ipa)
2250 {
2251 m_ipa = ipa;
2252 m_depth = 0;
2253 m_lattice.safe_grow_cleared (num_ssa_names, true);
2254 }
2255 ~modref_eaf_analysis ()
2256 {
2257 gcc_checking_assert (!m_depth);
4898e958 2258 if (m_ipa || m_names_to_propagate.length ())
18f0873d
JH
2259 for (unsigned int i = 0; i < num_ssa_names; i++)
2260 m_lattice[i].release ();
2261 }
2262private:
2263 /* If true, we produce analysis for IPA mode. In this case escape points are
2264 collected. */
2265 bool m_ipa;
2266 /* Depth of recursion of analyze_ssa_name. */
2267 int m_depth;
2268 /* Propagation lattice for individual ssa names. */
2269 auto_vec<modref_lattice> m_lattice;
4898e958
JH
2270 auto_vec<tree> m_deferred_names;
2271 auto_vec<int> m_names_to_propagate;
18f0873d
JH
2272
2273 void merge_with_ssa_name (tree dest, tree src, bool deref);
d70ef656
JH
2274 void merge_call_lhs_flags (gcall *call, int arg, tree name, bool direct,
2275 bool deref);
18f0873d 2276};
85ebbabd 2277
85ebbabd 2278
d70ef656 2279/* Call statements may return their parameters. Consider argument number
520d5ad3
JH
2280 ARG of USE_STMT and determine the flags that need to be cleared
2281 in case the pointer possibly indirectly referenced from ARG is returned.
d70ef656
JH
2282 If DIRECT is true consider direct returns and if INDIRECT consider
2283 indirect returns.
62af7d94
JH
2284 NAME is the SSA name of the value being analyzed, as in analyze_ssa_name.
2285 ARG is set to -1 for static chain. */
520d5ad3 2286
18f0873d
JH
2287void
2288modref_eaf_analysis::merge_call_lhs_flags (gcall *call, int arg,
d70ef656
JH
2289 tree name, bool direct,
2290 bool indirect)
520d5ad3 2291{
18f0873d 2292 int index = SSA_NAME_VERSION (name);
c331a75d 2293 bool returned_directly = false;
d70ef656 2294
520d5ad3
JH
2295 /* If there is no return value, no flags are affected. */
2296 if (!gimple_call_lhs (call))
85ebbabd 2297 return;
520d5ad3
JH
2298
2299 /* If we know that the function returns a given argument and it is not ARG,
2300 nothing needs to be cleared. */
62af7d94
JH
2301 if (arg >= 0)
2302 {
2303 int flags = gimple_call_return_flags (call);
c331a75d
JH
2304 if (flags & ERF_RETURNS_ARG)
2305 {
2306 if ((flags & ERF_RETURN_ARG_MASK) == arg)
2307 returned_directly = true;
2308 else
2309 return;
2310 }
2311 }
2312 /* Make ERF_RETURNS_ARG overwrite EAF_UNUSED. */
2313 if (returned_directly)
2314 {
2315 direct = true;
2316 indirect = false;
62af7d94 2317 }
c331a75d
JH
2318 /* If value is not returned at all, do nothing. */
2319 else if (!direct && !indirect)
2320 return;
8da8ed43 2321
520d5ad3
JH
2322 /* If return value is SSA name determine its flags. */
2323 if (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME)
85ebbabd
JH
2324 {
2325 tree lhs = gimple_call_lhs (call);
d70ef656
JH
2326 if (direct)
2327 merge_with_ssa_name (name, lhs, false);
2328 if (indirect)
2329 merge_with_ssa_name (name, lhs, true);
85ebbabd 2330 }
520d5ad3 2331 /* If the return value is stored to memory, we cannot track it further. */
d70ef656 2332 else if (!direct)
18f0873d 2333 m_lattice[index].merge (deref_flags (0, false));
520d5ad3 2334 else
18f0873d 2335 m_lattice[index].merge (0);
520d5ad3
JH
2336}
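/* For instance, given "res_1 = foo (arg_2)" (a hypothetical call) where foo is
   known to return its argument (ERF_RETURNS_ARG with index 0), the flags
   computed for res_1 are merged into the lattice of arg_2; if the call result
   is instead stored to memory we cannot track it, so the lattice drops to 0
   for a direct return (or to deref_flags (0, false) for an indirect one).  */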
2337
62af7d94
JH
2338/* CALL_FLAGS are EAF_FLAGS of the argument. Turn them
2339 into flags for caller, update LATTICE of corresponding
2340 argument if needed. */
2341
2342static int
2343callee_to_caller_flags (int call_flags, bool ignore_stores,
2344 modref_lattice &lattice)
2345{
2346 /* call_flags describe the callee returning a value, which
2347 is not the same as the caller returning it. */
d70ef656
JH
2348 call_flags |= EAF_NOT_RETURNED_DIRECTLY
2349 | EAF_NOT_RETURNED_INDIRECTLY;
62af7d94
JH
2350 if (!ignore_stores && !(call_flags & EAF_UNUSED))
2351 {
2f3d43a3
JH
2352 /* If value escapes we are no longer able to track what happens
2353 with it because we can read it from the escaped location
2354 anytime. */
d70ef656 2355 if (!(call_flags & EAF_NO_DIRECT_ESCAPE))
2f3d43a3
JH
2356 lattice.merge (0);
2357 else if (!(call_flags & EAF_NO_INDIRECT_ESCAPE))
d70ef656 2358 lattice.merge (~(EAF_NOT_RETURNED_INDIRECTLY
4526ec20 2359 | EAF_NO_DIRECT_READ
2f3d43a3
JH
2360 | EAF_NO_INDIRECT_READ
2361 | EAF_NO_INDIRECT_CLOBBER
62af7d94
JH
2362 | EAF_UNUSED));
2363 }
2364 else
2365 call_flags |= ignore_stores_eaf_flags;
2366 return call_flags;
2367}
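/* Example: an argument whose callee flags include EAF_NO_DIRECT_ESCAPE but
   not EAF_NO_INDIRECT_ESCAPE may have memory reachable from it escape inside
   the callee; the caller's lattice then has to drop the read, indirect-clobber
   and not-returned-indirectly guarantees, since the escaped memory can be
   accessed or returned behind our back.  */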
2368
85ebbabd
JH
2369/* Analyze EAF flags for SSA name NAME and store the result to the lattice
2370 (m_lattice, indexed by SSA name version).
2371 The recursion depth (m_depth) is only used to make debug output prettier.
2372 In IPA mode (m_ipa) we analyze for IPA propagation (and thus call escape
2373 points are processed later). */
520d5ad3 2374
18f0873d
JH
2375void
2376modref_eaf_analysis::analyze_ssa_name (tree name)
520d5ad3
JH
2377{
2378 imm_use_iterator ui;
2379 gimple *use_stmt;
85ebbabd 2380 int index = SSA_NAME_VERSION (name);
520d5ad3
JH
2381
2382 /* See if value is already computed. */
4898e958 2383 if (m_lattice[index].known || m_lattice[index].do_dataflow)
85ebbabd 2384 return;
18f0873d 2385 if (m_lattice[index].open)
520d5ad3 2386 {
85ebbabd
JH
2387 if (dump_file)
2388 fprintf (dump_file,
4898e958 2389 "%*sCycle in SSA graph\n",
18f0873d 2390 m_depth * 4, "");
85ebbabd 2391 return;
520d5ad3 2392 }
4898e958
JH
2393 /* Recursion guard. */
2394 m_lattice[index].init ();
18f0873d 2395 if (m_depth == param_modref_max_depth)
520d5ad3
JH
2396 {
2397 if (dump_file)
2398 fprintf (dump_file,
4898e958 2399 "%*sMax recursion depth reached; postponing\n",
18f0873d 2400 m_depth * 4, "");
4898e958 2401 m_deferred_names.safe_push (name);
85ebbabd 2402 return;
520d5ad3 2403 }
520d5ad3
JH
2404
2405 if (dump_file)
2406 {
2407 fprintf (dump_file,
18f0873d 2408 "%*sAnalyzing flags of ssa name: ", m_depth * 4, "");
520d5ad3
JH
2409 print_generic_expr (dump_file, name);
2410 fprintf (dump_file, "\n");
2411 }
2412
2413 FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
2414 {
18f0873d 2415 if (m_lattice[index].flags == 0)
640296c3 2416 break;
520d5ad3
JH
2417 if (is_gimple_debug (use_stmt))
2418 continue;
2419 if (dump_file)
2420 {
18f0873d 2421 fprintf (dump_file, "%*s Analyzing stmt: ", m_depth * 4, "");
520d5ad3
JH
2422 print_gimple_stmt (dump_file, use_stmt, 0);
2423 }
4341b1b1
JH
2424 /* If we see a direct non-debug use, clear the unused bit.
2425 All dereferences should be accounted for below using deref_flags. */
18f0873d 2426 m_lattice[index].merge (~EAF_UNUSED);
520d5ad3 2427
26285af4
JH
2428 /* A gimple return may load the return value.
2429 Returning the name counts as a use by tree-ssa-structalias.c. */
520d5ad3
JH
2430 if (greturn *ret = dyn_cast <greturn *> (use_stmt))
2431 {
b8ef019a
JH
2432 /* Returning through return slot is seen as memory write earlier. */
2433 if (DECL_RESULT (current_function_decl)
2434 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
2435 ;
2436 else if (gimple_return_retval (ret) == name)
d70ef656 2437 m_lattice[index].merge (~(EAF_UNUSED | EAF_NOT_RETURNED_DIRECTLY
18f0873d 2438 | EAF_NOT_RETURNED_INDIRECTLY));
85ebbabd 2439 else if (memory_access_to (gimple_return_retval (ret), name))
8da8ed43 2440 {
18f0873d 2441 m_lattice[index].merge_direct_load ();
d70ef656
JH
2442 m_lattice[index].merge (~(EAF_UNUSED
2443 | EAF_NOT_RETURNED_INDIRECTLY));
8da8ed43 2444 }
520d5ad3
JH
2445 }
2446 /* Account for LHS store, arg loads and flags from callee function. */
2447 else if (gcall *call = dyn_cast <gcall *> (use_stmt))
2448 {
2449 tree callee = gimple_call_fndecl (call);
9b08f776
JH
2450
2451 /* IPA PTA internally treats calling a function as "writing" to
2452 the argument space of all functions the function pointer points to
2453 (PR101949). We cannot drop EAF_NOCLOBBER only when ipa-pta
2454 is on, since that would allow propagation of this from -fno-ipa-pta
2455 to -fipa-pta functions. */
2456 if (gimple_call_fn (use_stmt) == name)
d70ef656 2457 m_lattice[index].merge (~(EAF_NO_DIRECT_CLOBBER | EAF_UNUSED));
9b08f776 2458
520d5ad3 2459 /* Recursion would require a bit of propagation; give up for now. */
18f0873d 2460 if (callee && !m_ipa && recursive_call_p (current_function_decl,
85ebbabd 2461 callee))
18f0873d 2462 m_lattice[index].merge (0);
520d5ad3
JH
2463 else
2464 {
2465 int ecf_flags = gimple_call_flags (call);
2466 bool ignore_stores = ignore_stores_p (current_function_decl,
2467 ecf_flags);
85ebbabd
JH
2468 bool ignore_retval = ignore_retval_p (current_function_decl,
2469 ecf_flags);
520d5ad3
JH
2470
2471 /* Handle *name = func (...). */
2472 if (gimple_call_lhs (call)
2473 && memory_access_to (gimple_call_lhs (call), name))
59f38935 2474 {
18f0873d 2475 m_lattice[index].merge_direct_store ();
59f38935
JH
2476 /* Return slot optimization passes address of
2477 LHS to callee via hidden parameter and this
2478 may make LHS escape. See PR 98499. */
2479 if (gimple_call_return_slot_opt_p (call)
2480 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call))))
62af7d94
JH
2481 {
2482 int call_flags = gimple_call_retslot_flags (call);
2483 bool isretslot = false;
2484
2485 if (DECL_RESULT (current_function_decl)
2486 && DECL_BY_REFERENCE
2487 (DECL_RESULT (current_function_decl)))
2488 isretslot = ssa_default_def
2489 (cfun,
2490 DECL_RESULT (current_function_decl))
2491 == name;
2492
2493 /* Passing returnslot to return slot is special because
2494 not_returned and escape have the same meaning.
2495 However passing arg to return slot is different. If
2496 the callee's return slot is returned it means that
2f3d43a3
JH
2497 arg is written to itself, which is an escape.
2498 Since we do not track the memory it is written to, we
2499 need to give up on analysing it. */
62af7d94
JH
2500 if (!isretslot)
2501 {
62af7d94 2502 if (!(call_flags & (EAF_NOT_RETURNED_DIRECTLY
d70ef656 2503 | EAF_UNUSED)))
2f3d43a3
JH
2504 m_lattice[index].merge (0);
2505 else gcc_checking_assert
2506 (call_flags & (EAF_NOT_RETURNED_INDIRECTLY
2507 | EAF_UNUSED));
62af7d94
JH
2508 call_flags = callee_to_caller_flags
2509 (call_flags, false,
2510 m_lattice[index]);
2511 }
2512 m_lattice[index].merge (call_flags);
2513 }
59f38935 2514 }
520d5ad3 2515
520d5ad3
JH
2516 if (gimple_call_chain (call)
2517 && (gimple_call_chain (call) == name))
62af7d94
JH
2518 {
2519 int call_flags = gimple_call_static_chain_flags (call);
d70ef656
JH
2520 if (!ignore_retval && !(call_flags & EAF_UNUSED))
2521 merge_call_lhs_flags
2522 (call, -1, name,
2523 !(call_flags & EAF_NOT_RETURNED_DIRECTLY),
2524 !(call_flags & EAF_NOT_RETURNED_INDIRECTLY));
62af7d94
JH
2525 call_flags = callee_to_caller_flags
2526 (call_flags, ignore_stores,
2527 m_lattice[index]);
2528 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
2529 m_lattice[index].merge (call_flags);
2530 }
85ebbabd
JH
2531
2532 /* Process internal functions right away. */
18f0873d 2533 bool record_ipa = m_ipa && !gimple_call_internal_p (call);
520d5ad3
JH
2534
2535 /* Handle all function parameters. */
85ebbabd 2536 for (unsigned i = 0;
18f0873d
JH
2537 i < gimple_call_num_args (call)
2538 && m_lattice[index].flags; i++)
520d5ad3
JH
2539 /* Name is directly passed to the callee. */
2540 if (gimple_call_arg (call, i) == name)
2541 {
62af7d94 2542 int call_flags = gimple_call_arg_flags (call, i);
c331a75d 2543 if (!ignore_retval)
62af7d94
JH
2544 merge_call_lhs_flags
2545 (call, i, name,
c331a75d
JH
2546 !(call_flags & (EAF_NOT_RETURNED_DIRECTLY
2547 | EAF_UNUSED)),
2548 !(call_flags & (EAF_NOT_RETURNED_INDIRECTLY
2549 | EAF_UNUSED)));
85ebbabd 2550 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
520d5ad3 2551 {
62af7d94
JH
2552 call_flags = callee_to_caller_flags
2553 (call_flags, ignore_stores,
2554 m_lattice[index]);
85ebbabd 2555 if (!record_ipa)
18f0873d 2556 m_lattice[index].merge (call_flags);
c3c61674 2557 else
18f0873d 2558 m_lattice[index].add_escape_point (call, i,
b8ef019a 2559 call_flags, true);
520d5ad3
JH
2560 }
2561 }
2562 /* Name is dereferenced and passed to a callee. */
2563 else if (memory_access_to (gimple_call_arg (call, i), name))
2564 {
62af7d94
JH
2565 int call_flags = deref_flags
2566 (gimple_call_arg_flags (call, i), ignore_stores);
d70ef656
JH
2567 if (!ignore_retval && !(call_flags & EAF_UNUSED)
2568 && !(call_flags & EAF_NOT_RETURNED_DIRECTLY)
2569 && !(call_flags & EAF_NOT_RETURNED_INDIRECTLY))
2570 merge_call_lhs_flags (call, i, name, false, true);
520d5ad3 2571 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
18f0873d 2572 m_lattice[index].merge_direct_load ();
520d5ad3 2573 else
85ebbabd 2574 {
62af7d94
JH
2575 call_flags = callee_to_caller_flags
2576 (call_flags, ignore_stores,
2577 m_lattice[index]);
85ebbabd 2578 if (!record_ipa)
18f0873d 2579 m_lattice[index].merge (call_flags);
c3c61674 2580 else
18f0873d 2581 m_lattice[index].add_escape_point (call, i,
62af7d94 2582 call_flags, false);
85ebbabd 2583 }
520d5ad3
JH
2584 }
2585 }
520d5ad3
JH
2586 }
2587 else if (gimple_assign_load_p (use_stmt))
2588 {
2589 gassign *assign = as_a <gassign *> (use_stmt);
2590 /* Memory to memory copy. */
2591 if (gimple_store_p (assign))
2592 {
520d5ad3
JH
2593 /* Handle *lhs = *name.
2594
2595 We do not track memory locations, so assume that value
2596 is used arbitrarily. */
2597 if (memory_access_to (gimple_assign_rhs1 (assign), name))
18f0873d 2598 m_lattice[index].merge (deref_flags (0, false));
85ebbabd
JH
2599 /* Handle *name = *exp. */
2600 else if (memory_access_to (gimple_assign_lhs (assign), name))
18f0873d 2601 m_lattice[index].merge_direct_store ();
520d5ad3
JH
2602 }
2603 /* Handle lhs = *name. */
2604 else if (memory_access_to (gimple_assign_rhs1 (assign), name))
85ebbabd
JH
2605 {
2606 tree lhs = gimple_assign_lhs (assign);
18f0873d 2607 merge_with_ssa_name (name, lhs, true);
85ebbabd 2608 }
520d5ad3
JH
2609 }
2610 else if (gimple_store_p (use_stmt))
2611 {
2612 gassign *assign = dyn_cast <gassign *> (use_stmt);
2613
2614 /* Handle *lhs = name. */
2615 if (assign && gimple_assign_rhs1 (assign) == name)
2616 {
2617 if (dump_file)
2618 fprintf (dump_file, "%*s ssa name saved to memory\n",
18f0873d
JH
2619 m_depth * 4, "");
2620 m_lattice[index].merge (0);
520d5ad3
JH
2621 }
2622 /* Handle *name = exp. */
2623 else if (assign
2624 && memory_access_to (gimple_assign_lhs (assign), name))
0c9687d0
JH
2625 {
2626 /* In general we cannot ignore clobbers because they are
2627 barriers for code motion; however, after inlining it is safe to
2628 do so because local optimization passes do not consider clobbers
18f0873d
JH
2629 from other functions.
2630 Similar logic is in ipa-pure-const.c. */
0c9687d0 2631 if (!cfun->after_inlining || !gimple_clobber_p (assign))
18f0873d 2632 m_lattice[index].merge_direct_store ();
0c9687d0 2633 }
520d5ad3
JH
2634 /* ASM statements etc. */
2635 else if (!assign)
2636 {
2637 if (dump_file)
18f0873d
JH
2638 fprintf (dump_file, "%*s Unhandled store\n", m_depth * 4, "");
2639 m_lattice[index].merge (0);
520d5ad3
JH
2640 }
2641 }
2642 else if (gassign *assign = dyn_cast <gassign *> (use_stmt))
2643 {
2644 enum tree_code code = gimple_assign_rhs_code (assign);
2645
2646 /* See if operation is a merge as considered by
2647 tree-ssa-structalias.c:find_func_aliases. */
2648 if (!truth_value_p (code)
2649 && code != POINTER_DIFF_EXPR
2650 && (code != POINTER_PLUS_EXPR
2651 || gimple_assign_rhs1 (assign) == name))
85ebbabd
JH
2652 {
2653 tree lhs = gimple_assign_lhs (assign);
18f0873d 2654 merge_with_ssa_name (name, lhs, false);
85ebbabd 2655 }
520d5ad3
JH
2656 }
2657 else if (gphi *phi = dyn_cast <gphi *> (use_stmt))
2658 {
85ebbabd 2659 tree result = gimple_phi_result (phi);
18f0873d 2660 merge_with_ssa_name (name, result, false);
520d5ad3
JH
2661 }
2662 /* Conditions are not considered escape points
2663 by tree-ssa-structalias. */
2664 else if (gimple_code (use_stmt) == GIMPLE_COND)
2665 ;
2666 else
2667 {
2668 if (dump_file)
18f0873d
JH
2669 fprintf (dump_file, "%*s Unhandled stmt\n", m_depth * 4, "");
2670 m_lattice[index].merge (0);
520d5ad3
JH
2671 }
2672
2673 if (dump_file)
2674 {
18f0873d 2675 fprintf (dump_file, "%*s current flags of ", m_depth * 4, "");
520d5ad3 2676 print_generic_expr (dump_file, name);
18f0873d 2677 m_lattice[index].dump (dump_file, m_depth * 4 + 4);
520d5ad3
JH
2678 }
2679 }
2680 if (dump_file)
2681 {
18f0873d 2682 fprintf (dump_file, "%*sflags of ssa name ", m_depth * 4, "");
520d5ad3 2683 print_generic_expr (dump_file, name);
18f0873d 2684 m_lattice[index].dump (dump_file, m_depth * 4 + 2);
520d5ad3 2685 }
18f0873d 2686 m_lattice[index].open = false;
4898e958
JH
2687 if (!m_lattice[index].do_dataflow)
2688 m_lattice[index].known = true;
18f0873d
JH
2689}
2690
2691/* Propagate info from SRC to DEST. If DEREF is true, assume that SRC
2692 is dereferenced. */
2693
2694void
2695modref_eaf_analysis::merge_with_ssa_name (tree dest, tree src, bool deref)
2696{
2697 int index = SSA_NAME_VERSION (dest);
2698 int src_index = SSA_NAME_VERSION (src);
2699
4898e958
JH
2700 /* Merging lattice with itself is a no-op. */
2701 if (!deref && src == dest)
2702 return;
2703
18f0873d
JH
2704 m_depth++;
2705 analyze_ssa_name (src);
2706 m_depth--;
2707 if (deref)
2708 m_lattice[index].merge_deref (m_lattice[src_index], false);
2709 else
2710 m_lattice[index].merge (m_lattice[src_index]);
4898e958
JH
2711
2712 /* If we failed to produce final solution add an edge to the dataflow
2713 graph. */
2714 if (!m_lattice[src_index].known)
2715 {
2716 modref_lattice::propagate_edge e = {index, deref};
2717
2718 if (!m_lattice[src_index].propagate_to.length ())
2719 m_names_to_propagate.safe_push (src_index);
2720 m_lattice[src_index].propagate_to.safe_push (e);
2721 m_lattice[src_index].changed = true;
2722 m_lattice[src_index].do_dataflow = true;
2723 if (dump_file)
2724 fprintf (dump_file,
2725 "%*sWill propgate from ssa_name %i to %i%s\n",
2726 m_depth * 4 + 4,
2727 "", src_index, index, deref ? " (deref)" : "");
2728 }
2729}
2730
2731/* In the case we deferred some SSA names, reprocess them. In the case some
2732 dataflow edges were introduced, do the actual iterative dataflow. */
2733
2734void
2735modref_eaf_analysis::propagate ()
2736{
2737 int iterations = 0;
2738 size_t i;
2739 int index;
2740 bool changed = true;
2741
2742 while (m_deferred_names.length ())
2743 {
2744 tree name = m_deferred_names.pop ();
2745 m_lattice[SSA_NAME_VERSION (name)].open = false;
2746 if (dump_file)
2747 fprintf (dump_file, "Analyzing deferred SSA name\n");
2748 analyze_ssa_name (name);
2749 }
2750
2751 if (!m_names_to_propagate.length ())
2752 return;
2753 if (dump_file)
2754 fprintf (dump_file, "Propagating EAF flags\n");
2755
2756 /* Compute reverse postorder. */
2757 auto_vec <int> rpo;
2758 struct stack_entry
2759 {
2760 int name;
2761 unsigned pos;
2762 };
2763 auto_vec <struct stack_entry> stack;
2764 int pos = m_names_to_propagate.length () - 1;
2765
2766 rpo.safe_grow (m_names_to_propagate.length (), true);
2767 stack.reserve_exact (m_names_to_propagate.length ());
2768
2769 /* We reuse the known flag for RPO DFS walk bookkeeping. */
2770 if (flag_checking)
2771 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
2772 gcc_assert (!m_lattice[index].known && m_lattice[index].changed);
2773
2774 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
2775 {
2776 if (!m_lattice[index].known)
2777 {
2778 stack_entry e = {index, 0};
2779
2780 stack.quick_push (e);
2781 m_lattice[index].known = true;
2782 }
2783 while (stack.length ())
2784 {
2785 bool found = false;
2786 int index1 = stack.last ().name;
2787
2788 while (stack.last ().pos < m_lattice[index1].propagate_to.length ())
2789 {
2790 int index2 = m_lattice[index1]
2791 .propagate_to[stack.last ().pos].ssa_name;
2792
2793 stack.last ().pos++;
2794 if (!m_lattice[index2].known
2795 && m_lattice[index2].propagate_to.length ())
2796 {
2797 stack_entry e = {index2, 0};
2798
2799 stack.quick_push (e);
2800 m_lattice[index2].known = true;
2801 found = true;
2802 break;
2803 }
2804 }
2805 if (!found
2806 && stack.last ().pos == m_lattice[index1].propagate_to.length ())
2807 {
2808 rpo[pos--] = index1;
2809 stack.pop ();
2810 }
2811 }
2812 }
2813
2814 /* Perform iterative dataflow. */
2815 while (changed)
2816 {
2817 changed = false;
2818 iterations++;
2819 if (dump_file)
2820 fprintf (dump_file, " iteration %i\n", iterations);
2821 FOR_EACH_VEC_ELT (rpo, i, index)
2822 {
2823 if (m_lattice[index].changed)
2824 {
2825 size_t j;
2826
2827 m_lattice[index].changed = false;
2828 if (dump_file)
2829 fprintf (dump_file, " Visiting ssa name %i\n", index);
2830 for (j = 0; j < m_lattice[index].propagate_to.length (); j++)
2831 {
2832 bool ch;
2833 int target = m_lattice[index].propagate_to[j].ssa_name;
2834 bool deref = m_lattice[index].propagate_to[j].deref;
2835
2836 if (dump_file)
2837 fprintf (dump_file, " Propagating flags of ssa name"
2838 " %i to %i%s\n",
2839 index, target, deref ? " (deref)" : "");
2840 m_lattice[target].known = true;
2841 if (!m_lattice[index].propagate_to[j].deref)
2842 ch = m_lattice[target].merge (m_lattice[index]);
2843 else
2844 ch = m_lattice[target].merge_deref (m_lattice[index],
2845 false);
2846 if (!ch)
2847 continue;
2848 if (dump_file)
2849 {
2850 fprintf (dump_file, " New lattice: ");
2851 m_lattice[target].dump (dump_file);
2852 }
d70ef656 2853 changed = true;
4898e958
JH
2854 m_lattice[target].changed = true;
2855 }
2856 }
2857 }
2858 }
2859 if (dump_file)
2860 fprintf (dump_file, "EAF flags propagated in %i iterations\n", iterations);
520d5ad3
JH
2861}
2862
b8ef019a
JH
2863/* Record escape points of PARM_INDEX according to LATTICE. */
2864
18f0873d
JH
2865void
2866modref_eaf_analysis::record_escape_points (tree name, int parm_index, int flags)
b8ef019a 2867{
18f0873d
JH
2868 modref_lattice &lattice = m_lattice[SSA_NAME_VERSION (name)];
2869
b8ef019a
JH
2870 if (lattice.escape_points.length ())
2871 {
2872 escape_point *ep;
2873 unsigned int ip;
2874 cgraph_node *node = cgraph_node::get (current_function_decl);
2875
18f0873d 2876 gcc_assert (m_ipa);
b8ef019a
JH
2877 FOR_EACH_VEC_ELT (lattice.escape_points, ip, ep)
2878 if ((ep->min_flags & flags) != flags)
2879 {
2880 cgraph_edge *e = node->get_edge (ep->call);
2881 struct escape_entry ee = {parm_index, ep->arg,
2882 ep->min_flags, ep->direct};
2883
2884 escape_summaries->get_create (e)->esc.safe_push (ee);
2885 }
2886 }
2887}
2888
7798ae1a
JH
2889/* Determine EAF flags for function parameters
2890 and fill in SUMMARY/SUMMARY_LTO. If IPA is true, work in IPA mode
2891 where we also collect escape points.
2892 PAST_FLAGS, PAST_RETSLOT_FLAGS, PAST_STATIC_CHAIN_FLAGS can be
2893 used to preserve flags from a previous (IPA) run for cases where
2894 late optimizations changed the code in a way we can no longer analyze
2895 easily. */
520d5ad3
JH
2896
2897static void
85ebbabd 2898analyze_parms (modref_summary *summary, modref_summary_lto *summary_lto,
7798ae1a
JH
2899 bool ipa, vec<eaf_flags_t> &past_flags,
2900 int past_retslot_flags, int past_static_chain_flags)
520d5ad3
JH
2901{
2902 unsigned int parm_index = 0;
2903 unsigned int count = 0;
85ebbabd 2904 int ecf_flags = flags_from_decl_or_type (current_function_decl);
b8ef019a 2905 tree retslot = NULL;
a70c0512 2906 tree static_chain = NULL;
85ebbabd 2907
b8ef019a
JH
2908 /* If there is return slot, look up its SSA name. */
2909 if (DECL_RESULT (current_function_decl)
2910 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
2911 retslot = ssa_default_def (cfun, DECL_RESULT (current_function_decl));
a70c0512
JH
2912 if (cfun->static_chain_decl)
2913 static_chain = ssa_default_def (cfun, cfun->static_chain_decl);
b8ef019a 2914
520d5ad3
JH
2915 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2916 parm = TREE_CHAIN (parm))
2917 count++;
2918
a70c0512 2919 if (!count && !retslot && !static_chain)
520d5ad3
JH
2920 return;
2921
18f0873d 2922 modref_eaf_analysis eaf_analysis (ipa);
520d5ad3 2923
4898e958
JH
2924 /* Determine all SSA names we need to know flags for. */
2925 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2926 parm = TREE_CHAIN (parm))
2927 {
2928 tree name = ssa_default_def (cfun, parm);
2929 if (name)
2930 eaf_analysis.analyze_ssa_name (name);
2931 }
2932 if (retslot)
2933 eaf_analysis.analyze_ssa_name (retslot);
2934 if (static_chain)
2935 eaf_analysis.analyze_ssa_name (static_chain);
2936
2937 /* Do the dataflow. */
2938 eaf_analysis.propagate ();
2939
e2dd12ab
JH
2940 tree attr = lookup_attribute ("fn spec",
2941 TYPE_ATTRIBUTES
2942 (TREE_TYPE (current_function_decl)));
2943 attr_fnspec fnspec (attr
2944 ? TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))
2945 : "");
2946
2947
4898e958 2948 /* Store results to summaries. */
520d5ad3
JH
2949 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm; parm_index++,
2950 parm = TREE_CHAIN (parm))
2951 {
2952 tree name = ssa_default_def (cfun, parm);
3350e59f
JH
2953 if (!name || has_zero_uses (name))
2954 {
2955 /* We do not track non-SSA parameters,
2956 but we want to track unused gimple_regs. */
2957 if (!is_gimple_reg (parm))
2958 continue;
2959 if (summary)
2960 {
2961 if (parm_index >= summary->arg_flags.length ())
2962 summary->arg_flags.safe_grow_cleared (count, true);
2963 summary->arg_flags[parm_index] = EAF_UNUSED;
2964 }
2965 else if (summary_lto)
2966 {
2967 if (parm_index >= summary_lto->arg_flags.length ())
2968 summary_lto->arg_flags.safe_grow_cleared (count, true);
2969 summary_lto->arg_flags[parm_index] = EAF_UNUSED;
2970 }
2971 continue;
2972 }
18f0873d 2973 int flags = eaf_analysis.get_ssa_name_flags (name);
e2dd12ab
JH
2974 int attr_flags = fnspec.arg_eaf_flags (parm_index);
2975
2976 if (dump_file && (flags | attr_flags) != flags && !(flags & EAF_UNUSED))
2977 {
2978 fprintf (dump_file,
2979 " Flags for param %i combined with fnspec flags:",
2980 (int)parm_index);
2981 dump_eaf_flags (dump_file, attr_flags, false);
2982 fprintf (dump_file, " determined: ");
2983 dump_eaf_flags (dump_file, flags, true);
2984 }
2985 flags |= attr_flags;
85ebbabd 2986
4341b1b1
JH
2987 /* Eliminate useless flags so we do not end up storing unnecessary
2988 summaries. */
2989
2990 flags = remove_useless_eaf_flags
2991 (flags, ecf_flags,
2992 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
7798ae1a
JH
2993 if (past_flags.length () > parm_index)
2994 {
2995 int past = past_flags[parm_index];
2996 past = remove_useless_eaf_flags
2997 (past, ecf_flags,
2998 VOID_TYPE_P (TREE_TYPE
2999 (TREE_TYPE (current_function_decl))));
3000 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3001 {
3002 fprintf (dump_file,
3003 " Flags for param %i combined with IPA pass:",
3004 (int)parm_index);
3005 dump_eaf_flags (dump_file, past, false);
e2dd12ab
JH
3006 fprintf (dump_file, " determined: ");
3007 dump_eaf_flags (dump_file, flags, true);
7798ae1a
JH
3008 }
3009 if (!(flags & EAF_UNUSED))
3010 flags |= past;
3011 }
520d5ad3
JH
3012
3013 if (flags)
3014 {
85ebbabd
JH
3015 if (summary)
3016 {
3017 if (parm_index >= summary->arg_flags.length ())
3018 summary->arg_flags.safe_grow_cleared (count, true);
3019 summary->arg_flags[parm_index] = flags;
3020 }
3021 else if (summary_lto)
3022 {
3023 if (parm_index >= summary_lto->arg_flags.length ())
3024 summary_lto->arg_flags.safe_grow_cleared (count, true);
3025 summary_lto->arg_flags[parm_index] = flags;
3026 }
18f0873d 3027 eaf_analysis.record_escape_points (name, parm_index, flags);
b8ef019a
JH
3028 }
3029 }
3030 if (retslot)
3031 {
18f0873d 3032 int flags = eaf_analysis.get_ssa_name_flags (retslot);
7798ae1a 3033 int past = past_retslot_flags;
85ebbabd 3034
b8ef019a 3035 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
7798ae1a
JH
3036 past = remove_useless_eaf_flags
3037 (past, ecf_flags,
3038 VOID_TYPE_P (TREE_TYPE
3039 (TREE_TYPE (current_function_decl))));
3040 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3041 {
3042 fprintf (dump_file,
3043 " Retslot flags combined with IPA pass:");
3044 dump_eaf_flags (dump_file, past, false);
e2dd12ab 3045 fprintf (dump_file, " determined: ");
7798ae1a
JH
3046 dump_eaf_flags (dump_file, flags, true);
3047 }
3048 if (!(flags & EAF_UNUSED))
3049 flags |= past;
b8ef019a
JH
3050 if (flags)
3051 {
3052 if (summary)
3053 summary->retslot_flags = flags;
3054 if (summary_lto)
3055 summary_lto->retslot_flags = flags;
18f0873d 3056 eaf_analysis.record_escape_points (retslot,
1f3a3363 3057 MODREF_RETSLOT_PARM, flags);
520d5ad3
JH
3058 }
3059 }
a70c0512
JH
3060 if (static_chain)
3061 {
18f0873d 3062 int flags = eaf_analysis.get_ssa_name_flags (static_chain);
7798ae1a 3063 int past = past_static_chain_flags;
a70c0512
JH
3064
3065 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
7798ae1a
JH
3066 past = remove_useless_eaf_flags
3067 (past, ecf_flags,
3068 VOID_TYPE_P (TREE_TYPE
3069 (TREE_TYPE (current_function_decl))));
3070 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3071 {
3072 fprintf (dump_file,
3073 " Static chain flags combined with IPA pass:");
3074 dump_eaf_flags (dump_file, past, false);
e2dd12ab 3075 fprintf (dump_file, " determined: ");
7798ae1a
JH
3076 dump_eaf_flags (dump_file, flags, true);
3077 }
3078 if (!(flags & EAF_UNUSED))
78dd0de9 3079 flags |= past;
a70c0512
JH
3080 if (flags)
3081 {
3082 if (summary)
3083 summary->static_chain_flags = flags;
3084 if (summary_lto)
3085 summary_lto->static_chain_flags = flags;
18f0873d 3086 eaf_analysis.record_escape_points (static_chain,
1f3a3363 3087 MODREF_STATIC_CHAIN_PARM,
18f0873d 3088 flags);
a70c0512
JH
3089 }
3090 }
520d5ad3
JH
3091}
3092
2cadaa1f 3093/* Analyze function. IPA indicates whether we're running in local mode
494bdadf
JH
3094 (false) or in IPA mode (true).
3095 Return true if fixup_cfg is needed after the pass. */
d119f34c 3096
494bdadf 3097static bool
2cadaa1f 3098analyze_function (bool ipa)
d119f34c 3099{
494bdadf 3100 bool fixup_cfg = false;
d119f34c 3101 if (dump_file)
2cadaa1f
JH
3102 fprintf (dump_file, "\n\nmodref analyzing '%s' (ipa=%i)%s%s\n",
3103 cgraph_node::get (current_function_decl)->dump_name (), ipa,
67c935c8
JH
3104 TREE_READONLY (current_function_decl) ? " (const)" : "",
3105 DECL_PURE_P (current_function_decl) ? " (pure)" : "");
d119f34c
JH
3106
3107 /* Don't analyze this function if modref is disabled or the function has the noipa attribute. */
008e7397
JH
3108 if (!flag_ipa_modref
3109 || lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl)))
494bdadf 3110 return false;
d119f34c 3111
d119f34c
JH
3112 /* Compute no-LTO summaries when local optimization is going to happen. */
3113 bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
3114 || (in_lto_p && !flag_wpa
3115 && flag_incremental_link != INCREMENTAL_LINK_LTO));
d119f34c
JH
3116 /* Compute LTO when LTO streaming is going to happen. */
3117 bool lto = ipa && ((flag_lto && !in_lto_p)
3118 || flag_wpa
3119 || flag_incremental_link == INCREMENTAL_LINK_LTO);
71dbabcc
JH
3120 cgraph_node *fnode = cgraph_node::get (current_function_decl);
3121
3122 modref_summary *summary = NULL;
3123 modref_summary_lto *summary_lto = NULL;
3124
f6f704fd
JH
3125 bool past_flags_known = false;
3126 auto_vec <eaf_flags_t> past_flags;
3127 int past_retslot_flags = 0;
3128 int past_static_chain_flags = 0;
3129
71dbabcc
JH
3130 /* Initialize the summary.
3131 If we run in local mode there is possibly a pre-existing summary from
3132 the IPA pass. Dump it so it is easy to compare whether mod-ref info has
3133 improved. */
3134 if (!ipa)
3135 {
3136 if (!optimization_summaries)
3137 optimization_summaries = modref_summaries::create_ggc (symtab);
3138 else /* Remove existing summary if we are re-running the pass. */
3139 {
22c24234
ML
3140 summary = optimization_summaries->get (fnode);
3141 if (summary != NULL
56cb815b 3142 && summary->loads)
71dbabcc 3143 {
22c24234
ML
3144 if (dump_file)
3145 {
3146 fprintf (dump_file, "Past summary:\n");
3147 optimization_summaries->get (fnode)->dump (dump_file);
3148 }
f6f704fd
JH
3149 past_flags.reserve_exact (summary->arg_flags.length ());
3150 past_flags.splice (summary->arg_flags);
3151 past_retslot_flags = summary->retslot_flags;
3152 past_static_chain_flags = summary->static_chain_flags;
3153 past_flags_known = true;
71dbabcc 3154 }
616ca102 3155 optimization_summaries->remove (fnode);
71dbabcc 3156 }
616ca102 3157 summary = optimization_summaries->get_create (fnode);
71dbabcc
JH
3158 gcc_checking_assert (nolto && !lto);
3159 }
8a2fd716 3160 /* In IPA mode we analyze every function precisely once. Assert that. */
71dbabcc
JH
3161 else
3162 {
3163 if (nolto)
3164 {
3165 if (!summaries)
3166 summaries = modref_summaries::create_ggc (symtab);
3167 else
616ca102
ML
3168 summaries->remove (fnode);
3169 summary = summaries->get_create (fnode);
71dbabcc
JH
3170 }
3171 if (lto)
3172 {
3173 if (!summaries_lto)
3174 summaries_lto = modref_summaries_lto::create_ggc (symtab);
3175 else
616ca102
ML
3176 summaries_lto->remove (fnode);
3177 summary_lto = summaries_lto->get_create (fnode);
71dbabcc 3178 }
6cef01c3
JH
3179 if (!fnspec_summaries)
3180 fnspec_summaries = new fnspec_summaries_t (symtab);
85ebbabd
JH
3181 if (!escape_summaries)
3182 escape_summaries = new escape_summaries_t (symtab);
71dbabcc
JH
3183 }
3184
d119f34c
JH
3185
3186 /* Create and initialize summary for F.
3187 Note that summaries may be already allocated from previous
3188 run of the pass. */
3189 if (nolto)
3190 {
3191 gcc_assert (!summary->loads);
8632f8c6 3192 summary->loads = modref_records::create_ggc ();
d119f34c 3193 gcc_assert (!summary->stores);
8632f8c6 3194 summary->stores = modref_records::create_ggc ();
617695cd 3195 summary->writes_errno = false;
992644c3 3196 summary->side_effects = false;
a34edf9a
JH
3197 summary->nondeterministic = false;
3198 summary->calls_interposable = false;
d119f34c
JH
3199 }
3200 if (lto)
3201 {
71dbabcc 3202 gcc_assert (!summary_lto->loads);
8632f8c6 3203 summary_lto->loads = modref_records_lto::create_ggc ();
71dbabcc 3204 gcc_assert (!summary_lto->stores);
8632f8c6 3205 summary_lto->stores = modref_records_lto::create_ggc ();
6cef01c3 3206 summary_lto->writes_errno = false;
992644c3 3207 summary_lto->side_effects = false;
a34edf9a
JH
3208 summary_lto->nondeterministic = false;
3209 summary_lto->calls_interposable = false;
d119f34c 3210 }
520d5ad3 3211
7798ae1a
JH
3212 analyze_parms (summary, summary_lto, ipa,
3213 past_flags, past_retslot_flags, past_static_chain_flags);
520d5ad3 3214
09a4ffb7
JH
3215 {
3216 modref_access_analysis analyzer (ipa, summary, summary_lto);
3217 analyzer.analyze ();
3218 }
494bdadf
JH
3219
3220 if (!ipa && flag_ipa_pure_const)
3221 {
1b62cddc 3222 if (!summary->stores->every_base && !summary->stores->bases
a34edf9a 3223 && !summary->nondeterministic)
494bdadf 3224 {
a34edf9a
JH
3225 if (!summary->loads->every_base && !summary->loads->bases
3226 && !summary->calls_interposable)
616ca102
ML
3227 fixup_cfg = ipa_make_function_const (fnode,
3228 summary->side_effects, true);
494bdadf 3229 else
616ca102
ML
3230 fixup_cfg = ipa_make_function_pure (fnode,
3231 summary->side_effects, true);
494bdadf
JH
3232 }
3233 }
09a4ffb7 3234 int ecf_flags = flags_from_decl_or_type (current_function_decl);
71dbabcc
JH
3235 if (summary && !summary->useful_p (ecf_flags))
3236 {
3237 if (!ipa)
3238 optimization_summaries->remove (fnode);
3239 else
3240 summaries->remove (fnode);
3241 summary = NULL;
3242 }
e0040bc3 3243 if (summary)
5aa91072 3244 summary->finalize (current_function_decl);
71dbabcc
JH
3245 if (summary_lto && !summary_lto->useful_p (ecf_flags))
3246 {
3247 summaries_lto->remove (fnode);
3248 summary_lto = NULL;
3249 }
992644c3 3250
85ebbabd
JH
3251 if (ipa && !summary && !summary_lto)
3252 remove_modref_edge_summaries (fnode);
d119f34c
JH
3253
3254 if (dump_file)
3255 {
3256 fprintf (dump_file, " - modref done with result: tracked.\n");
71dbabcc
JH
3257 if (summary)
3258 summary->dump (dump_file);
3259 if (summary_lto)
3260 summary_lto->dump (dump_file);
85ebbabd 3261 dump_modref_edge_summaries (dump_file, fnode, 2);
f6f704fd
JH
3262 /* To simplify debugging, compare IPA and local solutions. */
3263 if (past_flags_known && summary)
3264 {
3265 size_t len = summary->arg_flags.length ();
3266
3267 if (past_flags.length () > len)
3268 len = past_flags.length ();
3269 for (size_t i = 0; i < len; i++)
3270 {
3271 int old_flags = i < past_flags.length () ? past_flags[i] : 0;
3272 int new_flags = i < summary->arg_flags.length ()
3273 ? summary->arg_flags[i] : 0;
3274 old_flags = remove_useless_eaf_flags
3275 (old_flags, flags_from_decl_or_type (current_function_decl),
3276 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3277 if (old_flags != new_flags)
3278 {
7798ae1a
JH
3279 if ((old_flags & ~new_flags) == 0
3280 || (new_flags & EAF_UNUSED))
f6f704fd
JH
3281 fprintf (dump_file, " Flags for param %i improved:",
3282 (int)i);
f6f704fd 3283 else
7798ae1a 3284 gcc_unreachable ();
f6f704fd
JH
3285 dump_eaf_flags (dump_file, old_flags, false);
3286 fprintf (dump_file, " -> ");
3287 dump_eaf_flags (dump_file, new_flags, true);
3288 }
3289 }
3290 past_retslot_flags = remove_useless_eaf_flags
3291 (past_retslot_flags,
3292 flags_from_decl_or_type (current_function_decl),
3293 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3294 if (past_retslot_flags != summary->retslot_flags)
3295 {
7798ae1a
JH
3296 if ((past_retslot_flags & ~summary->retslot_flags) == 0
3297 || (summary->retslot_flags & EAF_UNUSED))
f6f704fd 3298 fprintf (dump_file, " Flags for retslot improved:");
f6f704fd 3299 else
7798ae1a 3300 gcc_unreachable ();
f6f704fd
JH
3301 dump_eaf_flags (dump_file, past_retslot_flags, false);
3302 fprintf (dump_file, " -> ");
3303 dump_eaf_flags (dump_file, summary->retslot_flags, true);
3304 }
3305 past_static_chain_flags = remove_useless_eaf_flags
3306 (past_static_chain_flags,
3307 flags_from_decl_or_type (current_function_decl),
3308 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3309 if (past_static_chain_flags != summary->static_chain_flags)
3310 {
7798ae1a
JH
3311 if ((past_static_chain_flags & ~summary->static_chain_flags) == 0
3312 || (summary->static_chain_flags & EAF_UNUSED))
f6f704fd 3313 fprintf (dump_file, " Flags for static chain improved:");
f6f704fd 3314 else
7798ae1a 3315 gcc_unreachable ();
f6f704fd
JH
3316 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3317 fprintf (dump_file, " -> ");
3318 dump_eaf_flags (dump_file, summary->static_chain_flags, true);
3319 }
3320 }
3321 else if (past_flags_known && !summary)
3322 {
3323 for (size_t i = 0; i < past_flags.length (); i++)
3324 {
3325 int old_flags = past_flags[i];
3326 old_flags = remove_useless_eaf_flags
3327 (old_flags, flags_from_decl_or_type (current_function_decl),
3328 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3329 if (old_flags)
3330 {
3331 fprintf (dump_file, " Flags for param %i worsened:",
3332 (int)i);
3333 dump_eaf_flags (dump_file, old_flags, false);
3334 fprintf (dump_file, " -> \n");
3335 }
3336 }
3337 past_retslot_flags = remove_useless_eaf_flags
3338 (past_retslot_flags,
3339 flags_from_decl_or_type (current_function_decl),
3340 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3341 if (past_retslot_flags)
3342 {
3343 fprintf (dump_file, " Flags for retslot worsened:");
3344 dump_eaf_flags (dump_file, past_retslot_flags, false);
3345 fprintf (dump_file, " ->\n");
3346 }
3347 past_static_chain_flags = remove_useless_eaf_flags
3348 (past_static_chain_flags,
3349 flags_from_decl_or_type (current_function_decl),
3350 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3351 if (past_static_chain_flags)
3352 {
3353 fprintf (dump_file, " Flags for static chain worsened:");
3354 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3355 fprintf (dump_file, " ->\n");
3356 }
3357 }
d119f34c 3358 }
494bdadf 3359 return fixup_cfg;
d119f34c
JH
3360}
3361
3362/* Callback for generate_summary. */
3363
3364static void
3365modref_generate (void)
3366{
3367 struct cgraph_node *node;
3368 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3369 {
3370 function *f = DECL_STRUCT_FUNCTION (node->decl);
3371 if (!f)
3372 continue;
3373 push_cfun (f);
2cadaa1f 3374 analyze_function (true);
d119f34c
JH
3375 pop_cfun ();
3376 }
3377}
3378
18f0873d
JH
3379} /* ANON namespace. */
3380
d70ef656
JH
3381/* Debugging helper. */
3382
3383void
3384debug_eaf_flags (int flags)
3385{
3386 dump_eaf_flags (stderr, flags, true);
3387}
3388
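/* A typical use of the helper above (assumed usage, not shown elsewhere in
   this file) is to call it by hand from a debugger while inspecting the
   analysis, e.g.:

       (gdb) call debug_eaf_flags (summary->arg_flags[0])

   It simply forwards to dump_eaf_flags with stderr as the stream.  */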
d119f34c
JH
3389/* Called when a new function is inserted to callgraph late. */
3390
3391void
3392modref_summaries::insert (struct cgraph_node *node, modref_summary *)
3393{
56cb815b
JH
3394 /* Local passes ought to be executed by the pass manager. */
3395 if (this == optimization_summaries)
71dbabcc
JH
3396 {
3397 optimization_summaries->remove (node);
56cb815b
JH
3398 return;
3399 }
1a90e99f
JH
3400 if (!DECL_STRUCT_FUNCTION (node->decl)
3401 || !opt_for_fn (node->decl, flag_ipa_modref))
56cb815b 3402 {
71dbabcc 3403 summaries->remove (node);
56cb815b 3404 return;
71dbabcc
JH
3405 }
3406 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2cadaa1f 3407 analyze_function (true);
71dbabcc
JH
3408 pop_cfun ();
3409}
3410
3411/* Called when a new function is inserted to callgraph late. */
3412
3413void
3414modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
3415{
3416 /* We do not support adding new functions when IPA information is already
3417 propagated. This is done only by SIMD cloning, which is not very
3418 critical. */
3419 if (!DECL_STRUCT_FUNCTION (node->decl)
1a90e99f 3420 || !opt_for_fn (node->decl, flag_ipa_modref)
71dbabcc
JH
3421 || propagated)
3422 {
3423 summaries_lto->remove (node);
3424 return;
3425 }
d119f34c 3426 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2cadaa1f 3427 analyze_function (true);
d119f34c
JH
3428 pop_cfun ();
3429}
3430
3431/* Called when new clone is inserted to callgraph late. */
3432
3433void
56cb815b 3434modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
d119f34c
JH
3435 modref_summary *src_data,
3436 modref_summary *dst_data)
3437{
8a2fd716 3438 /* Do not duplicate optimization summaries; we do not handle parameter
56cb815b
JH
3439 transforms on them. */
3440 if (this == optimization_summaries)
d119f34c 3441 {
56cb815b
JH
3442 optimization_summaries->remove (dst);
3443 return;
d119f34c 3444 }
8632f8c6 3445 dst_data->stores = modref_records::create_ggc ();
56cb815b 3446 dst_data->stores->copy_from (src_data->stores);
8632f8c6 3447 dst_data->loads = modref_records::create_ggc ();
56cb815b 3448 dst_data->loads->copy_from (src_data->loads);
64f3e71c
JH
3449 dst_data->kills.reserve_exact (src_data->kills.length ());
3450 dst_data->kills.splice (src_data->kills);
617695cd 3451 dst_data->writes_errno = src_data->writes_errno;
992644c3 3452 dst_data->side_effects = src_data->side_effects;
a34edf9a
JH
3453 dst_data->nondeterministic = src_data->nondeterministic;
3454 dst_data->calls_interposable = src_data->calls_interposable;
5962efe9
JH
3455 if (src_data->arg_flags.length ())
3456 dst_data->arg_flags = src_data->arg_flags.copy ();
b8ef019a 3457 dst_data->retslot_flags = src_data->retslot_flags;
a70c0512 3458 dst_data->static_chain_flags = src_data->static_chain_flags;
71dbabcc
JH
3459}
3460
3461/* Called when new clone is inserted to callgraph late. */
3462
3463void
3464modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
3465 modref_summary_lto *src_data,
3466 modref_summary_lto *dst_data)
3467{
8a2fd716 3468 /* Be sure that no further cloning happens after ipa-modref. If it does
fe90c504
JH
3469 we will need to update signatures for possible param changes. */
3470 gcc_checking_assert (!((modref_summaries_lto *)summaries_lto)->propagated);
8632f8c6 3471 dst_data->stores = modref_records_lto::create_ggc ();
56cb815b 3472 dst_data->stores->copy_from (src_data->stores);
8632f8c6 3473 dst_data->loads = modref_records_lto::create_ggc ();
56cb815b 3474 dst_data->loads->copy_from (src_data->loads);
74509b96
JH
3475 dst_data->kills.reserve_exact (src_data->kills.length ());
3476 dst_data->kills.splice (src_data->kills);
6cef01c3 3477 dst_data->writes_errno = src_data->writes_errno;
992644c3 3478 dst_data->side_effects = src_data->side_effects;
a34edf9a
JH
3479 dst_data->nondeterministic = src_data->nondeterministic;
3480 dst_data->calls_interposable = src_data->calls_interposable;
5962efe9
JH
3481 if (src_data->arg_flags.length ())
3482 dst_data->arg_flags = src_data->arg_flags.copy ();
b8ef019a 3483 dst_data->retslot_flags = src_data->retslot_flags;
a70c0512 3484 dst_data->static_chain_flags = src_data->static_chain_flags;
d119f34c
JH
3485}
3486
3487namespace
3488{
3489/* Definition of the modref pass on GIMPLE. */
3490const pass_data pass_data_modref = {
3491 GIMPLE_PASS,
3492 "modref",
3493 OPTGROUP_IPA,
3494 TV_TREE_MODREF,
3495 (PROP_cfg | PROP_ssa),
3496 0,
3497 0,
3498 0,
3499 0,
3500};
3501
3502class pass_modref : public gimple_opt_pass
3503{
3504 public:
3505 pass_modref (gcc::context *ctxt)
3506 : gimple_opt_pass (pass_data_modref, ctxt) {}
3507
d119f34c
JH
3508 /* opt_pass methods: */
3509 opt_pass *clone ()
3510 {
3511 return new pass_modref (m_ctxt);
3512 }
3513 virtual bool gate (function *)
3514 {
3515 return flag_ipa_modref;
3516 }
3517 virtual unsigned int execute (function *);
3518};
3519
3520/* Encode TT to the output block OB using the summary streaming API. */
3521
3522static void
3523write_modref_records (modref_records_lto *tt, struct output_block *ob)
3524{
d119f34c
JH
3525 streamer_write_uhwi (ob, tt->every_base);
3526 streamer_write_uhwi (ob, vec_safe_length (tt->bases));
74509b96 3527 for (auto base_node : tt->bases)
d119f34c
JH
3528 {
3529 stream_write_tree (ob, base_node->base, true);
3530
3531 streamer_write_uhwi (ob, base_node->every_ref);
3532 streamer_write_uhwi (ob, vec_safe_length (base_node->refs));
c33f4742 3533
74509b96 3534 for (auto ref_node : base_node->refs)
d119f34c
JH
3535 {
3536 stream_write_tree (ob, ref_node->ref, true);
c33f4742
JH
3537 streamer_write_uhwi (ob, ref_node->every_access);
3538 streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));
3539
74509b96
JH
3540 for (auto access_node : ref_node->accesses)
3541 access_node.stream_out (ob);
d119f34c
JH
3542 }
3543 }
3544}
3545
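/* A rough sketch of the byte stream produced by write_modref_records above
   and consumed by read_modref_records below (inferred from the code, not a
   separate specification).  Counts and collapse flags are uhwi values and
   trees go through the tree streamer:

     every_base  nbases
       nbases x { base-tree  every_ref  nrefs
                  nrefs x { ref-tree  every_access  naccesses
                            naccesses x { modref_access_node } } }  */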
3546/* Read a modref_tree from the input block IB using the data from DATA_IN.
3547 This assumes that the tree was encoded using write_modref_records.
3548 Either nolto_ret or lto_ret is initialized by the tree depending on whether
46a27415 3549 LTO streaming is expected or not. */
d119f34c 3550
18f0873d 3551static void
8632f8c6
JH
3552read_modref_records (tree decl,
3553 lto_input_block *ib, struct data_in *data_in,
d119f34c
JH
3554 modref_records **nolto_ret,
3555 modref_records_lto **lto_ret)
3556{
8632f8c6
JH
3557 size_t max_bases = opt_for_fn (decl, param_modref_max_bases);
3558 size_t max_refs = opt_for_fn (decl, param_modref_max_refs);
3559 size_t max_accesses = opt_for_fn (decl, param_modref_max_accesses);
d119f34c 3560
71dbabcc 3561 if (lto_ret)
8632f8c6 3562 *lto_ret = modref_records_lto::create_ggc ();
71dbabcc 3563 if (nolto_ret)
8632f8c6 3564 *nolto_ret = modref_records::create_ggc ();
71dbabcc 3565 gcc_checking_assert (lto_ret || nolto_ret);
d119f34c
JH
3566
3567 size_t every_base = streamer_read_uhwi (ib);
3568 size_t nbase = streamer_read_uhwi (ib);
3569
3570 gcc_assert (!every_base || nbase == 0);
3571 if (every_base)
3572 {
71dbabcc 3573 if (nolto_ret)
d119f34c 3574 (*nolto_ret)->collapse ();
71dbabcc 3575 if (lto_ret)
d119f34c
JH
3576 (*lto_ret)->collapse ();
3577 }
3578 for (size_t i = 0; i < nbase; i++)
3579 {
3580 tree base_tree = stream_read_tree (ib, data_in);
3581 modref_base_node <alias_set_type> *nolto_base_node = NULL;
3582 modref_base_node <tree> *lto_base_node = NULL;
3583
3584 /* At stream in time we have LTO alias info. Check if we streamed in
3585 something obviously unnecessary. Do not glob types by alias sets;
3586 it is not 100% clear that ltrans types will get merged the same way.
3587 Types may get refined based on ODR type conflicts. */
3588 if (base_tree && !get_alias_set (base_tree))
3589 {
3590 if (dump_file)
3591 {
3592 fprintf (dump_file, "Streamed in alias set 0 type ");
3593 print_generic_expr (dump_file, base_tree);
3594 fprintf (dump_file, "\n");
3595 }
3596 base_tree = NULL;
3597 }
3598
71dbabcc 3599 if (nolto_ret)
d119f34c
JH
3600 nolto_base_node = (*nolto_ret)->insert_base (base_tree
3601 ? get_alias_set (base_tree)
8632f8c6 3602 : 0, 0, INT_MAX);
71dbabcc 3603 if (lto_ret)
8632f8c6 3604 lto_base_node = (*lto_ret)->insert_base (base_tree, 0, max_bases);
d119f34c
JH
3605 size_t every_ref = streamer_read_uhwi (ib);
3606 size_t nref = streamer_read_uhwi (ib);
3607
3608 gcc_assert (!every_ref || nref == 0);
3609 if (every_ref)
3610 {
3611 if (nolto_base_node)
3612 nolto_base_node->collapse ();
3613 if (lto_base_node)
3614 lto_base_node->collapse ();
3615 }
3616 for (size_t j = 0; j < nref; j++)
3617 {
3618 tree ref_tree = stream_read_tree (ib, data_in);
3619
3620 if (ref_tree && !get_alias_set (ref_tree))
3621 {
3622 if (dump_file)
3623 {
3624 fprintf (dump_file, "Streamed in alias set 0 type ");
3625 print_generic_expr (dump_file, ref_tree);
3626 fprintf (dump_file, "\n");
3627 }
c33f4742 3628 ref_tree = NULL;
d119f34c
JH
3629 }
3630
c33f4742
JH
3631 modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
3632 modref_ref_node <tree> *lto_ref_node = NULL;
3633
d119f34c 3634 if (nolto_base_node)
c33f4742
JH
3635 nolto_ref_node
3636 = nolto_base_node->insert_ref (ref_tree
3637 ? get_alias_set (ref_tree) : 0,
3638 max_refs);
d119f34c 3639 if (lto_base_node)
c33f4742
JH
3640 lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);
3641
3642 size_t every_access = streamer_read_uhwi (ib);
3643 size_t naccesses = streamer_read_uhwi (ib);
3644
425369bf
JH
3645 if (nolto_ref_node && every_access)
3646 nolto_ref_node->collapse ();
3647 if (lto_ref_node && every_access)
3648 lto_ref_node->collapse ();
c33f4742
JH
3649
3650 for (size_t k = 0; k < naccesses; k++)
3651 {
74509b96 3652 modref_access_node a = modref_access_node::stream_in (ib);
c33f4742 3653 if (nolto_ref_node)
5c85f295 3654 nolto_ref_node->insert_access (a, max_accesses, false);
c33f4742 3655 if (lto_ref_node)
5c85f295 3656 lto_ref_node->insert_access (a, max_accesses, false);
c33f4742 3657 }
d119f34c
JH
3658 }
3659 }
71dbabcc 3660 if (lto_ret)
c33f4742 3661 (*lto_ret)->cleanup ();
71dbabcc 3662 if (nolto_ret)
c33f4742 3663 (*nolto_ret)->cleanup ();
d119f34c
JH
3664}
3665
85ebbabd
JH
3666/* Write ESUM to BP. */
3667
3668static void
3669modref_write_escape_summary (struct bitpack_d *bp, escape_summary *esum)
3670{
3671 if (!esum)
3672 {
3673 bp_pack_var_len_unsigned (bp, 0);
3674 return;
3675 }
3676 bp_pack_var_len_unsigned (bp, esum->esc.length ());
3677 unsigned int i;
3678 escape_entry *ee;
3679 FOR_EACH_VEC_ELT (esum->esc, i, ee)
3680 {
b8ef019a 3681 bp_pack_var_len_int (bp, ee->parm_index);
85ebbabd
JH
3682 bp_pack_var_len_unsigned (bp, ee->arg);
3683 bp_pack_var_len_unsigned (bp, ee->min_flags);
3684 bp_pack_value (bp, ee->direct, 1);
3685 }
3686}
3687
3688/* Read escape summary for E from BP. */
3689
3690static void
3691modref_read_escape_summary (struct bitpack_d *bp, cgraph_edge *e)
3692{
3693 unsigned int n = bp_unpack_var_len_unsigned (bp);
3694 if (!n)
3695 return;
3696 escape_summary *esum = escape_summaries->get_create (e);
3697 esum->esc.reserve_exact (n);
3698 for (unsigned int i = 0; i < n; i++)
3699 {
3700 escape_entry ee;
b8ef019a 3701 ee.parm_index = bp_unpack_var_len_int (bp);
85ebbabd
JH
3702 ee.arg = bp_unpack_var_len_unsigned (bp);
3703 ee.min_flags = bp_unpack_var_len_unsigned (bp);
3704 ee.direct = bp_unpack_value (bp, 1);
3705 esum->esc.quick_push (ee);
3706 }
3707}
3708
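/* For reference (derived from the two functions above, not a separate
   specification): an escape summary is streamed as a variable-length entry
   count followed by that many entries, each holding parm_index (signed,
   variable length), arg, min_flags and a single "direct" bit, all packed
   into the surrounding bitpack.  A count of zero stands for a missing
   summary.  */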
d119f34c
JH
3709/* Callback for write_summary. */
3710
3711static void
3712modref_write ()
3713{
3714 struct output_block *ob = create_output_block (LTO_section_ipa_modref);
3715 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3716 unsigned int count = 0;
3717 int i;
3718
71dbabcc 3719 if (!summaries_lto)
d119f34c
JH
3720 {
3721 streamer_write_uhwi (ob, 0);
3722 streamer_write_char_stream (ob->main_stream, 0);
3723 produce_asm (ob, NULL);
3724 destroy_output_block (ob);
3725 return;
3726 }
3727
3728 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3729 {
3730 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3731 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
71dbabcc 3732 modref_summary_lto *r;
d119f34c
JH
3733
3734 if (cnode && cnode->definition && !cnode->alias
71dbabcc
JH
3735 && (r = summaries_lto->get (cnode))
3736 && r->useful_p (flags_from_decl_or_type (cnode->decl)))
d119f34c
JH
3737 count++;
3738 }
3739 streamer_write_uhwi (ob, count);
3740
3741 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3742 {
3743 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3744 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
3745
3746 if (cnode && cnode->definition && !cnode->alias)
3747 {
71dbabcc 3748 modref_summary_lto *r = summaries_lto->get (cnode);
d119f34c 3749
71dbabcc 3750 if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
d119f34c
JH
3751 continue;
3752
3753 streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));
3754
85ebbabd
JH
3755 streamer_write_uhwi (ob, r->arg_flags.length ());
3756 for (unsigned int i = 0; i < r->arg_flags.length (); i++)
8da8ed43 3757 streamer_write_uhwi (ob, r->arg_flags[i]);
b8ef019a 3758 streamer_write_uhwi (ob, r->retslot_flags);
a70c0512 3759 streamer_write_uhwi (ob, r->static_chain_flags);
85ebbabd 3760
56cb815b
JH
3761 write_modref_records (r->loads, ob);
3762 write_modref_records (r->stores, ob);
74509b96
JH
3763 streamer_write_uhwi (ob, r->kills.length ());
3764 for (auto kill : r->kills)
3765 kill.stream_out (ob);
6cef01c3
JH
3766
3767 struct bitpack_d bp = bitpack_create (ob->main_stream);
3768 bp_pack_value (&bp, r->writes_errno, 1);
992644c3 3769 bp_pack_value (&bp, r->side_effects, 1);
a34edf9a
JH
3770 bp_pack_value (&bp, r->nondeterministic, 1);
3771 bp_pack_value (&bp, r->calls_interposable, 1);
6cef01c3
JH
3772 if (!flag_wpa)
3773 {
3774 for (cgraph_edge *e = cnode->indirect_calls;
3775 e; e = e->next_callee)
3776 {
3777 class fnspec_summary *sum = fnspec_summaries->get (e);
3778 bp_pack_value (&bp, sum != NULL, 1);
3779 if (sum)
3780 bp_pack_string (ob, &bp, sum->fnspec, true);
85ebbabd
JH
3781 class escape_summary *esum = escape_summaries->get (e);
3782 modref_write_escape_summary (&bp, esum);
6cef01c3
JH
3783 }
3784 for (cgraph_edge *e = cnode->callees; e; e = e->next_callee)
3785 {
3786 class fnspec_summary *sum = fnspec_summaries->get (e);
3787 bp_pack_value (&bp, sum != NULL, 1);
3788 if (sum)
3789 bp_pack_string (ob, &bp, sum->fnspec, true);
85ebbabd
JH
3790 class escape_summary *esum = escape_summaries->get (e);
3791 modref_write_escape_summary (&bp, esum);
6cef01c3
JH
3792 }
3793 }
3794 streamer_write_bitpack (&bp);
d119f34c
JH
3795 }
3796 }
3797 streamer_write_char_stream (ob->main_stream, 0);
3798 produce_asm (ob, NULL);
3799 destroy_output_block (ob);
3800}
3801
3802static void
3803read_section (struct lto_file_decl_data *file_data, const char *data,
3804 size_t len)
3805{
3806 const struct lto_function_header *header
3807 = (const struct lto_function_header *) data;
3808 const int cfg_offset = sizeof (struct lto_function_header);
3809 const int main_offset = cfg_offset + header->cfg_size;
3810 const int string_offset = main_offset + header->main_size;
3811 struct data_in *data_in;
3812 unsigned int i;
3813 unsigned int f_count;
3814
3815 lto_input_block ib ((const char *) data + main_offset, header->main_size,
3816 file_data->mode_table);
3817
3818 data_in
3819 = lto_data_in_create (file_data, (const char *) data + string_offset,
3820 header->string_size, vNULL);
3821 f_count = streamer_read_uhwi (&ib);
3822 for (i = 0; i < f_count; i++)
3823 {
3824 struct cgraph_node *node;
3825 lto_symtab_encoder_t encoder;
3826
3827 unsigned int index = streamer_read_uhwi (&ib);
3828 encoder = file_data->symtab_node_encoder;
3829 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
3830 index));
3831
71dbabcc
JH
3832 modref_summary *modref_sum = summaries
3833 ? summaries->get_create (node) : NULL;
3834 modref_summary_lto *modref_sum_lto = summaries_lto
3835 ? summaries_lto->get_create (node)
3836 : NULL;
71dbabcc
JH
3837 if (optimization_summaries)
3838 modref_sum = optimization_summaries->get_create (node);
3839
ea937e7d 3840 if (modref_sum)
992644c3
JH
3841 {
3842 modref_sum->writes_errno = false;
3843 modref_sum->side_effects = false;
a34edf9a
JH
3844 modref_sum->nondeterministic = false;
3845 modref_sum->calls_interposable = false;
992644c3 3846 }
6cef01c3 3847 if (modref_sum_lto)
992644c3
JH
3848 {
3849 modref_sum_lto->writes_errno = false;
3850 modref_sum_lto->side_effects = false;
a34edf9a
JH
3851 modref_sum_lto->nondeterministic = false;
3852 modref_sum_lto->calls_interposable = false;
992644c3 3853 }
ea937e7d 3854
71dbabcc
JH
3855 gcc_assert (!modref_sum || (!modref_sum->loads
3856 && !modref_sum->stores));
3857 gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads
3858 && !modref_sum_lto->stores));
85ebbabd
JH
3859 unsigned int args = streamer_read_uhwi (&ib);
3860 if (args && modref_sum)
3861 modref_sum->arg_flags.reserve_exact (args);
3862 if (args && modref_sum_lto)
3863 modref_sum_lto->arg_flags.reserve_exact (args);
3864 for (unsigned int i = 0; i < args; i++)
3865 {
8da8ed43 3866 eaf_flags_t flags = streamer_read_uhwi (&ib);
85ebbabd
JH
3867 if (modref_sum)
3868 modref_sum->arg_flags.quick_push (flags);
3869 if (modref_sum_lto)
3870 modref_sum_lto->arg_flags.quick_push (flags);
3871 }
b8ef019a
JH
3872 eaf_flags_t flags = streamer_read_uhwi (&ib);
3873 if (modref_sum)
3874 modref_sum->retslot_flags = flags;
3875 if (modref_sum_lto)
3876 modref_sum_lto->retslot_flags = flags;
a70c0512
JH
3877
3878 flags = streamer_read_uhwi (&ib);
3879 if (modref_sum)
3880 modref_sum->static_chain_flags = flags;
3881 if (modref_sum_lto)
3882 modref_sum_lto->static_chain_flags = flags;
3883
8632f8c6 3884 read_modref_records (node->decl, &ib, data_in,
56cb815b
JH
3885 modref_sum ? &modref_sum->loads : NULL,
3886 modref_sum_lto ? &modref_sum_lto->loads : NULL);
8632f8c6 3887 read_modref_records (node->decl, &ib, data_in,
56cb815b
JH
3888 modref_sum ? &modref_sum->stores : NULL,
3889 modref_sum_lto ? &modref_sum_lto->stores : NULL);
74509b96
JH
3890 int j = streamer_read_uhwi (&ib);
3891 if (j && modref_sum)
3892 modref_sum->kills.reserve_exact (j);
3893 if (j && modref_sum_lto)
3894 modref_sum_lto->kills.reserve_exact (j);
3895 for (int k = 0; k < j; k++)
3896 {
3897 modref_access_node a = modref_access_node::stream_in (&ib);
3898
3899 if (modref_sum)
3900 modref_sum->kills.quick_push (a);
3901 if (modref_sum_lto)
3902 modref_sum_lto->kills.quick_push (a);
3903 }
6cef01c3
JH
3904 struct bitpack_d bp = streamer_read_bitpack (&ib);
3905 if (bp_unpack_value (&bp, 1))
3906 {
3907 if (modref_sum)
3908 modref_sum->writes_errno = true;
3909 if (modref_sum_lto)
3910 modref_sum_lto->writes_errno = true;
3911 }
992644c3
JH
3912 if (bp_unpack_value (&bp, 1))
3913 {
3914 if (modref_sum)
3915 modref_sum->side_effects = true;
3916 if (modref_sum_lto)
3917 modref_sum_lto->side_effects = true;
3918 }
a34edf9a
JH
3919 if (bp_unpack_value (&bp, 1))
3920 {
3921 if (modref_sum)
3922 modref_sum->nondeterministic = true;
3923 if (modref_sum_lto)
3924 modref_sum_lto->nondeterministic = true;
3925 }
3926 if (bp_unpack_value (&bp, 1))
3927 {
3928 if (modref_sum)
3929 modref_sum->calls_interposable = true;
3930 if (modref_sum_lto)
3931 modref_sum_lto->calls_interposable = true;
3932 }
6cef01c3
JH
3933 if (!flag_ltrans)
3934 {
3935 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
3936 {
3937 if (bp_unpack_value (&bp, 1))
3938 {
3939 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3940 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3941 }
85ebbabd 3942 modref_read_escape_summary (&bp, e);
6cef01c3
JH
3943 }
3944 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
3945 {
3946 if (bp_unpack_value (&bp, 1))
3947 {
3948 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3949 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3950 }
85ebbabd 3951 modref_read_escape_summary (&bp, e);
6cef01c3
JH
3952 }
3953 }
e0040bc3 3954 if (flag_ltrans)
5aa91072 3955 modref_sum->finalize (node->decl);
d119f34c
JH
3956 if (dump_file)
3957 {
3958 fprintf (dump_file, "Read modref for %s\n",
3959 node->dump_name ());
71dbabcc
JH
3960 if (modref_sum)
3961 modref_sum->dump (dump_file);
3962 if (modref_sum_lto)
3963 modref_sum_lto->dump (dump_file);
85ebbabd 3964 dump_modref_edge_summaries (dump_file, node, 4);
d119f34c 3965 }
d119f34c
JH
3966 }
3967
3968 lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
3969 len);
3970 lto_data_in_delete (data_in);
3971}
3972
3973/* Callback for read_summary. */
3974
3975static void
3976modref_read (void)
3977{
3978 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3979 struct lto_file_decl_data *file_data;
3980 unsigned int j = 0;
3981
71dbabcc
JH
3982 gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto);
3983 if (flag_ltrans)
3984 optimization_summaries = modref_summaries::create_ggc (symtab);
3985 else
3986 {
3987 if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
3988 summaries_lto = modref_summaries_lto::create_ggc (symtab);
3989 if (!flag_wpa
3990 || (flag_incremental_link == INCREMENTAL_LINK_LTO
3991 && flag_fat_lto_objects))
3992 summaries = modref_summaries::create_ggc (symtab);
6cef01c3
JH
3993 if (!fnspec_summaries)
3994 fnspec_summaries = new fnspec_summaries_t (symtab);
85ebbabd
JH
3995 if (!escape_summaries)
3996 escape_summaries = new escape_summaries_t (symtab);
71dbabcc 3997 }
d119f34c
JH
3998
3999 while ((file_data = file_data_vec[j++]))
4000 {
4001 size_t len;
4002 const char *data = lto_get_summary_section_data (file_data,
4003 LTO_section_ipa_modref,
4004 &len);
4005 if (data)
4006 read_section (file_data, data, len);
4007 else
4008 /* Fatal error here. We do not want to support compiling ltrans units
4009 with different version of compiler or different flags than the WPA
4010 unit, so this should never happen. */
4011 fatal_error (input_location,
4012 "IPA modref summary is missing in input file");
4013 }
4014}
4015
85ebbabd
JH
4016/* Recompute arg_flags for param adjustments in INFO. */
4017
4018static void
8da8ed43 4019remap_arg_flags (auto_vec <eaf_flags_t> &arg_flags, clone_info *info)
85ebbabd 4020{
8da8ed43 4021 auto_vec<eaf_flags_t> old = arg_flags.copy ();
85ebbabd
JH
4022 int max = -1;
4023 size_t i;
4024 ipa_adjusted_param *p;
4025
4026 arg_flags.release ();
4027
4028 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
4029 {
4030 int o = info->param_adjustments->get_original_index (i);
4031 if (o >= 0 && (int)old.length () > o && old[o])
4032 max = i;
4033 }
5962efe9 4034 if (max >= 0)
85ebbabd
JH
4035 arg_flags.safe_grow_cleared (max + 1, true);
4036 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
4037 {
4038 int o = info->param_adjustments->get_original_index (i);
4039 if (o >= 0 && (int)old.length () > o && old[o])
4040 arg_flags[i] = old[o];
4041 }
4042}
4043
74509b96
JH
4044 /* Update kills according to the parm map MAP. */
4045
4046static void
4047remap_kills (vec <modref_access_node> &kills, const vec <int> &map)
4048{
4049 for (size_t i = 0; i < kills.length ();)
4050 if (kills[i].parm_index >= 0)
4051 {
4052 if (kills[i].parm_index < (int)map.length ()
4053 && map[kills[i].parm_index] != MODREF_UNKNOWN_PARM)
4054 {
4055 kills[i].parm_index = map[kills[i].parm_index];
4056 i++;
4057 }
4058 else
4059 kills.unordered_remove (i);
4060 }
4061 else
4062 i++;
4063}
4064
c8fd2be1
JH
4065/* If signature changed, update the summary. */
4066
fe90c504
JH
4067static void
4068update_signature (struct cgraph_node *node)
c8fd2be1 4069{
ae7a23a3
JH
4070 clone_info *info = clone_info::get (node);
4071 if (!info || !info->param_adjustments)
fe90c504
JH
4072 return;
4073
4074 modref_summary *r = optimization_summaries
4075 ? optimization_summaries->get (node) : NULL;
4076 modref_summary_lto *r_lto = summaries_lto
4077 ? summaries_lto->get (node) : NULL;
4078 if (!r && !r_lto)
4079 return;
c8fd2be1
JH
4080 if (dump_file)
4081 {
4082 fprintf (dump_file, "Updating summary for %s from:\n",
4083 node->dump_name ());
85ebbabd
JH
4084 if (r)
4085 r->dump (dump_file);
4086 if (r_lto)
4087 r_lto->dump (dump_file);
c8fd2be1
JH
4088 }
4089
4090 size_t i, max = 0;
4091 ipa_adjusted_param *p;
4092
ae7a23a3 4093 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
c8fd2be1 4094 {
ae7a23a3 4095 int idx = info->param_adjustments->get_original_index (i);
c8fd2be1
JH
4096 if (idx > (int)max)
4097 max = idx;
4098 }
4099
4100 auto_vec <int, 32> map;
4101
5d2cedaa 4102 map.reserve (max + 1);
c8fd2be1 4103 for (i = 0; i <= max; i++)
992644c3 4104 map.quick_push (MODREF_UNKNOWN_PARM);
ae7a23a3 4105 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
c8fd2be1 4106 {
ae7a23a3 4107 int idx = info->param_adjustments->get_original_index (i);
c8fd2be1 4108 if (idx >= 0)
2f61125f 4109 map[idx] = i;
c8fd2be1 4110 }
fe90c504
JH
4111 if (r)
4112 {
4113 r->loads->remap_params (&map);
4114 r->stores->remap_params (&map);
74509b96 4115 remap_kills (r->kills, map);
85ebbabd
JH
4116 if (r->arg_flags.length ())
4117 remap_arg_flags (r->arg_flags, info);
fe90c504
JH
4118 }
4119 if (r_lto)
4120 {
4121 r_lto->loads->remap_params (&map);
4122 r_lto->stores->remap_params (&map);
74509b96 4123 remap_kills (r_lto->kills, map);
85ebbabd
JH
4124 if (r_lto->arg_flags.length ())
4125 remap_arg_flags (r_lto->arg_flags, info);
fe90c504 4126 }
c8fd2be1
JH
4127 if (dump_file)
4128 {
4129 fprintf (dump_file, "to:\n");
fe90c504 4130 if (r)
6cef01c3 4131 r->dump (dump_file);
fe90c504 4132 if (r_lto)
6cef01c3 4133 r_lto->dump (dump_file);
c8fd2be1 4134 }
e0040bc3 4135 if (r)
5aa91072 4136 r->finalize (node->decl);
fe90c504 4137 return;
c8fd2be1
JH
4138}
4139
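/* A worked example of the remapping above (hypothetical clone, not taken
   from the source): if param_adjustments keep only the original parameters
   1 and 2 of a three-parameter function, get_original_index returns 1 for
   new index 0 and 2 for new index 1, so MAP becomes
   { MODREF_UNKNOWN_PARM, 0, 1 }.  Loads, stores and kills that referred to
   original parameter 1 are rewritten to parameter 0, accesses through the
   dropped parameter 0 can no longer be tracked by parameter index, and
   arg_flags are shuffled the same way by remap_arg_flags.  */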
d119f34c
JH
4140/* Definition of the modref IPA pass. */
4141const pass_data pass_data_ipa_modref =
4142{
4143 IPA_PASS, /* type */
4144 "modref", /* name */
4145 OPTGROUP_IPA, /* optinfo_flags */
4146 TV_IPA_MODREF, /* tv_id */
4147 0, /* properties_required */
4148 0, /* properties_provided */
4149 0, /* properties_destroyed */
4150 0, /* todo_flags_start */
4151 ( TODO_dump_symtab ), /* todo_flags_finish */
4152};
4153
4154class pass_ipa_modref : public ipa_opt_pass_d
4155{
4156public:
4157 pass_ipa_modref (gcc::context *ctxt)
4158 : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
4159 modref_generate, /* generate_summary */
4160 modref_write, /* write_summary */
4161 modref_read, /* read_summary */
4162 modref_write, /* write_optimization_summary */
4163 modref_read, /* read_optimization_summary */
4164 NULL, /* stmt_fixup */
4165 0, /* function_transform_todo_flags_start */
fe90c504 4166 NULL, /* function_transform */
d119f34c
JH
4167 NULL) /* variable_transform */
4168 {}
4169
4170 /* opt_pass methods: */
4171 opt_pass *clone () { return new pass_ipa_modref (m_ctxt); }
4172 virtual bool gate (function *)
4173 {
4174 return true;
4175 }
4176 virtual unsigned int execute (function *);
4177
4178};
4179
4180}
4181
2cadaa1f 4182unsigned int pass_modref::execute (function *)
d119f34c 4183{
2cadaa1f 4184 if (analyze_function (false))
494bdadf 4185 return execute_fixup_cfg ();
d119f34c
JH
4186 return 0;
4187}
4188
4189gimple_opt_pass *
4190make_pass_modref (gcc::context *ctxt)
4191{
4192 return new pass_modref (ctxt);
4193}
4194
4195ipa_opt_pass_d *
4196make_pass_ipa_modref (gcc::context *ctxt)
4197{
4198 return new pass_ipa_modref (ctxt);
4199}
4200
18f0873d
JH
4201namespace {
4202
d119f34c
JH
4203/* Skip edges from and to nodes without ipa_pure_const enabled.
4204 Ignore not available symbols. */
4205
4206static bool
4207ignore_edge (struct cgraph_edge *e)
4208{
87d75a11
JH
4209 /* We merge summaries of inline clones into summaries of functions they
4210 are inlined to. For that reason the complete function bodies must
4211 act as a unit. */
4212 if (!e->inline_failed)
4213 return false;
d119f34c 4214 enum availability avail;
c87ff875 4215 cgraph_node *callee = e->callee->ultimate_alias_target
d119f34c
JH
4216 (&avail, e->caller);
4217
4218 return (avail <= AVAIL_INTERPOSABLE
56cb815b 4219 || ((!optimization_summaries || !optimization_summaries->get (callee))
494bdadf 4220 && (!summaries_lto || !summaries_lto->get (callee))));
d119f34c
JH
4221}
4222
8a2fd716 4223/* Compute parm_map for CALLEE_EDGE. */
d119f34c 4224
6cef01c3 4225static bool
c34db4b6 4226compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
ada353b8
JH
4227{
4228 class ipa_edge_args *args;
4229 if (ipa_node_params_sum
4230 && !callee_edge->call_stmt_cannot_inline_p
a4a3cdd0 4231 && (args = ipa_edge_args_sum->get (callee_edge)) != NULL)
ada353b8
JH
4232 {
4233 int i, count = ipa_get_cs_argument_count (args);
4234 class ipa_node_params *caller_parms_info, *callee_pi;
4235 class ipa_call_summary *es
4236 = ipa_call_summaries->get (callee_edge);
4237 cgraph_node *callee
c87ff875 4238 = callee_edge->callee->ultimate_alias_target
ada353b8
JH
4239 (NULL, callee_edge->caller);
4240
a4a3cdd0
MJ
4241 caller_parms_info
4242 = ipa_node_params_sum->get (callee_edge->caller->inlined_to
4243 ? callee_edge->caller->inlined_to
4244 : callee_edge->caller);
4245 callee_pi = ipa_node_params_sum->get (callee);
ada353b8 4246
520d5ad3 4247 (*parm_map).safe_grow_cleared (count, true);
ada353b8
JH
4248
4249 for (i = 0; i < count; i++)
4250 {
4251 if (es && es->param[i].points_to_local_or_readonly_memory)
4252 {
1f3a3363 4253 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
ada353b8
JH
4254 continue;
4255 }
4256
4257 struct ipa_jump_func *jf
4258 = ipa_get_ith_jump_func (args, i);
899c10c9 4259 if (jf && callee_pi)
ada353b8
JH
4260 {
4261 tree cst = ipa_value_from_jfunc (caller_parms_info,
4262 jf,
4263 ipa_get_type
4264 (callee_pi, i));
4265 if (cst && points_to_local_or_readonly_memory_p (cst))
4266 {
1f3a3363 4267 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
ada353b8
JH
4268 continue;
4269 }
4270 }
4271 if (jf && jf->type == IPA_JF_PASS_THROUGH)
4272 {
c34db4b6 4273 (*parm_map)[i].parm_index
56cb815b 4274 = ipa_get_jf_pass_through_formal_id (jf);
4d90edb9
JH
4275 if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
4276 {
4277 (*parm_map)[i].parm_offset_known = true;
4278 (*parm_map)[i].parm_offset = 0;
4279 }
4280 else if (ipa_get_jf_pass_through_operation (jf)
4281 == POINTER_PLUS_EXPR
4282 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
4283 &(*parm_map)[i].parm_offset))
4284 (*parm_map)[i].parm_offset_known = true;
4285 else
4286 (*parm_map)[i].parm_offset_known = false;
ada353b8
JH
4287 continue;
4288 }
4289 if (jf && jf->type == IPA_JF_ANCESTOR)
c34db4b6
JH
4290 {
4291 (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
4292 (*parm_map)[i].parm_offset_known = true;
c8fd2be1
JH
4293 gcc_checking_assert
4294 (!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)));
4295 (*parm_map)[i].parm_offset
4296 = ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT;
85ebbabd 4297 }
ada353b8 4298 else
c34db4b6 4299 (*parm_map)[i].parm_index = -1;
ada353b8
JH
4300 }
4301 if (dump_file)
4302 {
4303 fprintf (dump_file, " Parm map: ");
4304 for (i = 0; i < count; i++)
c34db4b6 4305 fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
ada353b8
JH
4306 fprintf (dump_file, "\n");
4307 }
6cef01c3 4308 return true;
ada353b8 4309 }
6cef01c3 4310 return false;
ada353b8
JH
4311}
4312
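/* To illustrate the mapping computed above (hypothetical call, not from the
   source): for a call foo (q, p, p + 4) made from bar (p, q), argument 0
   maps to caller parameter 1 with known offset 0, argument 1 to caller
   parameter 0 with known offset 0, and argument 2 to caller parameter 0
   with known offset 4 (bytes).  Arguments known to point to local or
   readonly memory get MODREF_LOCAL_MEMORY_PARM and anything that cannot be
   traced back to a caller parameter gets MODREF_UNKNOWN_PARM.  */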
85ebbabd
JH
4313/* Map used to translate escape infos. */
4314
4315struct escape_map
4316{
4317 int parm_index;
4318 bool direct;
4319};
4320
b8ef019a 4321/* Update escape map for E. */
85ebbabd
JH
4322
4323static void
4324update_escape_summary_1 (cgraph_edge *e,
9851a163
JH
4325 vec <vec <escape_map>> &map,
4326 bool ignore_stores)
85ebbabd
JH
4327{
4328 escape_summary *sum = escape_summaries->get (e);
4329 if (!sum)
4330 return;
4331 auto_vec <escape_entry> old = sum->esc.copy ();
4332 sum->esc.release ();
4333
4334 unsigned int i;
4335 escape_entry *ee;
4336 FOR_EACH_VEC_ELT (old, i, ee)
4337 {
4338 unsigned int j;
4339 struct escape_map *em;
b8ef019a
JH
4340 /* TODO: We do not have jump functions for return slots, so we
4341 never propagate them to the outer function. */
4342 if (ee->parm_index >= (int)map.length ()
4343 || ee->parm_index < 0)
85ebbabd
JH
4344 continue;
4345 FOR_EACH_VEC_ELT (map[ee->parm_index], j, em)
4346 {
9851a163
JH
4347 int min_flags = ee->min_flags;
4348 if (ee->direct && !em->direct)
4349 min_flags = deref_flags (min_flags, ignore_stores);
85ebbabd 4350 struct escape_entry entry = {em->parm_index, ee->arg,
a70faf6e 4351 min_flags,
85ebbabd
JH
4352 ee->direct & em->direct};
4353 sum->esc.safe_push (entry);
4354 }
4355 }
4356 if (!sum->esc.length ())
4357 escape_summaries->remove (e);
4358}
4359
4360 /* Update escape map for NODE. */
4361
4362static void
4363update_escape_summary (cgraph_node *node,
9851a163
JH
4364 vec <vec <escape_map>> &map,
4365 bool ignore_stores)
85ebbabd
JH
4366{
4367 if (!escape_summaries)
4368 return;
4369 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
9851a163 4370 update_escape_summary_1 (e, map, ignore_stores);
85ebbabd
JH
4371 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
4372 {
4373 if (!e->inline_failed)
9851a163 4374 update_escape_summary (e->callee, map, ignore_stores);
85ebbabd 4375 else
9851a163 4376 update_escape_summary_1 (e, map, ignore_stores);
85ebbabd
JH
4377 }
4378}
4379
6cef01c3
JH
4380/* Get parameter type from DECL. This is only safe for special cases
4381 like builtins we create fnspec for, because the type match is checked
4382 at fnspec creation time. */
d119f34c 4383
6cef01c3
JH
4384static tree
4385get_parm_type (tree decl, unsigned int i)
ada353b8 4386{
6cef01c3 4387 tree t = TYPE_ARG_TYPES (TREE_TYPE (decl));
ada353b8 4388
6cef01c3
JH
4389 for (unsigned int p = 0; p < i; p++)
4390 t = TREE_CHAIN (t);
4391 return TREE_VALUE (t);
4392}
4393
4394/* Return access mode for argument I of call E with FNSPEC. */
4395
4396static modref_access_node
4397get_access_for_fnspec (cgraph_edge *e, attr_fnspec &fnspec,
4398 unsigned int i, modref_parm_map &map)
4399{
4400 tree size = NULL_TREE;
4401 unsigned int size_arg;
4402
4403 if (!fnspec.arg_specified_p (i))
4404 ;
4405 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
ada353b8 4406 {
6cef01c3
JH
4407 cgraph_node *node = e->caller->inlined_to
4408 ? e->caller->inlined_to : e->caller;
a4a3cdd0
MJ
4409 ipa_node_params *caller_parms_info = ipa_node_params_sum->get (node);
4410 ipa_edge_args *args = ipa_edge_args_sum->get (e);
6cef01c3
JH
4411 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, size_arg);
4412
4413 if (jf)
4414 size = ipa_value_from_jfunc (caller_parms_info, jf,
4415 get_parm_type (e->callee->decl, size_arg));
ada353b8 4416 }
6cef01c3
JH
4417 else if (fnspec.arg_access_size_given_by_type_p (i))
4418 size = TYPE_SIZE_UNIT (get_parm_type (e->callee->decl, i));
4419 modref_access_node a = {0, -1, -1,
4420 map.parm_offset, map.parm_index,
5c85f295 4421 map.parm_offset_known, 0};
6cef01c3
JH
4422 poly_int64 size_hwi;
4423 if (size
4424 && poly_int_tree_p (size, &size_hwi)
4425 && coeffs_in_range_p (size_hwi, 0,
4426 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
ada353b8 4427 {
6cef01c3
JH
4428 a.size = -1;
4429 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
ada353b8 4430 }
6cef01c3
JH
4431 return a;
4432}
4433
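/* As an illustration (hypothetical call, not from the source): for a
   memset-style builtin whose fnspec records that the number of bytes
   written through a pointer argument is given by another argument, the
   jump function of that size argument is evaluated in the caller; when it
   folds to a compile-time constant the resulting access carries a known
   max_size, otherwise the access stays unbounded and only the parameter
   index and offset taken from MAP are recorded.  */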
09a4ffb7
JH
4434 /* Collapse loads and return true if something changed. */
4435static bool
4436collapse_loads (modref_summary *cur_summary,
4437 modref_summary_lto *cur_summary_lto)
4438{
4439 bool changed = false;
4440
4441 if (cur_summary && !cur_summary->loads->every_base)
4442 {
4443 cur_summary->loads->collapse ();
4444 changed = true;
4445 }
4446 if (cur_summary_lto
4447 && !cur_summary_lto->loads->every_base)
4448 {
4449 cur_summary_lto->loads->collapse ();
4450 changed = true;
4451 }
4452 return changed;
4453}
4454
4455/* Collapse loads and return true if something changed. */
4456
4457static bool
4458collapse_stores (modref_summary *cur_summary,
4459 modref_summary_lto *cur_summary_lto)
4460{
4461 bool changed = false;
4462
4463 if (cur_summary && !cur_summary->stores->every_base)
4464 {
4465 cur_summary->stores->collapse ();
4466 changed = true;
4467 }
4468 if (cur_summary_lto
4469 && !cur_summary_lto->stores->every_base)
4470 {
4471 cur_summary_lto->stores->collapse ();
4472 changed = true;
4473 }
4474 return changed;
4475}
4476
6cef01c3
JH
4477 /* Call E in NODE with ECF_FLAGS has no summary; update CUR_SUMMARY and
4478 CUR_SUMMARY_LTO accordingly. Return true if something changed. */
4479
4480static bool
4481propagate_unknown_call (cgraph_node *node,
4482 cgraph_edge *e, int ecf_flags,
85ebbabd 4483 modref_summary *cur_summary,
8d3abf42
JH
4484 modref_summary_lto *cur_summary_lto,
4485 bool nontrivial_scc)
6cef01c3
JH
4486{
4487 bool changed = false;
6cef01c3
JH
4488 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4489 auto_vec <modref_parm_map, 32> parm_map;
992644c3
JH
4490 bool looping;
4491
4492 if (e->callee
4493 && builtin_safe_for_const_function_p (&looping, e->callee->decl))
4494 {
8d3abf42 4495 if (looping && cur_summary && !cur_summary->side_effects)
992644c3
JH
4496 {
4497 cur_summary->side_effects = true;
4498 changed = true;
4499 }
8d3abf42 4500 if (looping && cur_summary_lto && !cur_summary_lto->side_effects)
992644c3
JH
4501 {
4502 cur_summary_lto->side_effects = true;
4503 changed = true;
4504 }
4505 return changed;
4506 }
4507
8d3abf42
JH
4508 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
4509 || (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
4510 || nontrivial_scc)
992644c3
JH
4511 {
4512 if (cur_summary && !cur_summary->side_effects)
4513 {
4514 cur_summary->side_effects = true;
4515 changed = true;
4516 }
4517 if (cur_summary_lto && !cur_summary_lto->side_effects)
4518 {
4519 cur_summary_lto->side_effects = true;
4520 changed = true;
4521 }
a34edf9a
JH
4522 if (cur_summary && !cur_summary->nondeterministic
4523 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4524 {
4525 cur_summary->nondeterministic = true;
4526 changed = true;
4527 }
4528 if (cur_summary_lto && !cur_summary_lto->nondeterministic
4529 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4530 {
4531 cur_summary_lto->nondeterministic = true;
4532 changed = true;
4533 }
992644c3 4534 }
8d3abf42
JH
4535 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
4536 return changed;
992644c3 4537
6cef01c3
JH
4538 if (fnspec_sum
4539 && compute_parm_map (e, &parm_map))
4540 {
4541 attr_fnspec fnspec (fnspec_sum->fnspec);
4542
4543 gcc_checking_assert (fnspec.known_p ());
4544 if (fnspec.global_memory_read_p ())
4545 collapse_loads (cur_summary, cur_summary_lto);
4546 else
4547 {
4548 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
4549 for (unsigned i = 0; i < parm_map.length () && t;
4550 i++, t = TREE_CHAIN (t))
4551 if (!POINTER_TYPE_P (TREE_VALUE (t)))
4552 ;
4553 else if (!fnspec.arg_specified_p (i)
4554 || fnspec.arg_maybe_read_p (i))
4555 {
4556 modref_parm_map map = parm_map[i];
1f3a3363 4557 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
6cef01c3 4558 continue;
1f3a3363 4559 if (map.parm_index == MODREF_UNKNOWN_PARM)
6cef01c3
JH
4560 {
4561 collapse_loads (cur_summary, cur_summary_lto);
4562 break;
4563 }
4564 if (cur_summary)
4565 changed |= cur_summary->loads->insert
8632f8c6
JH
4566 (node->decl, 0, 0,
4567 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4568 if (cur_summary_lto)
4569 changed |= cur_summary_lto->loads->insert
8632f8c6
JH
4570 (node->decl, 0, 0,
4571 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4572 }
4573 }
4574 if (ignore_stores_p (node->decl, ecf_flags))
4575 ;
4576 else if (fnspec.global_memory_written_p ())
4577 collapse_stores (cur_summary, cur_summary_lto);
4578 else
4579 {
4580 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
4581 for (unsigned i = 0; i < parm_map.length () && t;
4582 i++, t = TREE_CHAIN (t))
4583 if (!POINTER_TYPE_P (TREE_VALUE (t)))
4584 ;
4585 else if (!fnspec.arg_specified_p (i)
4586 || fnspec.arg_maybe_written_p (i))
4587 {
4588 modref_parm_map map = parm_map[i];
1f3a3363 4589 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
6cef01c3 4590 continue;
1f3a3363 4591 if (map.parm_index == MODREF_UNKNOWN_PARM)
6cef01c3
JH
4592 {
4593 collapse_stores (cur_summary, cur_summary_lto);
4594 break;
4595 }
4596 if (cur_summary)
4597 changed |= cur_summary->stores->insert
8632f8c6
JH
4598 (node->decl, 0, 0,
4599 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4600 if (cur_summary_lto)
4601 changed |= cur_summary_lto->stores->insert
8632f8c6
JH
4602 (node->decl, 0, 0,
4603 get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4604 }
4605 }
4606 if (fnspec.errno_maybe_written_p () && flag_errno_math)
4607 {
4608 if (cur_summary && !cur_summary->writes_errno)
4609 {
4610 cur_summary->writes_errno = true;
4611 changed = true;
4612 }
4613 if (cur_summary_lto && !cur_summary_lto->writes_errno)
4614 {
4615 cur_summary_lto->writes_errno = true;
4616 changed = true;
4617 }
4618 }
4619 return changed;
4620 }
85ebbabd
JH
4621 if (dump_file)
4622 fprintf (dump_file, " collapsing loads\n");
4623 changed |= collapse_loads (cur_summary, cur_summary_lto);
4624 if (!ignore_stores_p (node->decl, ecf_flags))
6cef01c3
JH
4625 {
4626 if (dump_file)
85ebbabd
JH
4627 fprintf (dump_file, " collapsing stores\n");
4628 changed |= collapse_stores (cur_summary, cur_summary_lto);
6cef01c3 4629 }
85ebbabd 4630 return changed;
ada353b8 4631}
d119f34c 4632
85ebbabd
JH
4633 /* Maybe remove summaries of NODE pointed to by CUR_SUMMARY_PTR
4634 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
4635
4636static void
4637remove_useless_summaries (cgraph_node *node,
4638 modref_summary **cur_summary_ptr,
4639 modref_summary_lto **cur_summary_lto_ptr,
4640 int ecf_flags)
4641{
4642 if (*cur_summary_ptr && !(*cur_summary_ptr)->useful_p (ecf_flags, false))
4643 {
4644 optimization_summaries->remove (node);
4645 *cur_summary_ptr = NULL;
4646 }
4647 if (*cur_summary_lto_ptr
4648 && !(*cur_summary_lto_ptr)->useful_p (ecf_flags, false))
4649 {
4650 summaries_lto->remove (node);
4651 *cur_summary_lto_ptr = NULL;
4652 }
4653}
4654
4655/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4656 and propagate loads/stores. */
ada353b8 4657
494bdadf 4658static bool
ada353b8
JH
4659modref_propagate_in_scc (cgraph_node *component_node)
4660{
4661 bool changed = true;
5c85f295 4662 bool first = true;
ada353b8
JH
4663 int iteration = 0;
4664
4665 while (changed)
4666 {
8d3abf42
JH
4667 bool nontrivial_scc
4668 = ((struct ipa_dfs_info *) component_node->aux)->next_cycle;
ada353b8
JH
4669 changed = false;
4670 for (struct cgraph_node *cur = component_node; cur;
d119f34c
JH
4671 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4672 {
ada353b8 4673 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
71dbabcc
JH
4674 modref_summary *cur_summary = optimization_summaries
4675 ? optimization_summaries->get (node)
4676 : NULL;
4677 modref_summary_lto *cur_summary_lto = summaries_lto
4678 ? summaries_lto->get (node)
4679 : NULL;
4680
4681 if (!cur_summary && !cur_summary_lto)
ada353b8
JH
4682 continue;
4683
85ebbabd
JH
4684 int cur_ecf_flags = flags_from_decl_or_type (node->decl);
4685
ada353b8
JH
4686 if (dump_file)
4687 fprintf (dump_file, " Processing %s%s%s\n",
4688 cur->dump_name (),
4689 TREE_READONLY (cur->decl) ? " (const)" : "",
4690 DECL_PURE_P (cur->decl) ? " (pure)" : "");
d119f34c 4691
d119f34c
JH
4692 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
4693 {
6cef01c3 4694 if (dump_file)
8d3abf42 4695 fprintf (dump_file, " Indirect call\n");
85ebbabd 4696 if (propagate_unknown_call
6cef01c3 4697 (node, e, e->indirect_info->ecf_flags,
8d3abf42
JH
4698 cur_summary, cur_summary_lto,
4699 nontrivial_scc))
85ebbabd
JH
4700 {
4701 changed = true;
4702 remove_useless_summaries (node, &cur_summary,
4703 &cur_summary_lto,
4704 cur_ecf_flags);
4705 if (!cur_summary && !cur_summary_lto)
4706 break;
4707 }
d119f34c
JH
4708 }
4709
71dbabcc 4710 if (!cur_summary && !cur_summary_lto)
ada353b8
JH
4711 continue;
4712
d119f34c
JH
4713 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
4714 callee_edge = callee_edge->next_callee)
4715 {
4716 int flags = flags_from_decl_or_type (callee_edge->callee->decl);
71dbabcc
JH
4717 modref_summary *callee_summary = NULL;
4718 modref_summary_lto *callee_summary_lto = NULL;
d119f34c
JH
4719 struct cgraph_node *callee;
4720
8d3abf42
JH
4721 if (!callee_edge->inline_failed
4722 || ((flags & (ECF_CONST | ECF_NOVOPS))
4723 && !(flags & ECF_LOOPING_CONST_OR_PURE)))
d119f34c
JH
4724 continue;
4725
d119f34c
JH
4726 /* Get the callee and its summary. */
4727 enum availability avail;
c87ff875 4728 callee = callee_edge->callee->ultimate_alias_target
d119f34c
JH
4729 (&avail, cur);
4730
ada353b8
JH
4731 /* It is not necessary to re-process calls outside of the
4732 SCC component. */
4733 if (iteration > 0
4734 && (!callee->aux
4735 || ((struct ipa_dfs_info *)cur->aux)->scc_no
4736 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
4737 continue;
4738
4739 if (dump_file)
4740 fprintf (dump_file, " Call to %s\n",
4741 callee_edge->callee->dump_name ());
d119f34c
JH
4742
4743 bool ignore_stores = ignore_stores_p (cur->decl, flags);
4744
71dbabcc 4745 if (avail <= AVAIL_INTERPOSABLE)
d119f34c 4746 {
6cef01c3
JH
4747 if (dump_file)
4748 fprintf (dump_file, " Call target interposable"
4749 " or not available\n");
4750 changed |= propagate_unknown_call
4751 (node, callee_edge, flags,
8d3abf42
JH
4752 cur_summary, cur_summary_lto,
4753 nontrivial_scc);
6cef01c3
JH
4754 if (!cur_summary && !cur_summary_lto)
4755 break;
4756 continue;
71dbabcc
JH
4757 }
4758
4759 /* We don't know anything about CALLEE, hence we cannot tell
4760 anything about the entire component. */
4761
4762 if (cur_summary
4763 && !(callee_summary = optimization_summaries->get (callee)))
4764 {
6cef01c3
JH
4765 if (dump_file)
4766 fprintf (dump_file, " No call target summary\n");
4767 changed |= propagate_unknown_call
4768 (node, callee_edge, flags,
8d3abf42
JH
4769 cur_summary, NULL,
4770 nontrivial_scc);
71dbabcc
JH
4771 }
4772 if (cur_summary_lto
4773 && !(callee_summary_lto = summaries_lto->get (callee)))
4774 {
6cef01c3
JH
4775 if (dump_file)
4776 fprintf (dump_file, " No call target summary\n");
4777 changed |= propagate_unknown_call
4778 (node, callee_edge, flags,
8d3abf42
JH
4779 NULL, cur_summary_lto,
4780 nontrivial_scc);
d119f34c
JH
4781 }
4782
8d3abf42
JH
4783 if (callee_summary && !cur_summary->side_effects
4784 && (callee_summary->side_effects
4785 || callee_edge->recursive_p ()))
4786 {
4787 cur_summary->side_effects = true;
4788 changed = true;
4789 }
4790 if (callee_summary_lto && !cur_summary_lto->side_effects
4791 && (callee_summary_lto->side_effects
4792 || callee_edge->recursive_p ()))
4793 {
4794 cur_summary_lto->side_effects = true;
4795 changed = true;
4796 }
a34edf9a
JH
4797 if (callee_summary && !cur_summary->nondeterministic
4798 && callee_summary->nondeterministic
4799 && !ignore_nondeterminism_p (cur->decl, flags))
4800 {
4801 cur_summary->nondeterministic = true;
4802 changed = true;
4803 }
4804 if (callee_summary_lto && !cur_summary_lto->nondeterministic
4805 && callee_summary_lto->nondeterministic
4806 && !ignore_nondeterminism_p (cur->decl, flags))
4807 {
4808 cur_summary_lto->nondeterministic = true;
4809 changed = true;
4810 }
8d3abf42
JH
4811 if (flags & (ECF_CONST | ECF_NOVOPS))
4812 continue;
4813
ada353b8
JH
4814 /* We cannot safely optimize based on the summary of the callee if it
4815 does not always bind to the current def: it is possible that a
4816 memory load was optimized out earlier, which may not happen in
4817 the interposed variant. */
4818 if (!callee_edge->binds_to_current_def_p ())
4819 {
a34edf9a
JH
4820 if (cur_summary && !cur_summary->calls_interposable)
4821 {
4822 cur_summary->calls_interposable = true;
4823 changed = true;
4824 }
4825 if (cur_summary_lto && !cur_summary_lto->calls_interposable)
4826 {
4827 cur_summary_lto->calls_interposable = true;
4828 changed = true;
4829 }
ada353b8
JH
4830 if (dump_file)
4831 fprintf (dump_file, " May not bind local;"
4832 " collapsing loads\n");
4833 }
4834
4835
c34db4b6 4836 auto_vec <modref_parm_map, 32> parm_map;
1f3a3363
JH
4837 modref_parm_map chain_map;
4838 /* TODO: Once we get jump functions for static chains we could
4839 compute this. */
4840 chain_map.parm_index = MODREF_UNKNOWN_PARM;
ada353b8
JH
4841
4842 compute_parm_map (callee_edge, &parm_map);
c33f4742 4843
d119f34c 4844 /* Merge in callee's information. */
71dbabcc
JH
4845 if (callee_summary)
4846 {
56cb815b 4847 changed |= cur_summary->loads->merge
8632f8c6
JH
4848 (node->decl, callee_summary->loads,
4849 &parm_map, &chain_map, !first);
56cb815b 4850 if (!ignore_stores)
6cef01c3
JH
4851 {
4852 changed |= cur_summary->stores->merge
8632f8c6
JH
4853 (node->decl, callee_summary->stores,
4854 &parm_map, &chain_map, !first);
6cef01c3
JH
4855 if (!cur_summary->writes_errno
4856 && callee_summary->writes_errno)
4857 {
4858 cur_summary->writes_errno = true;
4859 changed = true;
4860 }
4861 }
71dbabcc
JH
4862 }
4863 if (callee_summary_lto)
4864 {
56cb815b 4865 changed |= cur_summary_lto->loads->merge
8632f8c6
JH
4866 (node->decl, callee_summary_lto->loads,
4867 &parm_map, &chain_map, !first);
56cb815b 4868 if (!ignore_stores)
6cef01c3
JH
4869 {
4870 changed |= cur_summary_lto->stores->merge
8632f8c6
JH
4871 (node->decl, callee_summary_lto->stores,
4872 &parm_map, &chain_map, !first);
6cef01c3
JH
4873 if (!cur_summary_lto->writes_errno
4874 && callee_summary_lto->writes_errno)
4875 {
4876 cur_summary_lto->writes_errno = true;
4877 changed = true;
4878 }
4879 }
71dbabcc 4880 }
85ebbabd
JH
4881 if (changed)
4882 remove_useless_summaries (node, &cur_summary,
4883 &cur_summary_lto,
4884 cur_ecf_flags);
4885 if (!cur_summary && !cur_summary_lto)
4886 break;
ada353b8 4887 if (dump_file && changed)
71dbabcc
JH
4888 {
4889 if (cur_summary)
4890 cur_summary->dump (dump_file);
4891 if (cur_summary_lto)
4892 cur_summary_lto->dump (dump_file);
85ebbabd 4893 dump_modref_edge_summaries (dump_file, node, 4);
71dbabcc 4894 }
d119f34c
JH
4895 }
4896 }
ada353b8 4897 iteration++;
5c85f295 4898 first = false;
ada353b8 4899 }
ada353b8 4900 if (dump_file)
85ebbabd
JH
4901 fprintf (dump_file,
4902 "Propagation finished in %i iterations\n", iteration);
494bdadf
JH
4903 bool pureconst = false;
4904 for (struct cgraph_node *cur = component_node; cur;
4905 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4906 if (!cur->inlined_to && opt_for_fn (cur->decl, flag_ipa_pure_const))
4907 {
4908 modref_summary *summary = optimization_summaries
4909 ? optimization_summaries->get (cur)
4910 : NULL;
4911 modref_summary_lto *summary_lto = summaries_lto
4912 ? summaries_lto->get (cur)
4913 : NULL;
1b62cddc 4914 if (summary && !summary->stores->every_base && !summary->stores->bases
a34edf9a 4915 && !summary->nondeterministic)
494bdadf 4916 {
a34edf9a
JH
4917 if (!summary->loads->every_base && !summary->loads->bases
4918 && !summary->calls_interposable)
494bdadf
JH
4919 pureconst |= ipa_make_function_const
4920 (cur, summary->side_effects, false);
4921 else
4922 pureconst |= ipa_make_function_pure
4923 (cur, summary->side_effects, false);
4924 }
4925 if (summary_lto && !summary_lto->stores->every_base
a34edf9a 4926 && !summary_lto->stores->bases && !summary_lto->nondeterministic)
494bdadf 4927 {
a34edf9a
JH
4928 if (!summary_lto->loads->every_base && !summary_lto->loads->bases
4929 && !summary_lto->calls_interposable)
494bdadf
JH
4930 pureconst |= ipa_make_function_const
4931 (cur, summary_lto->side_effects, false);
4932 else
4933 pureconst |= ipa_make_function_pure
4934 (cur, summary_lto->side_effects, false);
4935 }
4936 }
4937 return pureconst;
85ebbabd
JH
4938}
4939
4940/* Dump results of propagation in SCC rooted in COMPONENT_NODE. */
4941
4942static void
4943modref_propagate_dump_scc (cgraph_node *component_node)
4944{
4945 for (struct cgraph_node *cur = component_node; cur;
4946 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4947 if (!cur->inlined_to)
4948 {
4949 modref_summary *cur_summary = optimization_summaries
4950 ? optimization_summaries->get (cur)
4951 : NULL;
4952 modref_summary_lto *cur_summary_lto = summaries_lto
4953 ? summaries_lto->get (cur)
4954 : NULL;
4955
4956 fprintf (dump_file, "Propagated modref for %s%s%s\n",
4957 cur->dump_name (),
4958 TREE_READONLY (cur->decl) ? " (const)" : "",
4959 DECL_PURE_P (cur->decl) ? " (pure)" : "");
4960 if (optimization_summaries)
4961 {
4962 if (cur_summary)
4963 cur_summary->dump (dump_file);
4964 else
4965 fprintf (dump_file, " Not tracked\n");
4966 }
4967 if (summaries_lto)
4968 {
4969 if (cur_summary_lto)
4970 cur_summary_lto->dump (dump_file);
4971 else
4972 fprintf (dump_file, " Not tracked (lto)\n");
4973 }
4974 }
4975}
4976
16e85390
JH
4977 /* Determine EAF flags known for call E with CALLEE_ECF_FLAGS and ARG. */
4978
4979int
4980implicit_eaf_flags_for_edge_and_arg (cgraph_edge *e, int callee_ecf_flags,
4981 bool ignore_stores, int arg)
4982{
4983 /* Returning the value is already accounted for during local propagation. */
4984 int implicit_flags = EAF_NOT_RETURNED_DIRECTLY
4985 | EAF_NOT_RETURNED_INDIRECTLY;
4986 if (ignore_stores)
4987 implicit_flags |= ignore_stores_eaf_flags;
4988 if (callee_ecf_flags & ECF_PURE)
4989 implicit_flags |= implicit_pure_eaf_flags;
4990 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
4991 implicit_flags |= implicit_const_eaf_flags;
4992 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4993 if (fnspec_sum)
4994 {
4995 attr_fnspec fnspec (fnspec_sum->fnspec);
4996 implicit_flags |= fnspec.arg_eaf_flags (arg);
4997 }
4998 return implicit_flags;
4999}
5000
85ebbabd
JH
5001/* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
5002 and SUMMARY_LTO to CUR_SUMMARY_LTO.
5003 Return true if something changed. */
5004
5005static bool
5006modref_merge_call_site_flags (escape_summary *sum,
5007 modref_summary *cur_summary,
5008 modref_summary_lto *cur_summary_lto,
5009 modref_summary *summary,
5010 modref_summary_lto *summary_lto,
4341b1b1 5011 tree caller,
f6f704fd
JH
5012 cgraph_edge *e,
5013 int caller_ecf_flags,
5014 int callee_ecf_flags,
5015 bool binds_to_current_def)
85ebbabd
JH
5016{
5017 escape_entry *ee;
5018 unsigned int i;
5019 bool changed = false;
f6f704fd 5020 bool ignore_stores = ignore_stores_p (caller, callee_ecf_flags);
85ebbabd
JH
5021
5022 /* If we have no useful info to propagate. */
5023 if ((!cur_summary || !cur_summary->arg_flags.length ())
5024 && (!cur_summary_lto || !cur_summary_lto->arg_flags.length ()))
5025 return false;
5026
5027 FOR_EACH_VEC_ELT (sum->esc, i, ee)
ada353b8 5028 {
85ebbabd
JH
5029 int flags = 0;
5030 int flags_lto = 0;
16e85390
JH
5031 int implicit_flags = implicit_eaf_flags_for_edge_and_arg
5032 (e, callee_ecf_flags, ignore_stores, ee->arg);
85ebbabd
JH
5033
5034 if (summary && ee->arg < summary->arg_flags.length ())
5035 flags = summary->arg_flags[ee->arg];
5036 if (summary_lto
5037 && ee->arg < summary_lto->arg_flags.length ())
5038 flags_lto = summary_lto->arg_flags[ee->arg];
5039 if (!ee->direct)
5040 {
5041 flags = deref_flags (flags, ignore_stores);
5042 flags_lto = deref_flags (flags_lto, ignore_stores);
5043 }
f6f704fd
JH
5044 if (ignore_stores)
5045 implicit_flags |= ignore_stores_eaf_flags;
5046 if (callee_ecf_flags & ECF_PURE)
5047 implicit_flags |= implicit_pure_eaf_flags;
5048 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
5049 implicit_flags |= implicit_const_eaf_flags;
5050 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
5051 if (fnspec_sum)
85ebbabd 5052 {
f6f704fd 5053 attr_fnspec fnspec (fnspec_sum->fnspec);
e2dd12ab 5054 implicit_flags |= fnspec.arg_eaf_flags (ee->arg);
f6f704fd
JH
5055 }
5056 if (!ee->direct)
5057 implicit_flags = deref_flags (implicit_flags, ignore_stores);
5058 flags |= implicit_flags;
5059 flags_lto |= implicit_flags;
5060 if (!binds_to_current_def && (flags || flags_lto))
5061 {
5062 flags = interposable_eaf_flags (flags, implicit_flags);
5063 flags_lto = interposable_eaf_flags (flags_lto, implicit_flags);
85ebbabd 5064 }
3350e59f 5065 if (!(flags & EAF_UNUSED)
b8ef019a 5066 && cur_summary && ee->parm_index < (int)cur_summary->arg_flags.length ())
85ebbabd 5067 {
1f3a3363 5068 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
b8ef019a 5069 ? cur_summary->retslot_flags
1f3a3363 5070 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
a70c0512 5071 ? cur_summary->static_chain_flags
b8ef019a 5072 : cur_summary->arg_flags[ee->parm_index];
85ebbabd
JH
5073 if ((f & flags) != f)
5074 {
4341b1b1 5075 f = remove_useless_eaf_flags
f6f704fd 5076 (f & flags, caller_ecf_flags,
4341b1b1 5077 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
85ebbabd
JH
5078 changed = true;
5079 }
5080 }
3350e59f
JH
5081 if (!(flags_lto & EAF_UNUSED)
5082 && cur_summary_lto
b8ef019a 5083 && ee->parm_index < (int)cur_summary_lto->arg_flags.length ())
85ebbabd 5084 {
1f3a3363 5085 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
b8ef019a 5086 ? cur_summary_lto->retslot_flags
1f3a3363 5087 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
a70c0512 5088 ? cur_summary_lto->static_chain_flags
b8ef019a 5089 : cur_summary_lto->arg_flags[ee->parm_index];
85ebbabd
JH
5090 if ((f & flags_lto) != f)
5091 {
4341b1b1 5092 f = remove_useless_eaf_flags
f6f704fd 5093 (f & flags_lto, caller_ecf_flags,
4341b1b1 5094 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
85ebbabd
JH
5095 changed = true;
5096 }
5097 }
5098 }
5099 return changed;
5100}
5101
5102/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
5103 and propagate arg flags. */
5104
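/* The per-parameter flags are only ever narrowed by
   modref_merge_call_site_flags (each update intersects with the previous
   value), so the iteration below is guaranteed to reach a fixed point.  */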
5105static void
5106modref_propagate_flags_in_scc (cgraph_node *component_node)
5107{
5108 bool changed = true;
5109 int iteration = 0;
5110
5111 while (changed)
5112 {
5113 changed = false;
ada353b8 5114 for (struct cgraph_node *cur = component_node; cur;
d119f34c 5115 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
85ebbabd
JH
5116 {
5117 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
5118 modref_summary *cur_summary = optimization_summaries
5119 ? optimization_summaries->get (node)
5120 : NULL;
5121 modref_summary_lto *cur_summary_lto = summaries_lto
5122 ? summaries_lto->get (node)
5123 : NULL;
5124
5125 if (!cur_summary && !cur_summary_lto)
5126 continue;
f6f704fd 5127 int caller_ecf_flags = flags_from_decl_or_type (cur->decl);
85ebbabd
JH
5128
5129 if (dump_file)
5130 fprintf (dump_file, " Processing %s%s%s\n",
ada353b8
JH
5131 cur->dump_name (),
5132 TREE_READONLY (cur->decl) ? " (const)" : "",
5133 DECL_PURE_P (cur->decl) ? " (pure)" : "");
85ebbabd
JH
5134
5135 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
5136 {
5137 escape_summary *sum = escape_summaries->get (e);
5138
5139 if (!sum || (e->indirect_info->ecf_flags
5140 & (ECF_CONST | ECF_NOVOPS)))
5141 continue;
5142
5143 changed |= modref_merge_call_site_flags
5144 (sum, cur_summary, cur_summary_lto,
4341b1b1 5145 NULL, NULL,
f6f704fd
JH
5146 node->decl,
5147 e,
5148 caller_ecf_flags,
5149 e->indirect_info->ecf_flags,
5150 false);
85ebbabd
JH
5151 }
5152
5153 if (!cur_summary && !cur_summary_lto)
5154 continue;
5155
5156 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
5157 callee_edge = callee_edge->next_callee)
5158 {
4341b1b1
JH
5159 int ecf_flags = flags_from_decl_or_type
5160 (callee_edge->callee->decl);
85ebbabd
JH
5161 modref_summary *callee_summary = NULL;
5162 modref_summary_lto *callee_summary_lto = NULL;
5163 struct cgraph_node *callee;
5164
4341b1b1 5165 if (ecf_flags & (ECF_CONST | ECF_NOVOPS)
85ebbabd
JH
5166 || !callee_edge->inline_failed)
5167 continue;
16e85390 5168
85ebbabd
JH
5169 /* Get the callee and its summary. */
5170 enum availability avail;
c87ff875 5171 callee = callee_edge->callee->ultimate_alias_target
85ebbabd
JH
5172 (&avail, cur);
5173
5174 /* It is not necessary to re-process calls outside of the
5175 SCC component. */
5176 if (iteration > 0
5177 && (!callee->aux
5178 || ((struct ipa_dfs_info *)cur->aux)->scc_no
5179 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
5180 continue;
5181
5182 escape_summary *sum = escape_summaries->get (callee_edge);
5183 if (!sum)
5184 continue;
5185
5186 if (dump_file)
5187 fprintf (dump_file, " Call to %s\n",
5188 callee_edge->callee->dump_name ());
5189
5190 if (avail <= AVAIL_INTERPOSABLE
5191 || callee_edge->call_stmt_cannot_inline_p)
5192 ;
5193 else
5194 {
5195 if (cur_summary)
5196 callee_summary = optimization_summaries->get (callee);
5197 if (cur_summary_lto)
5198 callee_summary_lto = summaries_lto->get (callee);
5199 }
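	      /* For interposable or non-inlinable callees no summaries are
		 available here, so only the implicit flags derived from ECF
		 bits and fnspec strings get merged at this call site.  */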
5200 changed |= modref_merge_call_site_flags
5201 (sum, cur_summary, cur_summary_lto,
5202 callee_summary, callee_summary_lto,
f6f704fd
JH
5203 node->decl,
5204 callee_edge,
5205 caller_ecf_flags,
5206 ecf_flags,
5207 callee->binds_to_current_def_p ());
85ebbabd
JH
5208 if (dump_file && changed)
5209 {
5210 if (cur_summary)
5211 cur_summary->dump (dump_file);
5212 if (cur_summary_lto)
5213 cur_summary_lto->dump (dump_file);
5214 }
5215 }
5216 }
5217 iteration++;
5218 }
5219 if (dump_file)
5220 fprintf (dump_file,
5221 "Propagation of flags finished in %i iterations\n", iteration);
ada353b8
JH
5222}
5223
18f0873d
JH
5224} /* ANON namespace. */
5225
5226/* Call EDGE was inlined; merge summary from callee to the caller. */
5227
5228void
5229ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
5230{
5231 if (!summaries && !summaries_lto)
5232 return;
5233
5234 struct cgraph_node *to = (edge->caller->inlined_to
5235 ? edge->caller->inlined_to : edge->caller);
5236 class modref_summary *to_info = summaries ? summaries->get (to) : NULL;
5237 class modref_summary_lto *to_info_lto = summaries_lto
5238 ? summaries_lto->get (to) : NULL;
5239
5240 if (!to_info && !to_info_lto)
5241 {
5242 if (summaries)
5243 summaries->remove (edge->callee);
5244 if (summaries_lto)
5245 summaries_lto->remove (edge->callee);
5246 remove_modref_edge_summaries (edge->callee);
5247 return;
5248 }
5249
5250 class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
5251 : NULL;
5252 class modref_summary_lto *callee_info_lto
5253 = summaries_lto ? summaries_lto->get (edge->callee) : NULL;
5254 int flags = flags_from_decl_or_type (edge->callee->decl);
16e85390 5255 /* Combine in outer flags. */
2cadaa1f
JH
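  /* Walk the chain of functions the caller has been inlined into and
     accumulate their ECF flags; the statement after the loop adds the
     flags of the outermost function, where the walk stops.  */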
5256 cgraph_node *n;
5257 for (n = edge->caller; n->inlined_to; n = n->callers->caller)
5258 flags |= flags_from_decl_or_type (n->decl);
5259 flags |= flags_from_decl_or_type (n->decl);
18f0873d
JH
5260 bool ignore_stores = ignore_stores_p (edge->caller->decl, flags);
5261
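  /* If the callee has no summary of its own, treat the inlined body
     conservatively: collapse the caller's load tree (unless the call is
     const/novops) and its store tree (unless stores can be ignored).  */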
5262 if (!callee_info && to_info)
5263 {
5264 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
5265 to_info->loads->collapse ();
5266 if (!ignore_stores)
5267 to_info->stores->collapse ();
5268 }
5269 if (!callee_info_lto && to_info_lto)
5270 {
5271 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
5272 to_info_lto->loads->collapse ();
5273 if (!ignore_stores)
5274 to_info_lto->stores->collapse ();
5275 }
5276 if (callee_info || callee_info_lto)
5277 {
5278 auto_vec <modref_parm_map, 32> parm_map;
1f3a3363
JH
5279 modref_parm_map chain_map;
5280 /* TODO: Once we get jump functions for static chains we could
74a4ece0 5281 compute parm_index. */
18f0873d
JH
5282
5283 compute_parm_map (edge, &parm_map);
5284
5285 if (!ignore_stores)
5286 {
5287 if (to_info && callee_info)
8632f8c6 5288 to_info->stores->merge (to->decl, callee_info->stores, &parm_map,
1f3a3363 5289 &chain_map, false);
18f0873d 5290 if (to_info_lto && callee_info_lto)
8632f8c6
JH
5291 to_info_lto->stores->merge (to->decl, callee_info_lto->stores,
5292 &parm_map, &chain_map, false);
18f0873d
JH
5293 }
5294 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
5295 {
5296 if (to_info && callee_info)
8632f8c6 5297 to_info->loads->merge (to->decl, callee_info->loads, &parm_map,
1f3a3363 5298 &chain_map, false);
18f0873d 5299 if (to_info_lto && callee_info_lto)
8632f8c6
JH
5300 to_info_lto->loads->merge (to->decl, callee_info_lto->loads,
5301 &parm_map, &chain_map, false);
18f0873d
JH
5302 }
5303 }
5304
 5305 /* Now merge escape summaries.
 5306 For every escape to the callee we need to merge callee flags
 5307 and remap the callee's escapes. */
5308 class escape_summary *sum = escape_summaries->get (edge);
5309 int max_escape = -1;
5310 escape_entry *ee;
5311 unsigned int i;
5312
5313 if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
5314 FOR_EACH_VEC_ELT (sum->esc, i, ee)
5315 if ((int)ee->arg > max_escape)
5316 max_escape = ee->arg;
5317
5318 auto_vec <vec <struct escape_map>, 32> emap (max_escape + 1);
5319 emap.safe_grow (max_escape + 1, true);
5320 for (i = 0; (int)i < max_escape + 1; i++)
5321 emap[i] = vNULL;
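  /* emap[i] collects, for the callee's i-th escaping argument, the
     caller-side parameter indices (and whether the escape is direct) it
     now corresponds to; update_escape_summary below uses this map to
     remap escapes recorded for calls inside the inlined callee.  */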
5322
5323 if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
5324 FOR_EACH_VEC_ELT (sum->esc, i, ee)
5325 {
5326 bool needed = false;
16e85390
JH
5327 int implicit_flags = implicit_eaf_flags_for_edge_and_arg
5328 (edge, flags, ignore_stores,
5329 ee->arg);
5330 if (!ee->direct)
5331 implicit_flags = deref_flags (implicit_flags, ignore_stores);
18f0873d
JH
5332 if (to_info && (int)to_info->arg_flags.length () > ee->parm_index)
5333 {
5334 int flags = callee_info
5335 && callee_info->arg_flags.length () > ee->arg
5336 ? callee_info->arg_flags[ee->arg] : 0;
5337 if (!ee->direct)
5338 flags = deref_flags (flags, ignore_stores);
16e85390
JH
5339 flags |= ee->min_flags | implicit_flags;
5340 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
5341 ? to_info->retslot_flags
5342 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
5343 ? to_info->static_chain_flags
5344 : to_info->arg_flags[ee->parm_index];
5345 f &= flags;
5346 if (f)
18f0873d
JH
5347 needed = true;
5348 }
5349 if (to_info_lto
62af7d94 5350 && (int)to_info_lto->arg_flags.length () > ee->parm_index)
18f0873d
JH
5351 {
5352 int flags = callee_info_lto
5353 && callee_info_lto->arg_flags.length () > ee->arg
5354 ? callee_info_lto->arg_flags[ee->arg] : 0;
5355 if (!ee->direct)
5356 flags = deref_flags (flags, ignore_stores);
16e85390
JH
5357 flags |= ee->min_flags | implicit_flags;
5358 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
5359 ? to_info_lto->retslot_flags
5360 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
5361 ? to_info_lto->static_chain_flags
5362 : to_info_lto->arg_flags[ee->parm_index];
5363 f &= flags;
5364 if (f)
18f0873d
JH
5365 needed = true;
5366 }
5367 struct escape_map entry = {ee->parm_index, ee->direct};
5368 if (needed)
5369 emap[ee->arg].safe_push (entry);
5370 }
5371 update_escape_summary (edge->callee, emap, ignore_stores);
5372 for (i = 0; (int)i < max_escape + 1; i++)
5373 emap[i].release ();
5374 if (sum)
5375 escape_summaries->remove (edge);
5376
5377 if (summaries)
5378 {
5379 if (to_info && !to_info->useful_p (flags))
5380 {
5381 if (dump_file)
5382 fprintf (dump_file, "Removed mod-ref summary for %s\n",
5383 to->dump_name ());
5384 summaries->remove (to);
5385 to_info = NULL;
5386 }
5387 else if (to_info && dump_file)
5388 {
5389 if (dump_file)
5390 fprintf (dump_file, "Updated mod-ref summary for %s\n",
5391 to->dump_name ());
5392 to_info->dump (dump_file);
5393 }
5394 if (callee_info)
5395 summaries->remove (edge->callee);
5396 }
5397 if (summaries_lto)
5398 {
5399 if (to_info_lto && !to_info_lto->useful_p (flags))
5400 {
5401 if (dump_file)
5402 fprintf (dump_file, "Removed mod-ref summary for %s\n",
5403 to->dump_name ());
5404 summaries_lto->remove (to);
8c693978 5405 to_info_lto = NULL;
18f0873d
JH
5406 }
5407 else if (to_info_lto && dump_file)
5408 {
5409 if (dump_file)
5410 fprintf (dump_file, "Updated mod-ref summary for %s\n",
5411 to->dump_name ());
5412 to_info_lto->dump (dump_file);
18f0873d
JH
5413 }
5414 if (callee_info_lto)
5415 summaries_lto->remove (edge->callee);
5416 }
5417 if (!to_info && !to_info_lto)
5418 remove_modref_edge_summaries (to);
5419 return;
5420}
5421
ada353b8
JH
5422/* Run the IPA pass. This will take a function's summaries and calls and
 5423 construct new summaries which represent a transitive closure, so that
 5424 the summary of an analyzed function contains information about the loads
 5425 and stores done by the function itself and by any function it calls. */
5426
5427unsigned int
5428pass_ipa_modref::execute (function *)
5429{
71dbabcc 5430 if (!summaries && !summaries_lto)
ada353b8 5431 return 0;
494bdadf 5432 bool pureconst = false;
ada353b8 5433
71dbabcc
JH
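  /* From here on the summaries computed by local analysis serve as the
     optimization summaries; the propagation below updates them in place.  */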
5434 if (optimization_summaries)
5435 ggc_delete (optimization_summaries);
5436 optimization_summaries = summaries;
5437 summaries = NULL;
5438
ada353b8
JH
5439 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
5440 symtab->cgraph_count);
5441 int order_pos;
5442 order_pos = ipa_reduced_postorder (order, true, ignore_edge);
5443 int i;
5444
5445 /* Iterate over all strongly connected components in post-order. */
5446 for (i = 0; i < order_pos; i++)
5447 {
5448 /* Get the component's representative. That's just any node in the
5449 component from which we can traverse the entire component. */
5450 struct cgraph_node *component_node = order[i];
5451
5452 if (dump_file)
5453 fprintf (dump_file, "\n\nStart of SCC component\n");
5454
494bdadf 5455 pureconst |= modref_propagate_in_scc (component_node);
85ebbabd 5456 modref_propagate_flags_in_scc (component_node);
e0040bc3
JH
5457 if (optimization_summaries)
5458 for (struct cgraph_node *cur = component_node; cur;
5459 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
5460 if (modref_summary *sum = optimization_summaries->get (cur))
5aa91072 5461 sum->finalize (cur->decl);
85ebbabd
JH
5462 if (dump_file)
5463 modref_propagate_dump_scc (component_node);
d119f34c 5464 }
fe90c504
JH
5465 cgraph_node *node;
5466 FOR_EACH_FUNCTION (node)
5467 update_signature (node);
71dbabcc
JH
5468 if (summaries_lto)
5469 ((modref_summaries_lto *)summaries_lto)->propagated = true;
d119f34c 5470 ipa_free_postorder_info ();
a0e6e49d 5471 free (order);
6cef01c3
JH
5472 delete fnspec_summaries;
5473 fnspec_summaries = NULL;
85ebbabd
JH
5474 delete escape_summaries;
5475 escape_summaries = NULL;
494bdadf
JH
5476
 5477 /* If we possibly made constructors const/pure we may need to remove
 5478 them. */
5479 return pureconst ? TODO_remove_functions : 0;
d119f34c
JH
5480}
5481
39b3b1bd
JH
5482/* Summaries must stay alive until end of compilation. */
5483
5484void
5485ipa_modref_c_finalize ()
5486{
71dbabcc
JH
5487 if (optimization_summaries)
5488 ggc_delete (optimization_summaries);
5489 optimization_summaries = NULL;
71dbabcc 5490 if (summaries_lto)
85ebbabd
JH
5491 ggc_delete (summaries_lto);
5492 summaries_lto = NULL;
6cef01c3
JH
5493 if (fnspec_summaries)
5494 delete fnspec_summaries;
5495 fnspec_summaries = NULL;
85ebbabd
JH
5496 if (escape_summaries)
5497 delete escape_summaries;
5498 escape_summaries = NULL;
39b3b1bd
JH
5499}
5500
d119f34c 5501#include "gt-ipa-modref.h"