/* Search for references that a function loads or stores.
   Copyright (C) 2020-2021 Free Software Foundation, Inc.
   Contributed by David Cepelik and Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The mod/ref pass records a summary of the loads and stores performed by
   each function.  This is later used by alias analysis to disambiguate
   memory accesses across function calls.

   This file contains a tree pass and an IPA pass.  Both perform the same
   analysis; however, the tree pass is executed during the early and late
   optimization passes to propagate info downwards in the compilation order,
   while the IPA pass propagates across the callgraph, handles recursion, and
   works on the whole program during link-time analysis.

   LTO mode differs from the local mode by recording types rather than alias
   sets; the types are translated to alias sets later.  This is necessary in
   order to stream the information, because alias sets are rebuilt at
   stream-in time and may not correspond to the ones seen during analysis.
   For this reason part of the analysis is duplicated.

   The following information is computed:
     1) a load/store access tree, described in ipa-modref-tree.h, that is
        used by tree-ssa-alias to disambiguate loads and stores;
     2) EAF flags used by points-to analysis (in tree-ssa-structalias)
        and defined in tree-core.h.
   Both are stored in optimization_summaries.

   There are multiple summaries computed and used during the propagation:
     - summaries holds summaries from analysis to IPA propagation time.
     - summaries_lto is the same as summaries but holds them in a format
       that can be streamed (as described above).
     - fnspec_summary holds fnspec strings for calls.  This is necessary
       because gimple_call_fnspec performs additional analysis besides
       looking at the callee fndecl.
     - escape_summary holds escape points for a given call edge, that is,
       a vector recording which function parameters may escape to a
       function call (and with what parameter index).  */

56#include "config.h"
57#include "system.h"
58#include "coretypes.h"
59#include "backend.h"
60#include "tree.h"
61#include "gimple.h"
62#include "alloc-pool.h"
63#include "tree-pass.h"
64#include "gimple-iterator.h"
65#include "tree-dfa.h"
66#include "cgraph.h"
67#include "ipa-utils.h"
68#include "symbol-summary.h"
69#include "gimple-pretty-print.h"
70#include "gimple-walk.h"
71#include "print-tree.h"
72#include "tree-streamer.h"
73#include "alias.h"
74#include "calls.h"
75#include "ipa-modref-tree.h"
76#include "ipa-modref.h"
e977dd5e
JH
77#include "value-range.h"
78#include "ipa-prop.h"
79#include "ipa-fnsummary.h"
617695cd 80#include "attr-fnspec.h"
ae7a23a3 81#include "symtab-clones.h"
520d5ad3
JH
82#include "gimple-ssa.h"
83#include "tree-phinodes.h"
84#include "tree-ssa-operands.h"
85#include "ssa-iterators.h"
86#include "stringpool.h"
87#include "tree-ssanames.h"
008e7397 88#include "attribs.h"
b8ef019a 89#include "tree-cfg.h"
992644c3 90#include "tree-eh.h"
520d5ad3 91
8da8ed43 92
85ebbabd 93namespace {
d119f34c 94
/* We record fnspec specifiers for call edges since they depend on actual
   gimple statements.  */

98class fnspec_summary
99{
100public:
101 char *fnspec;
102
103 fnspec_summary ()
104 : fnspec (NULL)
105 {
106 }
107
108 ~fnspec_summary ()
109 {
110 free (fnspec);
111 }
112};
113
114/* Summary holding fnspec string for a given call. */
115
116class fnspec_summaries_t : public call_summary <fnspec_summary *>
117{
118public:
119 fnspec_summaries_t (symbol_table *symtab)
120 : call_summary <fnspec_summary *> (symtab) {}
121 /* Hook that is called by summary when an edge is duplicated. */
122 virtual void duplicate (cgraph_edge *,
123 cgraph_edge *,
124 fnspec_summary *src,
125 fnspec_summary *dst)
126 {
127 dst->fnspec = xstrdup (src->fnspec);
128 }
129};
130
131static fnspec_summaries_t *fnspec_summaries = NULL;
132
/* Escape summary holds a vector of param indexes that escape to
   a given call.  */
struct escape_entry
{
  /* Parameter that escapes at a given call.  */
  int parm_index;
  /* Argument it escapes to.  */
  unsigned int arg;
  /* Minimal flags known about the argument.  */
  eaf_flags_t min_flags;
  /* Does it escape directly or indirectly?  */
  bool direct;
};
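
/* Illustrative example (hypothetical values, for exposition only): if
   parameter 2 of the analyzed function is passed as argument 0 of some call
   and local analysis already proved the flags LOCAL_FLAGS for it, the
   analysis records roughly

     escape_entry e = {2, 0, LOCAL_FLAGS, true};

   where direct == true means the value itself (rather than memory it points
   to) escapes into that argument.  */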
146
147/* Dump EAF flags. */
148
149static void
150dump_eaf_flags (FILE *out, int flags, bool newline = true)
151{
85ebbabd
JH
152 if (flags & EAF_UNUSED)
153 fprintf (out, " unused");
d70ef656
JH
154 if (flags & EAF_NO_DIRECT_CLOBBER)
155 fprintf (out, " no_direct_clobber");
156 if (flags & EAF_NO_INDIRECT_CLOBBER)
157 fprintf (out, " no_indirect_clobber");
158 if (flags & EAF_NO_DIRECT_ESCAPE)
159 fprintf (out, " no_direct_escape");
160 if (flags & EAF_NO_INDIRECT_ESCAPE)
161 fprintf (out, " no_indirect_escape");
f1979156
JH
162 if (flags & EAF_NOT_RETURNED_DIRECTLY)
163 fprintf (out, " not_returned_directly");
d70ef656
JH
164 if (flags & EAF_NOT_RETURNED_INDIRECTLY)
165 fprintf (out, " not_returned_indirectly");
166 if (flags & EAF_NO_DIRECT_READ)
167 fprintf (out, " no_direct_read");
168 if (flags & EAF_NO_INDIRECT_READ)
169 fprintf (out, " no_indirect_read");
85ebbabd
JH
170 if (newline)
171 fprintf (out, "\n");
172}
173
174struct escape_summary
175{
176 auto_vec <escape_entry> esc;
177 void dump (FILE *out)
178 {
179 for (unsigned int i = 0; i < esc.length (); i++)
180 {
181 fprintf (out, " parm %i arg %i %s min:",
182 esc[i].parm_index,
183 esc[i].arg,
184 esc[i].direct ? "(direct)" : "(indirect)");
185 dump_eaf_flags (out, esc[i].min_flags, false);
186 }
187 fprintf (out, "\n");
188 }
189};
190
191class escape_summaries_t : public call_summary <escape_summary *>
192{
193public:
194 escape_summaries_t (symbol_table *symtab)
195 : call_summary <escape_summary *> (symtab) {}
196 /* Hook that is called by summary when an edge is duplicated. */
197 virtual void duplicate (cgraph_edge *,
198 cgraph_edge *,
199 escape_summary *src,
200 escape_summary *dst)
201 {
202 dst->esc = src->esc.copy ();
203 }
204};
205
206static escape_summaries_t *escape_summaries = NULL;
207
208} /* ANON namespace: GTY annotated summaries can not be anonymous. */
209
210
d119f34c
JH
211/* Class (from which there is one global instance) that holds modref summaries
212 for all analyzed functions. */
6cef01c3 213
d119f34c
JH
214class GTY((user)) modref_summaries
215 : public fast_function_summary <modref_summary *, va_gc>
216{
217public:
218 modref_summaries (symbol_table *symtab)
219 : fast_function_summary <modref_summary *, va_gc> (symtab) {}
220 virtual void insert (cgraph_node *, modref_summary *state);
221 virtual void duplicate (cgraph_node *src_node,
222 cgraph_node *dst_node,
223 modref_summary *src_data,
224 modref_summary *dst_data);
c9da53d6
JH
225 static modref_summaries *create_ggc (symbol_table *symtab)
226 {
227 return new (ggc_alloc_no_dtor<modref_summaries> ())
228 modref_summaries (symtab);
229 }
d119f34c
JH
230};
231
71dbabcc
JH
232class modref_summary_lto;
233
234/* Class (from which there is one global instance) that holds modref summaries
235 for all analyzed functions. */
6cef01c3 236
71dbabcc
JH
237class GTY((user)) modref_summaries_lto
238 : public fast_function_summary <modref_summary_lto *, va_gc>
239{
240public:
241 modref_summaries_lto (symbol_table *symtab)
242 : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
243 propagated (false) {}
244 virtual void insert (cgraph_node *, modref_summary_lto *state);
245 virtual void duplicate (cgraph_node *src_node,
246 cgraph_node *dst_node,
247 modref_summary_lto *src_data,
248 modref_summary_lto *dst_data);
249 static modref_summaries_lto *create_ggc (symbol_table *symtab)
250 {
251 return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
252 modref_summaries_lto (symtab);
253 }
254 bool propagated;
255};
256
257/* Global variable holding all modref summaries
258 (from analysis to IPA propagation time). */
6cef01c3 259
71dbabcc
JH
260static GTY(()) fast_function_summary <modref_summary *, va_gc>
261 *summaries;
262
8a2fd716 263/* Global variable holding all modref optimization summaries
71dbabcc 264 (from IPA propagation time or used by local optimization pass). */
6cef01c3 265
71dbabcc
JH
266static GTY(()) fast_function_summary <modref_summary *, va_gc>
267 *optimization_summaries;
268
269/* LTO summaries hold info from analysis to LTO streaming or from LTO
270 stream-in through propagation to LTO stream-out. */
6cef01c3 271
71dbabcc
JH
272static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
273 *summaries_lto;
d119f34c
JH
274
275/* Summary for a single function which this pass produces. */
276
277modref_summary::modref_summary ()
a70c0512 278 : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
a34edf9a
JH
279 writes_errno (false), side_effects (false), nondeterministic (false),
280 calls_interposable (false), global_memory_read (false),
5aa91072 281 global_memory_written (false), try_dse (false)
d119f34c
JH
282{
283}
284
285modref_summary::~modref_summary ()
286{
287 if (loads)
288 ggc_delete (loads);
289 if (stores)
290 ggc_delete (stores);
d119f34c
JH
291}
292
4341b1b1
JH
/* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
   useful to track.  If RETURNS_VOID is true, additionally clear
   EAF_NOT_RETURNED.  */
296static int
297remove_useless_eaf_flags (int eaf_flags, int ecf_flags, bool returns_void)
298{
f6f704fd 299 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
4341b1b1
JH
300 eaf_flags &= ~implicit_const_eaf_flags;
301 else if (ecf_flags & ECF_PURE)
302 eaf_flags &= ~implicit_pure_eaf_flags;
303 else if ((ecf_flags & ECF_NORETURN) || returns_void)
d70ef656 304 eaf_flags &= ~(EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY);
4341b1b1
JH
305 return eaf_flags;
306}
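
/* Worked example (illustration only): for a call to a function returning
   void, the "not returned" flags carry no extra information, so

     remove_useless_eaf_flags (EAF_UNUSED | EAF_NOT_RETURNED_DIRECTLY,
                               0, true)

   reduces to just EAF_UNUSED.  */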
307
85ebbabd
JH
308/* Return true if FLAGS holds some useful information. */
309
310static bool
8da8ed43 311eaf_flags_useful_p (vec <eaf_flags_t> &flags, int ecf_flags)
85ebbabd
JH
312{
313 for (unsigned i = 0; i < flags.length (); i++)
4341b1b1
JH
314 if (remove_useless_eaf_flags (flags[i], ecf_flags, false))
315 return true;
85ebbabd
JH
316 return false;
317}
318
319/* Return true if summary is potentially useful for optimization.
320 If CHECK_FLAGS is false assume that arg_flags are useful. */
67c935c8
JH
321
322bool
85ebbabd 323modref_summary::useful_p (int ecf_flags, bool check_flags)
67c935c8 324{
85ebbabd 325 if (arg_flags.length () && !check_flags)
520d5ad3 326 return true;
85ebbabd
JH
327 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
328 return true;
329 arg_flags.release ();
b8ef019a
JH
330 if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
331 return true;
a70c0512
JH
332 if (check_flags
333 && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
334 return true;
f6f704fd 335 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
a34edf9a
JH
336 return ((!side_effects || !nondeterministic)
337 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
71dbabcc 338 if (loads && !loads->every_base)
67c935c8 339 return true;
64f3e71c
JH
340 else
341 kills.release ();
67c935c8 342 if (ecf_flags & ECF_PURE)
a34edf9a
JH
343 return ((!side_effects || !nondeterministic)
344 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
71dbabcc 345 return stores && !stores->every_base;
67c935c8
JH
346}
347
71dbabcc
JH
348/* Single function summary used for LTO. */
349
350typedef modref_tree <tree> modref_records_lto;
351struct GTY(()) modref_summary_lto
352{
  /* Loads and stores in the function, using types rather than alias sets.

     This is necessary to make the information streamable for LTO, but it is
     also more verbose and thus more likely to hit the limits.  */
357 modref_records_lto *loads;
358 modref_records_lto *stores;
64f3e71c 359 auto_vec<modref_access_node> GTY((skip)) kills;
8da8ed43 360 auto_vec<eaf_flags_t> GTY((skip)) arg_flags;
b8ef019a 361 eaf_flags_t retslot_flags;
a70c0512 362 eaf_flags_t static_chain_flags;
a34edf9a
JH
363 unsigned writes_errno : 1;
364 unsigned side_effects : 1;
365 unsigned nondeterministic : 1;
366 unsigned calls_interposable : 1;
71dbabcc
JH
367
368 modref_summary_lto ();
369 ~modref_summary_lto ();
370 void dump (FILE *);
85ebbabd 371 bool useful_p (int ecf_flags, bool check_flags = true);
71dbabcc
JH
372};
373
374/* Summary for a single function which this pass produces. */
375
376modref_summary_lto::modref_summary_lto ()
a70c0512 377 : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
a34edf9a
JH
378 writes_errno (false), side_effects (false), nondeterministic (false),
379 calls_interposable (false)
71dbabcc
JH
380{
381}
382
383modref_summary_lto::~modref_summary_lto ()
384{
385 if (loads)
386 ggc_delete (loads);
387 if (stores)
388 ggc_delete (stores);
389}
390
391
85ebbabd
JH
392/* Return true if lto summary is potentially useful for optimization.
393 If CHECK_FLAGS is false assume that arg_flags are useful. */
67c935c8
JH
394
395bool
85ebbabd 396modref_summary_lto::useful_p (int ecf_flags, bool check_flags)
67c935c8 397{
85ebbabd
JH
398 if (arg_flags.length () && !check_flags)
399 return true;
400 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
401 return true;
402 arg_flags.release ();
b8ef019a
JH
403 if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
404 return true;
a70c0512
JH
405 if (check_flags
406 && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
407 return true;
f6f704fd 408 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
a34edf9a
JH
409 return ((!side_effects || !nondeterministic)
410 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
67c935c8
JH
411 if (loads && !loads->every_base)
412 return true;
413 if (ecf_flags & ECF_PURE)
a34edf9a
JH
414 return ((!side_effects || !nondeterministic)
415 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
e24817aa 416 return stores && !stores->every_base;
67c935c8
JH
417}
418
d119f34c
JH
419/* Dump records TT to OUT. */
420
421static void
422dump_records (modref_records *tt, FILE *out)
423{
424 fprintf (out, " Limits: %i bases, %i refs\n",
425 (int)tt->max_bases, (int)tt->max_refs);
426 if (tt->every_base)
427 {
428 fprintf (out, " Every base\n");
429 return;
430 }
431 size_t i;
432 modref_base_node <alias_set_type> *n;
433 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
434 {
435 fprintf (out, " Base %i: alias set %i\n", (int)i, n->base);
436 if (n->every_ref)
437 {
438 fprintf (out, " Every ref\n");
439 continue;
440 }
441 size_t j;
442 modref_ref_node <alias_set_type> *r;
443 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
444 {
445 fprintf (out, " Ref %i: alias set %i\n", (int)j, r->ref);
c33f4742
JH
446 if (r->every_access)
447 {
ada353b8 448 fprintf (out, " Every access\n");
c33f4742
JH
449 continue;
450 }
451 size_t k;
452 modref_access_node *a;
453 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
e30bf330
JH
454 {
455 fprintf (out, " access:");
456 a->dump (out);
457 }
d119f34c
JH
458 }
459 }
460}
461
462/* Dump records TT to OUT. */
463
464static void
465dump_lto_records (modref_records_lto *tt, FILE *out)
466{
467 fprintf (out, " Limits: %i bases, %i refs\n",
468 (int)tt->max_bases, (int)tt->max_refs);
469 if (tt->every_base)
470 {
471 fprintf (out, " Every base\n");
472 return;
473 }
474 size_t i;
475 modref_base_node <tree> *n;
476 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
477 {
478 fprintf (out, " Base %i:", (int)i);
479 print_generic_expr (dump_file, n->base);
480 fprintf (out, " (alias set %i)\n",
9044db88 481 n->base ? get_alias_set (n->base) : 0);
d119f34c
JH
482 if (n->every_ref)
483 {
484 fprintf (out, " Every ref\n");
485 continue;
486 }
487 size_t j;
488 modref_ref_node <tree> *r;
489 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
490 {
491 fprintf (out, " Ref %i:", (int)j);
492 print_generic_expr (dump_file, r->ref);
493 fprintf (out, " (alias set %i)\n",
9044db88 494 r->ref ? get_alias_set (r->ref) : 0);
c33f4742
JH
495 if (r->every_access)
496 {
56cb815b 497 fprintf (out, " Every access\n");
c33f4742
JH
498 continue;
499 }
500 size_t k;
501 modref_access_node *a;
502 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
e30bf330
JH
503 {
504 fprintf (out, " access:");
505 a->dump (out);
506 }
d119f34c
JH
507 }
508 }
509}
510
85ebbabd 511/* Dump all escape points of NODE to OUT. */
520d5ad3
JH
512
513static void
85ebbabd 514dump_modref_edge_summaries (FILE *out, cgraph_node *node, int depth)
520d5ad3 515{
85ebbabd
JH
516 int i = 0;
517 if (!escape_summaries)
518 return;
519 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
520 {
521 class escape_summary *sum = escape_summaries->get (e);
522 if (sum)
523 {
524 fprintf (out, "%*sIndirect call %i in %s escapes:",
525 depth, "", i, node->dump_name ());
526 sum->dump (out);
527 }
528 i++;
529 }
530 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
531 {
532 if (!e->inline_failed)
533 dump_modref_edge_summaries (out, e->callee, depth + 1);
534 class escape_summary *sum = escape_summaries->get (e);
535 if (sum)
536 {
537 fprintf (out, "%*sCall %s->%s escapes:", depth, "",
538 node->dump_name (), e->callee->dump_name ());
539 sum->dump (out);
540 }
541 class fnspec_summary *fsum = fnspec_summaries->get (e);
542 if (fsum)
543 {
544 fprintf (out, "%*sCall %s->%s fnspec: %s\n", depth, "",
545 node->dump_name (), e->callee->dump_name (),
546 fsum->fnspec);
547 }
548 }
549}
550
551/* Remove all call edge summaries associated with NODE. */
552
553static void
554remove_modref_edge_summaries (cgraph_node *node)
555{
556 if (!escape_summaries)
557 return;
558 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
559 escape_summaries->remove (e);
560 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
561 {
562 if (!e->inline_failed)
563 remove_modref_edge_summaries (e->callee);
564 escape_summaries->remove (e);
565 fnspec_summaries->remove (e);
566 }
520d5ad3
JH
567}
568
d119f34c
JH
569/* Dump summary. */
570
571void
572modref_summary::dump (FILE *out)
573{
6cef01c3
JH
574 if (loads)
575 {
576 fprintf (out, " loads:\n");
577 dump_records (loads, out);
578 }
579 if (stores)
580 {
581 fprintf (out, " stores:\n");
582 dump_records (stores, out);
583 }
64f3e71c
JH
584 if (kills.length ())
585 {
586 fprintf (out, " kills:\n");
587 for (auto kill : kills)
588 {
589 fprintf (out, " ");
590 kill.dump (out);
591 }
592 }
617695cd
JH
593 if (writes_errno)
594 fprintf (out, " Writes errno\n");
992644c3
JH
595 if (side_effects)
596 fprintf (out, " Side effects\n");
a34edf9a
JH
597 if (nondeterministic)
598 fprintf (out, " Nondeterministic\n");
599 if (calls_interposable)
600 fprintf (out, " Calls interposable\n");
e0040bc3
JH
601 if (global_memory_read)
602 fprintf (out, " Global memory read\n");
603 if (global_memory_written)
604 fprintf (out, " Global memory written\n");
5aa91072
JH
605 if (try_dse)
606 fprintf (out, " Try dse\n");
520d5ad3
JH
607 if (arg_flags.length ())
608 {
609 for (unsigned int i = 0; i < arg_flags.length (); i++)
610 if (arg_flags[i])
611 {
612 fprintf (out, " parm %i flags:", i);
613 dump_eaf_flags (out, arg_flags[i]);
614 }
615 }
b8ef019a
JH
616 if (retslot_flags)
617 {
618 fprintf (out, " Retslot flags:");
619 dump_eaf_flags (out, retslot_flags);
620 }
a70c0512
JH
621 if (static_chain_flags)
622 {
623 fprintf (out, " Static chain flags:");
624 dump_eaf_flags (out, static_chain_flags);
625 }
71dbabcc
JH
626}
627
628/* Dump summary. */
629
630void
631modref_summary_lto::dump (FILE *out)
632{
56cb815b
JH
633 fprintf (out, " loads:\n");
634 dump_lto_records (loads, out);
635 fprintf (out, " stores:\n");
636 dump_lto_records (stores, out);
6cef01c3
JH
637 if (writes_errno)
638 fprintf (out, " Writes errno\n");
992644c3
JH
639 if (side_effects)
640 fprintf (out, " Side effects\n");
a34edf9a
JH
641 if (nondeterministic)
642 fprintf (out, " Nondeterministic\n");
643 if (calls_interposable)
644 fprintf (out, " Calls interposable\n");
85ebbabd
JH
645 if (arg_flags.length ())
646 {
647 for (unsigned int i = 0; i < arg_flags.length (); i++)
648 if (arg_flags[i])
649 {
650 fprintf (out, " parm %i flags:", i);
651 dump_eaf_flags (out, arg_flags[i]);
652 }
653 }
b8ef019a
JH
654 if (retslot_flags)
655 {
656 fprintf (out, " Retslot flags:");
657 dump_eaf_flags (out, retslot_flags);
658 }
a70c0512
JH
659 if (static_chain_flags)
660 {
661 fprintf (out, " Static chain flags:");
662 dump_eaf_flags (out, static_chain_flags);
663 }
d119f34c
JH
664}
665
/* Called after the summary is produced and before it is used by local
   analysis.  Can be called multiple times in case the summary needs to be
   updated for a changed signature.  FUN is the decl of the function the
   summary is attached to.  */
e0040bc3 669void
5aa91072 670modref_summary::finalize (tree fun)
e0040bc3
JH
671{
672 global_memory_read = !loads || loads->global_access_p ();
673 global_memory_written = !stores || stores->global_access_p ();
5aa91072
JH
674
675 /* We can do DSE if we know function has no side effects and
676 we can analyse all stores. Disable dse if there are too many
677 stores to try. */
678 if (side_effects || global_memory_written || writes_errno)
679 try_dse = false;
680 else
681 {
682 try_dse = true;
683 size_t i, j, k;
684 int num_tests = 0, max_tests
685 = opt_for_fn (fun, param_modref_max_tests);
686 modref_base_node <alias_set_type> *base_node;
687 modref_ref_node <alias_set_type> *ref_node;
688 modref_access_node *access_node;
689 FOR_EACH_VEC_SAFE_ELT (stores->bases, i, base_node)
690 {
691 if (base_node->every_ref)
692 {
693 try_dse = false;
694 break;
695 }
696 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
697 {
698 if (base_node->every_ref)
699 {
700 try_dse = false;
701 break;
702 }
703 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
704 if (num_tests++ > max_tests
705 || !access_node->parm_offset_known)
706 {
707 try_dse = false;
708 break;
709 }
710 if (!try_dse)
711 break;
712 }
713 if (!try_dse)
714 break;
715 }
716 }
e0040bc3
JH
717}
718
d119f34c
JH
719/* Get function summary for FUNC if it exists, return NULL otherwise. */
720
721modref_summary *
722get_modref_function_summary (cgraph_node *func)
723{
724 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
71dbabcc 725 if (!optimization_summaries)
d119f34c
JH
726 return NULL;
727
728 /* A single function body may be represented by multiple symbols with
729 different visibility. For example, if FUNC is an interposable alias,
730 we don't want to return anything, even if we have summary for the target
731 function. */
732 enum availability avail;
733 func = func->function_or_virtual_thunk_symbol
520d5ad3
JH
734 (&avail, current_function_decl ?
735 cgraph_node::get (current_function_decl) : NULL);
d119f34c
JH
736 if (avail <= AVAIL_INTERPOSABLE)
737 return NULL;
738
71dbabcc
JH
739 modref_summary *r = optimization_summaries->get (func);
740 return r;
d119f34c
JH
741}
742
6dc90c4d
JH
/* Get function summary for CALL if it exists, return NULL otherwise.
   If INTERPOSED is non-NULL, set it to indicate whether the function may
   not bind to the current def.  In that case loads from the function
   sometimes need to be ignored.  */
747
748modref_summary *
749get_modref_function_summary (gcall *call, bool *interposed)
750{
751 tree callee = gimple_call_fndecl (call);
752 if (!callee)
753 return NULL;
754 struct cgraph_node *node = cgraph_node::get (callee);
755 if (!node)
756 return NULL;
757 modref_summary *r = get_modref_function_summary (node);
758 if (interposed && r)
759 *interposed = r->calls_interposable
760 || !node->binds_to_current_def_p ();
761 return r;
762}
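
/* Illustrative sketch (not part of the pass): a consumer such as
   tree-ssa-alias would typically query the summary along these lines

     bool interposed;
     if (modref_summary *sum
	   = get_modref_function_summary (call, &interposed))
       if (!interposed && sum->stores && !sum->stores->every_base)
	 ;  /* Walk sum->stores to disambiguate the call.  */

   The exact call sites live in the alias oracle, not here.  */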
763
764
18f0873d
JH
765namespace {
766
c33f4742
JH
767/* Construct modref_access_node from REF. */
768static modref_access_node
769get_access (ao_ref *ref)
770{
c33f4742
JH
771 tree base;
772
c34db4b6
JH
773 base = ao_ref_base (ref);
774 modref_access_node a = {ref->offset, ref->size, ref->max_size,
1f3a3363 775 0, MODREF_UNKNOWN_PARM, false, 0};
c33f4742
JH
776 if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
777 {
2bdf324f 778 tree memref = base;
c33f4742 779 base = TREE_OPERAND (base, 0);
1f3a3363 780
c33f4742
JH
781 if (TREE_CODE (base) == SSA_NAME
782 && SSA_NAME_IS_DEFAULT_DEF (base)
783 && TREE_CODE (SSA_NAME_VAR (base)) == PARM_DECL)
784 {
785 a.parm_index = 0;
1f3a3363
JH
786 if (cfun->static_chain_decl
787 && base == ssa_default_def (cfun, cfun->static_chain_decl))
788 a.parm_index = MODREF_STATIC_CHAIN_PARM;
2bdf324f 789 else
1f3a3363
JH
790 for (tree t = DECL_ARGUMENTS (current_function_decl);
791 t != SSA_NAME_VAR (base); t = DECL_CHAIN (t))
792 a.parm_index++;
793 }
794 else
795 a.parm_index = MODREF_UNKNOWN_PARM;
796
797 if (a.parm_index != MODREF_UNKNOWN_PARM
798 && TREE_CODE (memref) == MEM_REF)
799 {
800 a.parm_offset_known
801 = wi::to_poly_wide (TREE_OPERAND
802 (memref, 1)).to_shwi (&a.parm_offset);
c33f4742
JH
803 }
804 else
1f3a3363 805 a.parm_offset_known = false;
c33f4742
JH
806 }
807 else
1f3a3363 808 a.parm_index = MODREF_UNKNOWN_PARM;
c33f4742
JH
809 return a;
810}
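
/* Worked example (made-up names, for exposition): for an access like
   MEM[(int *)ptr_2(D) + 16B] where ptr_2(D) is the default def of the second
   PARM_DECL, the node above gets parm_index == 1, parm_offset_known == true
   and parm_offset == 16, while offset, size and max_size describe the access
   itself as taken from the ao_ref.  */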
811
d119f34c
JH
812/* Record access into the modref_records data structure. */
813
814static void
64f3e71c 815record_access (modref_records *tt, ao_ref *ref, modref_access_node &a)
d119f34c
JH
816{
817 alias_set_type base_set = !flag_strict_aliasing ? 0
818 : ao_ref_base_alias_set (ref);
819 alias_set_type ref_set = !flag_strict_aliasing ? 0
820 : (ao_ref_alias_set (ref));
821 if (dump_file)
822 {
4898e958
JH
823 fprintf (dump_file, " - Recording base_set=%i ref_set=%i ",
824 base_set, ref_set);
e30bf330 825 a.dump (dump_file);
d119f34c 826 }
5c85f295 827 tt->insert (base_set, ref_set, a, false);
d119f34c
JH
828}
829
/* IPA version of record_access.  */
831
832static void
64f3e71c 833record_access_lto (modref_records_lto *tt, ao_ref *ref, modref_access_node &a)
d119f34c
JH
834{
  /* get_alias_set sometimes uses a different type to compute the alias set
     than TREE_TYPE (base).  Do the same adjustments here.  */
837 tree base_type = NULL_TREE, ref_type = NULL_TREE;
838 if (flag_strict_aliasing)
839 {
840 tree base;
841
842 base = ref->ref;
843 while (handled_component_p (base))
844 base = TREE_OPERAND (base, 0);
845
846 base_type = reference_alias_ptr_type_1 (&base);
847
848 if (!base_type)
849 base_type = TREE_TYPE (base);
850 else
851 base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
852 ? NULL_TREE : TREE_TYPE (base_type);
853
854 tree ref_expr = ref->ref;
855 ref_type = reference_alias_ptr_type_1 (&ref_expr);
856
857 if (!ref_type)
858 ref_type = TREE_TYPE (ref_expr);
859 else
860 ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
861 ? NULL_TREE : TREE_TYPE (ref_type);
862
863 /* Sanity check that we are in sync with what get_alias_set does. */
864 gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
865 || get_alias_set (base_type)
866 == ao_ref_base_alias_set (ref));
867 gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
868 || get_alias_set (ref_type)
869 == ao_ref_alias_set (ref));
870
871 /* Do not bother to record types that have no meaningful alias set.
872 Also skip variably modified types since these go to local streams. */
873 if (base_type && (!get_alias_set (base_type)
874 || variably_modified_type_p (base_type, NULL_TREE)))
875 base_type = NULL_TREE;
876 if (ref_type && (!get_alias_set (ref_type)
877 || variably_modified_type_p (ref_type, NULL_TREE)))
878 ref_type = NULL_TREE;
879 }
880 if (dump_file)
881 {
882 fprintf (dump_file, " - Recording base type:");
883 print_generic_expr (dump_file, base_type);
884 fprintf (dump_file, " (alias set %i) ref type:",
885 base_type ? get_alias_set (base_type) : 0);
886 print_generic_expr (dump_file, ref_type);
4898e958
JH
887 fprintf (dump_file, " (alias set %i) ",
888 ref_type ? get_alias_set (ref_type) : 0);
e30bf330 889 a.dump (dump_file);
d119f34c
JH
890 }
891
5c85f295 892 tt->insert (base_type, ref_type, a, false);
d119f34c
JH
893}
894
895/* Returns true if and only if we should store the access to EXPR.
896 Some accesses, e.g. loads from automatic variables, are not interesting. */
897
898static bool
899record_access_p (tree expr)
900{
e977dd5e 901 if (refs_local_or_readonly_memory_p (expr))
d119f34c
JH
902 {
903 if (dump_file)
e977dd5e 904 fprintf (dump_file, " - Read-only or local, ignoring.\n");
d119f34c
JH
905 return false;
906 }
d119f34c
JH
907 return true;
908}
909
a34edf9a
JH
/* Return true if the ECF flags say that nondeterminism can be ignored.  */
911
912static bool
913ignore_nondeterminism_p (tree caller, int flags)
914{
915 if ((flags & (ECF_CONST | ECF_PURE))
916 && !(flags & ECF_LOOPING_CONST_OR_PURE))
917 return true;
918 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
919 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
920 return true;
921 return false;
922}
923
85ebbabd
JH
/* Return true if the ECF flags say that the return value can be ignored.  */
925
926static bool
927ignore_retval_p (tree caller, int flags)
928{
929 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
930 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
931 return true;
932 return false;
933}
934
d119f34c
JH
/* Return true if the ECF flags say that stores can be ignored.  */
936
937static bool
938ignore_stores_p (tree caller, int flags)
939{
85ebbabd 940 if (flags & (ECF_PURE | ECF_CONST | ECF_NOVOPS))
d119f34c
JH
941 return true;
942 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
943 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
944 return true;
945 return false;
946}
947
1f3a3363 948/* Determine parm_map for argument OP. */
617695cd
JH
949
950modref_parm_map
1f3a3363 951parm_map_for_arg (tree op)
617695cd 952{
617695cd
JH
953 bool offset_known;
954 poly_int64 offset;
955 struct modref_parm_map parm_map;
956
ea937e7d
JH
957 parm_map.parm_offset_known = false;
958 parm_map.parm_offset = 0;
959
617695cd
JH
960 offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
961 if (TREE_CODE (op) == SSA_NAME
962 && SSA_NAME_IS_DEFAULT_DEF (op)
963 && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
964 {
965 int index = 0;
966 for (tree t = DECL_ARGUMENTS (current_function_decl);
967 t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
968 {
969 if (!t)
970 {
1f3a3363 971 index = MODREF_UNKNOWN_PARM;
617695cd
JH
972 break;
973 }
974 index++;
975 }
976 parm_map.parm_index = index;
977 parm_map.parm_offset_known = offset_known;
978 parm_map.parm_offset = offset;
979 }
980 else if (points_to_local_or_readonly_memory_p (op))
1f3a3363 981 parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
617695cd 982 else
1f3a3363 983 parm_map.parm_index = MODREF_UNKNOWN_PARM;
617695cd
JH
984 return parm_map;
985}
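
/* Worked example (for exposition only): if the analyzed function passes
   p_1(D) + 8, where p_1(D) is the default def of its first parameter, the
   resulting map is roughly {parm_index == 0, parm_offset_known == true,
   parm_offset == 8}; passing the address of a local variable instead yields
   parm_index == MODREF_LOCAL_MEMORY_PARM.  */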
986
ada353b8
JH
/* Merge side effects of call STMT to the function with CALLEE_SUMMARY
   into CUR_SUMMARY.  Return true if something changed.
   If IGNORE_STORES is true, do not merge stores.
   If RECORD_ADJUSTMENTS is true, cap the number of adjustments to
   a given access to keep the dataflow finite.  */
ada353b8
JH
992
993bool
994merge_call_side_effects (modref_summary *cur_summary,
995 gimple *stmt, modref_summary *callee_summary,
5c85f295 996 bool ignore_stores, cgraph_node *callee_node,
64f3e71c 997 bool record_adjustments, bool always_executed)
ada353b8 998{
c34db4b6 999 auto_vec <modref_parm_map, 32> parm_map;
1f3a3363 1000 modref_parm_map chain_map;
ada353b8 1001 bool changed = false;
8d3abf42
JH
1002 int flags = gimple_call_flags (stmt);
1003
64f3e71c
JH
1004 if ((flags & (ECF_CONST | ECF_NOVOPS))
1005 && !(flags & ECF_LOOPING_CONST_OR_PURE))
1006 return changed;
1007
a34edf9a
JH
1008 if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
1009 || (flags & ECF_LOOPING_CONST_OR_PURE))
8d3abf42 1010 {
a34edf9a
JH
1011 if (!cur_summary->side_effects && callee_summary->side_effects)
1012 {
1013 if (dump_file)
1014 fprintf (dump_file, " - merging side effects.\n");
1015 cur_summary->side_effects = true;
1016 changed = true;
1017 }
1018 if (!cur_summary->nondeterministic && callee_summary->nondeterministic
1019 && !ignore_nondeterminism_p (current_function_decl, flags))
1020 {
1021 if (dump_file)
1022 fprintf (dump_file, " - merging nondeterministic.\n");
1023 cur_summary->nondeterministic = true;
1024 changed = true;
1025 }
1026 }
8d3abf42
JH
1027
1028 if (flags & (ECF_CONST | ECF_NOVOPS))
1029 return changed;
ada353b8 1030
a34edf9a
JH
1031 if (!cur_summary->calls_interposable && callee_summary->calls_interposable)
1032 {
1033 if (dump_file)
1034 fprintf (dump_file, " - merging calls interposable.\n");
1035 cur_summary->calls_interposable = true;
1036 changed = true;
1037 }
1038
617695cd
JH
  /* We cannot safely optimize based on the summary of the callee if it does
     not always bind to the current def: it is possible that a memory load
     was optimized out earlier, which may not happen in the interposed
     variant.  */
a34edf9a
JH
1043 if (!callee_node->binds_to_current_def_p ()
1044 && !cur_summary->calls_interposable)
617695cd
JH
1045 {
1046 if (dump_file)
a34edf9a
JH
1047 fprintf (dump_file, " - May be interposed.\n");
1048 cur_summary->calls_interposable = true;
1049 changed = true;
617695cd
JH
1050 }
1051
0b874e0f
ST
1052 if (dump_file)
1053 fprintf (dump_file, " - Merging side effects of %s with parm map:",
1054 callee_node->dump_name ());
1055
520d5ad3 1056 parm_map.safe_grow_cleared (gimple_call_num_args (stmt), true);
ada353b8
JH
1057 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1058 {
1f3a3363 1059 parm_map[i] = parm_map_for_arg (gimple_call_arg (stmt, i));
56cb815b 1060 if (dump_file)
c7b6a758
JH
1061 {
1062 fprintf (dump_file, " %i", parm_map[i].parm_index);
1063 if (parm_map[i].parm_offset_known)
1064 {
1065 fprintf (dump_file, " offset:");
1066 print_dec ((poly_int64_pod)parm_map[i].parm_offset,
1067 dump_file, SIGNED);
1068 }
1069 }
ada353b8 1070 }
1f3a3363
JH
1071 if (gimple_call_chain (stmt))
1072 {
1073 chain_map = parm_map_for_arg (gimple_call_chain (stmt));
1074 if (dump_file)
1075 {
1076 fprintf (dump_file, "static chain %i", chain_map.parm_index);
1077 if (chain_map.parm_offset_known)
1078 {
1079 fprintf (dump_file, " offset:");
1080 print_dec ((poly_int64_pod)chain_map.parm_offset,
1081 dump_file, SIGNED);
1082 }
1083 }
1084 }
56cb815b
JH
1085 if (dump_file)
1086 fprintf (dump_file, "\n");
ada353b8 1087
e69b7c57
JH
1088 if (always_executed
1089 && callee_summary->kills.length ()
1090 && (!cfun->can_throw_non_call_exceptions
1091 || !stmt_could_throw_p (cfun, stmt)))
1092 {
1093 /* Watch for self recursive updates. */
1094 auto_vec<modref_access_node, 32> saved_kills;
1095
1096 saved_kills.reserve_exact (callee_summary->kills.length ());
1097 saved_kills.splice (callee_summary->kills);
1098 for (auto kill : saved_kills)
1099 {
1100 if (kill.parm_index >= (int)parm_map.length ())
1101 continue;
1102 modref_parm_map &m
1103 = kill.parm_index == MODREF_STATIC_CHAIN_PARM
1104 ? chain_map
1105 : parm_map[kill.parm_index];
1106 if (m.parm_index == MODREF_LOCAL_MEMORY_PARM
1107 || m.parm_index == MODREF_UNKNOWN_PARM
1108 || m.parm_index == MODREF_RETSLOT_PARM
1109 || !m.parm_offset_known)
1110 continue;
1111 modref_access_node n = kill;
1112 n.parm_index = m.parm_index;
1113 n.parm_offset += m.parm_offset;
1114 if (modref_access_node::insert_kill (cur_summary->kills, n,
1115 record_adjustments))
1116 changed = true;
1117 }
1118 }
1119
ada353b8 1120 /* Merge with callee's summary. */
5c85f295 1121 changed |= cur_summary->loads->merge (callee_summary->loads, &parm_map,
1f3a3363 1122 &chain_map, record_adjustments);
ada353b8 1123 if (!ignore_stores)
617695cd
JH
1124 {
1125 changed |= cur_summary->stores->merge (callee_summary->stores,
1f3a3363 1126 &parm_map, &chain_map,
5c85f295 1127 record_adjustments);
617695cd
JH
1128 if (!cur_summary->writes_errno
1129 && callee_summary->writes_errno)
1130 {
1131 cur_summary->writes_errno = true;
1132 changed = true;
1133 }
1134 }
ada353b8
JH
1135 return changed;
1136}
1137
617695cd
JH
1138/* Return access mode for argument I of call STMT with FNSPEC. */
1139
1140static modref_access_node
1141get_access_for_fnspec (gcall *call, attr_fnspec &fnspec,
1142 unsigned int i, modref_parm_map &map)
1143{
1144 tree size = NULL_TREE;
1145 unsigned int size_arg;
1146
1147 if (!fnspec.arg_specified_p (i))
1148 ;
1149 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
1150 size = gimple_call_arg (call, size_arg);
1151 else if (fnspec.arg_access_size_given_by_type_p (i))
1152 {
1153 tree callee = gimple_call_fndecl (call);
1154 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
1155
1156 for (unsigned int p = 0; p < i; p++)
1157 t = TREE_CHAIN (t);
1158 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
1159 }
1160 modref_access_node a = {0, -1, -1,
1161 map.parm_offset, map.parm_index,
5c85f295 1162 map.parm_offset_known, 0};
617695cd
JH
1163 poly_int64 size_hwi;
1164 if (size
1165 && poly_int_tree_p (size, &size_hwi)
1166 && coeffs_in_range_p (size_hwi, 0,
1167 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
1168 {
1169 a.size = -1;
1170 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
1171 }
1172 return a;
1173}
1174
6cef01c3
JH
1175/* Collapse loads and return true if something changed. */
1176
1177static bool
1178collapse_loads (modref_summary *cur_summary,
1179 modref_summary_lto *cur_summary_lto)
1180{
1181 bool changed = false;
1182
1183 if (cur_summary && !cur_summary->loads->every_base)
1184 {
1185 cur_summary->loads->collapse ();
1186 changed = true;
1187 }
1188 if (cur_summary_lto
1189 && !cur_summary_lto->loads->every_base)
1190 {
1191 cur_summary_lto->loads->collapse ();
1192 changed = true;
1193 }
1194 return changed;
1195}
1196
1197/* Collapse loads and return true if something changed. */
1198
1199static bool
1200collapse_stores (modref_summary *cur_summary,
1201 modref_summary_lto *cur_summary_lto)
1202{
1203 bool changed = false;
1204
1205 if (cur_summary && !cur_summary->stores->every_base)
1206 {
1207 cur_summary->stores->collapse ();
1208 changed = true;
1209 }
1210 if (cur_summary_lto
1211 && !cur_summary_lto->stores->every_base)
1212 {
1213 cur_summary_lto->stores->collapse ();
1214 changed = true;
1215 }
1216 return changed;
1217}
1218
1219
617695cd
JH
1220/* Apply side effects of call STMT to CUR_SUMMARY using FNSPEC.
1221 If IGNORE_STORES is true ignore them.
1222 Return false if no useful summary can be produced. */
1223
1224static bool
6cef01c3
JH
1225process_fnspec (modref_summary *cur_summary,
1226 modref_summary_lto *cur_summary_lto,
1227 gcall *call, bool ignore_stores)
617695cd
JH
1228{
1229 attr_fnspec fnspec = gimple_call_fnspec (call);
992644c3
JH
1230 int flags = gimple_call_flags (call);
1231
8d3abf42 1232 if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
992644c3
JH
1233 || (flags & ECF_LOOPING_CONST_OR_PURE)
1234 || (cfun->can_throw_non_call_exceptions
1235 && stmt_could_throw_p (cfun, call)))
1236 {
1237 if (cur_summary)
a34edf9a
JH
1238 {
1239 cur_summary->side_effects = true;
1240 if (!ignore_nondeterminism_p (current_function_decl, flags))
1241 cur_summary->nondeterministic = true;
1242 }
992644c3 1243 if (cur_summary_lto)
a34edf9a
JH
1244 {
1245 cur_summary_lto->side_effects = true;
1246 if (!ignore_nondeterminism_p (current_function_decl, flags))
1247 cur_summary_lto->nondeterministic = true;
1248 }
992644c3 1249 }
8d3abf42
JH
1250 if (flags & (ECF_CONST | ECF_NOVOPS))
1251 return true;
617695cd
JH
1252 if (!fnspec.known_p ())
1253 {
1254 if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1255 fprintf (dump_file, " Builtin with no fnspec: %s\n",
1256 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call))));
1257 if (ignore_stores)
1258 {
6cef01c3 1259 collapse_loads (cur_summary, cur_summary_lto);
617695cd
JH
1260 return true;
1261 }
1262 return false;
1263 }
1264 if (fnspec.global_memory_read_p ())
6cef01c3 1265 collapse_loads (cur_summary, cur_summary_lto);
617695cd
JH
1266 else
1267 {
1268 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
1269 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
1270 ;
1271 else if (!fnspec.arg_specified_p (i)
1272 || fnspec.arg_maybe_read_p (i))
1273 {
1f3a3363
JH
1274 modref_parm_map map = parm_map_for_arg
1275 (gimple_call_arg (call, i));
617695cd 1276
1f3a3363 1277 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
617695cd 1278 continue;
1f3a3363 1279 if (map.parm_index == MODREF_UNKNOWN_PARM)
617695cd 1280 {
6cef01c3 1281 collapse_loads (cur_summary, cur_summary_lto);
617695cd
JH
1282 break;
1283 }
6cef01c3
JH
1284 if (cur_summary)
1285 cur_summary->loads->insert (0, 0,
1286 get_access_for_fnspec (call,
1287 fnspec, i,
5c85f295
JH
1288 map),
1289 false);
6cef01c3
JH
1290 if (cur_summary_lto)
1291 cur_summary_lto->loads->insert (0, 0,
1292 get_access_for_fnspec (call,
1293 fnspec, i,
5c85f295
JH
1294 map),
1295 false);
617695cd
JH
1296 }
1297 }
1298 if (ignore_stores)
1299 return true;
1300 if (fnspec.global_memory_written_p ())
6cef01c3 1301 collapse_stores (cur_summary, cur_summary_lto);
617695cd
JH
1302 else
1303 {
1304 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
1305 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
1306 ;
1307 else if (!fnspec.arg_specified_p (i)
1308 || fnspec.arg_maybe_written_p (i))
1309 {
1f3a3363
JH
1310 modref_parm_map map = parm_map_for_arg
1311 (gimple_call_arg (call, i));
617695cd 1312
1f3a3363 1313 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
617695cd 1314 continue;
1f3a3363 1315 if (map.parm_index == MODREF_UNKNOWN_PARM)
617695cd 1316 {
6cef01c3 1317 collapse_stores (cur_summary, cur_summary_lto);
617695cd
JH
1318 break;
1319 }
6cef01c3
JH
1320 if (cur_summary)
1321 cur_summary->stores->insert (0, 0,
1322 get_access_for_fnspec (call,
1323 fnspec, i,
5c85f295
JH
1324 map),
1325 false);
6cef01c3
JH
1326 if (cur_summary_lto)
1327 cur_summary_lto->stores->insert (0, 0,
1328 get_access_for_fnspec (call,
1329 fnspec, i,
5c85f295
JH
1330 map),
1331 false);
617695cd
JH
1332 }
1333 if (fnspec.errno_maybe_written_p () && flag_errno_math)
6cef01c3
JH
1334 {
1335 if (cur_summary)
1336 cur_summary->writes_errno = true;
1337 if (cur_summary_lto)
1338 cur_summary_lto->writes_errno = true;
1339 }
617695cd
JH
1340 }
1341 return true;
1342}
1343
ada353b8
JH
1344/* Analyze function call STMT in function F.
1345 Remember recursive calls in RECURSIVE_CALLS. */
d119f34c
JH
1346
1347static bool
6cef01c3 1348analyze_call (modref_summary *cur_summary, modref_summary_lto *cur_summary_lto,
64f3e71c
JH
1349 gcall *stmt, vec <gimple *> *recursive_calls,
1350 bool always_executed)
d119f34c
JH
1351{
1352 /* Check flags on the function call. In certain cases, analysis can be
1353 simplified. */
1354 int flags = gimple_call_flags (stmt);
8d3abf42
JH
1355 if ((flags & (ECF_CONST | ECF_NOVOPS))
1356 && !(flags & ECF_LOOPING_CONST_OR_PURE))
d119f34c
JH
1357 {
1358 if (dump_file)
1359 fprintf (dump_file,
1360 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
1361 "except for args.\n");
1362 return true;
1363 }
1364
1365 /* Pure functions do not affect global memory. Stores by functions which are
1366 noreturn and do not throw can safely be ignored. */
1367 bool ignore_stores = ignore_stores_p (current_function_decl, flags);
1368
1369 /* Next, we try to get the callee's function declaration. The goal is to
1370 merge their summary with ours. */
1371 tree callee = gimple_call_fndecl (stmt);
1372
1373 /* Check if this is an indirect call. */
1374 if (!callee)
1375 {
d119f34c 1376 if (dump_file)
6cef01c3
JH
1377 fprintf (dump_file, gimple_call_internal_p (stmt)
1378 ? " - Internal call" : " - Indirect call.\n");
1379 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
d119f34c 1380 }
6cef01c3
JH
1381 /* We only need to handle internal calls in IPA mode. */
1382 gcc_checking_assert (!cur_summary_lto);
d119f34c
JH
1383
1384 struct cgraph_node *callee_node = cgraph_node::get_create (callee);
1385
d119f34c
JH
1386 /* If this is a recursive call, the target summary is the same as ours, so
1387 there's nothing to do. */
1388 if (recursive_call_p (current_function_decl, callee))
1389 {
ada353b8 1390 recursive_calls->safe_push (stmt);
992644c3
JH
1391 if (cur_summary)
1392 cur_summary->side_effects = true;
1393 if (cur_summary_lto)
1394 cur_summary_lto->side_effects = true;
d119f34c
JH
1395 if (dump_file)
1396 fprintf (dump_file, " - Skipping recursive call.\n");
1397 return true;
1398 }
1399
1400 gcc_assert (callee_node != NULL);
1401
1402 /* Get the function symbol and its availability. */
1403 enum availability avail;
1404 callee_node = callee_node->function_symbol (&avail);
992644c3
JH
1405 bool looping;
1406 if (builtin_safe_for_const_function_p (&looping, callee))
1407 {
1408 if (looping)
1409 {
1410 if (cur_summary)
1411 cur_summary->side_effects = true;
1412 if (cur_summary_lto)
1413 cur_summary_lto->side_effects = true;
1414 }
1415 if (dump_file)
1416 fprintf (dump_file, " - Bulitin is safe for const.\n");
1417 return true;
1418 }
d119f34c
JH
1419 if (avail <= AVAIL_INTERPOSABLE)
1420 {
d119f34c
JH
1421 if (dump_file)
1422 fprintf (dump_file, " - Function availability <= AVAIL_INTERPOSABLE.\n");
6cef01c3 1423 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
d119f34c
JH
1424 }
1425
1426 /* Get callee's modref summary. As above, if there's no summary, we either
1427 have to give up or, if stores are ignored, we can just purge loads. */
71dbabcc 1428 modref_summary *callee_summary = optimization_summaries->get (callee_node);
d119f34c
JH
1429 if (!callee_summary)
1430 {
d119f34c
JH
1431 if (dump_file)
1432 fprintf (dump_file, " - No modref summary available for callee.\n");
6cef01c3 1433 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
d119f34c
JH
1434 }
1435
56cb815b 1436 merge_call_side_effects (cur_summary, stmt, callee_summary, ignore_stores,
64f3e71c 1437 callee_node, false, always_executed);
d119f34c
JH
1438
1439 return true;
1440}
1441
8a2fd716 1442/* Support analysis in non-lto and lto mode in parallel. */
71dbabcc
JH
1443
1444struct summary_ptrs
1445{
1446 struct modref_summary *nolto;
1447 struct modref_summary_lto *lto;
64f3e71c 1448 bool always_executed;
71dbabcc
JH
1449};
1450
d119f34c
JH
1451/* Helper for analyze_stmt. */
1452
1453static bool
1454analyze_load (gimple *, tree, tree op, void *data)
1455{
71dbabcc
JH
1456 modref_summary *summary = ((summary_ptrs *)data)->nolto;
1457 modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;
d119f34c
JH
1458
1459 if (dump_file)
1460 {
1461 fprintf (dump_file, " - Analyzing load: ");
1462 print_generic_expr (dump_file, op);
1463 fprintf (dump_file, "\n");
1464 }
1465
992644c3
JH
1466 if (TREE_THIS_VOLATILE (op)
1467 || (cfun->can_throw_non_call_exceptions
1468 && tree_could_throw_p (op)))
1469 {
1470 if (dump_file)
1471 fprintf (dump_file, " (volatile or can throw; marking side effects) ");
1472 if (summary)
a34edf9a 1473 summary->side_effects = summary->nondeterministic = true;
992644c3 1474 if (summary_lto)
a34edf9a 1475 summary_lto->side_effects = summary_lto->nondeterministic = true;
992644c3
JH
1476 }
1477
d119f34c
JH
1478 if (!record_access_p (op))
1479 return false;
1480
1481 ao_ref r;
1482 ao_ref_init (&r, op);
64f3e71c 1483 modref_access_node a = get_access (&r);
d119f34c 1484
71dbabcc 1485 if (summary)
64f3e71c 1486 record_access (summary->loads, &r, a);
71dbabcc 1487 if (summary_lto)
64f3e71c 1488 record_access_lto (summary_lto->loads, &r, a);
d119f34c
JH
1489 return false;
1490}
1491
1492/* Helper for analyze_stmt. */
1493
1494static bool
64f3e71c 1495analyze_store (gimple *stmt, tree, tree op, void *data)
d119f34c 1496{
71dbabcc
JH
1497 modref_summary *summary = ((summary_ptrs *)data)->nolto;
1498 modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;
d119f34c
JH
1499
1500 if (dump_file)
1501 {
1502 fprintf (dump_file, " - Analyzing store: ");
1503 print_generic_expr (dump_file, op);
1504 fprintf (dump_file, "\n");
1505 }
1506
992644c3
JH
1507 if (TREE_THIS_VOLATILE (op)
1508 || (cfun->can_throw_non_call_exceptions
1509 && tree_could_throw_p (op)))
1510 {
1511 if (dump_file)
1512 fprintf (dump_file, " (volatile or can throw; marking side effects) ");
1513 if (summary)
a34edf9a 1514 summary->side_effects = summary->nondeterministic = true;
992644c3 1515 if (summary_lto)
a34edf9a 1516 summary_lto->side_effects = summary_lto->nondeterministic = true;
992644c3
JH
1517 }
1518
d119f34c
JH
1519 if (!record_access_p (op))
1520 return false;
1521
1522 ao_ref r;
1523 ao_ref_init (&r, op);
64f3e71c 1524 modref_access_node a = get_access (&r);
d119f34c 1525
71dbabcc 1526 if (summary)
64f3e71c 1527 record_access (summary->stores, &r, a);
71dbabcc 1528 if (summary_lto)
64f3e71c
JH
1529 record_access_lto (summary_lto->stores, &r, a);
1530 if (summary
1531 && ((summary_ptrs *)data)->always_executed
1532 && a.useful_for_kill_p ()
1533 && (!cfun->can_throw_non_call_exceptions
1534 || !stmt_could_throw_p (cfun, stmt)))
1535 {
1536 if (dump_file)
1537 fprintf (dump_file, " - Recording kill\n");
1538 modref_access_node::insert_kill (summary->kills, a, false);
1539 }
d119f34c
JH
1540 return false;
1541}
1542
1543/* Analyze statement STMT of function F.
1544 If IPA is true do not merge in side effects of calls. */
1545
1546static bool
71dbabcc 1547analyze_stmt (modref_summary *summary, modref_summary_lto *summary_lto,
64f3e71c
JH
1548 gimple *stmt, bool ipa, vec <gimple *> *recursive_calls,
1549 bool always_executed)
d119f34c 1550{
8a2fd716
JJ
  /* In general we cannot ignore clobbers because they are barriers for code
     motion; however, after inlining it is safe to do so because local
     optimization passes do not consider clobbers from other functions.
     Similar logic is in ipa-pure-const.c.  */
3991912e 1555 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
64f3e71c
JH
1556 {
1557 if (summary
1558 && always_executed && record_access_p (gimple_assign_lhs (stmt)))
1559 {
1560 ao_ref r;
1561 ao_ref_init (&r, gimple_assign_lhs (stmt));
1562 modref_access_node a = get_access (&r);
1563 if (a.useful_for_kill_p ())
1564 {
1565 if (dump_file)
1566 fprintf (dump_file, " - Recording kill\n");
1567 modref_access_node::insert_kill (summary->kills, a, false);
1568 }
1569 }
1570 return true;
1571 }
3991912e 1572
64f3e71c 1573 struct summary_ptrs sums = {summary, summary_lto, always_executed};
71dbabcc 1574
d119f34c 1575 /* Analyze all loads and stores in STMT. */
71dbabcc 1576 walk_stmt_load_store_ops (stmt, &sums,
d119f34c 1577 analyze_load, analyze_store);
d119f34c
JH
1578
1579 switch (gimple_code (stmt))
1580 {
1581 case GIMPLE_ASM:
a34edf9a
JH
1582 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
1583 {
1584 if (summary)
1585 summary->side_effects = summary->nondeterministic = true;
1586 if (summary_lto)
1587 summary_lto->side_effects = summary_lto->nondeterministic = true;
1588 }
1589 if (cfun->can_throw_non_call_exceptions
1590 && stmt_could_throw_p (cfun, stmt))
992644c3
JH
1591 {
1592 if (summary)
1593 summary->side_effects = true;
1594 if (summary_lto)
1595 summary_lto->side_effects = true;
1596 }
d119f34c
JH
1597 /* If the ASM statement does not read nor write memory, there's nothing
1598 to do. Otherwise just give up. */
1599 if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
1600 return true;
1601 if (dump_file)
1602 fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
1603 "which clobbers memory.\n");
1604 return false;
1605 case GIMPLE_CALL:
6cef01c3
JH
1606 if (!ipa || gimple_call_internal_p (stmt))
1607 return analyze_call (summary, summary_lto,
64f3e71c
JH
1608 as_a <gcall *> (stmt), recursive_calls,
1609 always_executed);
6cef01c3
JH
1610 else
1611 {
1612 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
1613
1614 if (fnspec.known_p ()
1615 && (!fnspec.global_memory_read_p ()
1616 || !fnspec.global_memory_written_p ()))
1617 {
66dd412f
RB
1618 cgraph_edge *e = cgraph_node::get (current_function_decl)->get_edge (stmt);
1619 if (e->callee)
1620 {
1621 fnspec_summaries->get_create (e)->fnspec = xstrdup (fnspec.get_str ());
1622 if (dump_file)
1623 fprintf (dump_file, " Recorded fnspec %s\n", fnspec.get_str ());
1624 }
6cef01c3
JH
1625 }
1626 }
d119f34c
JH
1627 return true;
1628 default:
992644c3
JH
1629 if (cfun->can_throw_non_call_exceptions
1630 && stmt_could_throw_p (cfun, stmt))
1631 {
1632 if (summary)
1633 summary->side_effects = true;
1634 if (summary_lto)
1635 summary_lto->side_effects = true;
1636 }
d119f34c
JH
1637 return true;
1638 }
1639}
1640
71dbabcc
JH
/* Remove the summary of the current function because during the function
   body scan we determined it is not useful.  LTO, NOLTO and IPA determine
   the mode of the scan.  */
1644
1645static void
1646remove_summary (bool lto, bool nolto, bool ipa)
1647{
1648 cgraph_node *fnode = cgraph_node::get (current_function_decl);
1649 if (!ipa)
1650 optimization_summaries->remove (fnode);
1651 else
1652 {
1653 if (nolto)
1654 summaries->remove (fnode);
1655 if (lto)
1656 summaries_lto->remove (fnode);
85ebbabd 1657 remove_modref_edge_summaries (fnode);
71dbabcc
JH
1658 }
1659 if (dump_file)
1660 fprintf (dump_file,
1661 " - modref done with result: not tracked.\n");
1662}
1663
520d5ad3
JH
1664/* Return true if OP accesses memory pointed to by SSA_NAME. */
1665
1666bool
1667memory_access_to (tree op, tree ssa_name)
1668{
1669 tree base = get_base_address (op);
1670 if (!base)
1671 return false;
1672 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
1673 return false;
1674 return TREE_OPERAND (base, 0) == ssa_name;
1675}
1676
/* Consider statement val = *arg.
   Return the EAF flags of ARG that can be determined from the EAF flags of
   VAL (which are known to be FLAGS).  If IGNORE_STORES is true we can ignore
   all stores to VAL, i.e. when handling a noreturn function.  */
1681
1682static int
1683deref_flags (int flags, bool ignore_stores)
1684{
d70ef656
JH
1685 /* Dereference is also a direct read but dereferenced value does not
1686 yield any other direct use. */
1687 int ret = EAF_NO_DIRECT_CLOBBER | EAF_NO_DIRECT_ESCAPE
1688 | EAF_NOT_RETURNED_DIRECTLY;
4341b1b1
JH
1689 /* If argument is unused just account for
1690 the read involved in dereference. */
520d5ad3 1691 if (flags & EAF_UNUSED)
d70ef656
JH
1692 ret |= EAF_NO_INDIRECT_READ | EAF_NO_INDIRECT_CLOBBER
1693 | EAF_NO_INDIRECT_ESCAPE;
520d5ad3
JH
1694 else
1695 {
d70ef656
JH
1696 /* Direct or indirect accesses leads to indirect accesses. */
1697 if (((flags & EAF_NO_DIRECT_CLOBBER)
1698 && (flags & EAF_NO_INDIRECT_CLOBBER))
1699 || ignore_stores)
1700 ret |= EAF_NO_INDIRECT_CLOBBER;
1701 if (((flags & EAF_NO_DIRECT_ESCAPE)
1702 && (flags & EAF_NO_INDIRECT_ESCAPE))
1703 || ignore_stores)
1704 ret |= EAF_NO_INDIRECT_ESCAPE;
1705 if ((flags & EAF_NO_DIRECT_READ)
1706 && (flags & EAF_NO_INDIRECT_READ))
1707 ret |= EAF_NO_INDIRECT_READ;
1708 if ((flags & EAF_NOT_RETURNED_DIRECTLY)
1709 && (flags & EAF_NOT_RETURNED_INDIRECTLY))
1710 ret |= EAF_NOT_RETURNED_INDIRECTLY;
520d5ad3
JH
1711 }
1712 return ret;
1713}
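
/* Worked example (for exposition only): if VAL is completely unused,
   deref_flags (EAF_UNUSED, false) yields
   EAF_NO_DIRECT_CLOBBER | EAF_NO_DIRECT_ESCAPE | EAF_NOT_RETURNED_DIRECTLY
   | EAF_NO_INDIRECT_READ | EAF_NO_INDIRECT_CLOBBER | EAF_NO_INDIRECT_ESCAPE,
   i.e. the only thing the dereference does with ARG is the direct read.  */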
1714
85ebbabd
JH
1715
1716/* Description of an escape point. */
1717
1718struct escape_point
1719{
1720 /* Value escapes to this call. */
1721 gcall *call;
1722 /* Argument it escapes to. */
1723 int arg;
  /* Flags already known about the argument (this can save us from recording
     escape points if local analysis already did a good job).  */
  eaf_flags_t min_flags;
  /* Does the value escape directly or indirectly?  */
  bool direct;
1729};
1730
1731class modref_lattice
1732{
1733public:
1734 /* EAF flags of the SSA name. */
4341b1b1 1735 eaf_flags_t flags;
4898e958
JH
1736 /* Used during DFS walk to mark names where final value was determined
1737 without need for dataflow. */
85ebbabd 1738 bool known;
4898e958 1739 /* Used during DFS walk to mark open vertices (for cycle detection). */
85ebbabd 1740 bool open;
4898e958
JH
1741 /* Set during DFS walk for names that needs dataflow propagation. */
1742 bool do_dataflow;
1743 /* Used during the iterative dataflow. */
1744 bool changed;
85ebbabd
JH
1745
1746 /* When doing IPA analysis we cannot merge in callee escape points;
1747 we only remember them and do the merging at IPA propagation time. */
1748 vec <escape_point, va_heap, vl_ptr> escape_points;
1749
4898e958
JH
1750 /* Representation of the graph for dataflow. This graph is built on demand
1751 using modref_eaf_analysis::analyze_ssa_name and later solved by
1752 modref_eaf_analysis::propagate.
1753 Each edge represents the fact that flags of the current lattice should be
1754 propagated to the lattice of SSA_NAME. */
1755 struct propagate_edge
1756 {
1757 int ssa_name;
1758 bool deref;
1759 };
1760 vec <propagate_edge, va_heap, vl_ptr> propagate_to;
1761
85ebbabd
JH
1762 void init ();
1763 void release ();
1764 bool merge (const modref_lattice &with);
1765 bool merge (int flags);
1766 bool merge_deref (const modref_lattice &with, bool ignore_stores);
1767 bool merge_direct_load ();
1768 bool merge_direct_store ();
1769 bool add_escape_point (gcall *call, int arg, int min_flags, bool direct);
1770 void dump (FILE *out, int indent = 0) const;
1771};
1772
1773/* Lattices are saved to vectors, so keep them PODs. */
1774void
1775modref_lattice::init ()
1776{
4341b1b1 1777 /* All flags we track. */
d70ef656
JH
1778 int f = EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER
1779 | EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE
1780 | EAF_NO_DIRECT_READ | EAF_NO_INDIRECT_READ
1781 | EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY
1782 | EAF_UNUSED;
4341b1b1
JH
1783 flags = f;
1784 /* Check that eaf_flags_t is wide enough to hold all flags. */
1785 gcc_checking_assert (f == flags);
85ebbabd
JH
1786 open = true;
1787 known = false;
1788}
1789
1790/* Release memory. */
1791void
1792modref_lattice::release ()
1793{
1794 escape_points.release ();
4898e958 1795 propagate_to.release ();
85ebbabd
JH
1796}
1797
1798/* Dump lattice to OUT; indent with INDENT spaces. */
1799
1800void
1801modref_lattice::dump (FILE *out, int indent) const
1802{
1803 dump_eaf_flags (out, flags);
1804 if (escape_points.length ())
1805 {
1806 fprintf (out, "%*sEscapes:\n", indent, "");
1807 for (unsigned int i = 0; i < escape_points.length (); i++)
1808 {
1809 fprintf (out, "%*s Arg %i (%s) min flags", indent, "",
1810 escape_points[i].arg,
1811 escape_points[i].direct ? "direct" : "indirect");
9851a163 1812 dump_eaf_flags (out, escape_points[i].min_flags, false);
85ebbabd
JH
1813 fprintf (out, " in call ");
1814 print_gimple_stmt (out, escape_points[i].call, 0);
1815 }
1816 }
1817}
1818
1819/* Add escape point CALL, ARG, MIN_FLAGS, DIRECT. Return false if such an
1820 escape point already exists. */
1821
1822bool
1823modref_lattice::add_escape_point (gcall *call, int arg, int min_flags,
1824 bool direct)
1825{
1826 escape_point *ep;
1827 unsigned int i;
1828
1829 /* If we already determined flags to be bad enough,
4341b1b1
JH
1830 we do not need to record. */
1831 if ((flags & min_flags) == flags || (min_flags & EAF_UNUSED))
85ebbabd
JH
1832 return false;
1833
1834 FOR_EACH_VEC_ELT (escape_points, i, ep)
1835 if (ep->call == call && ep->arg == arg && ep->direct == direct)
1836 {
1837 if ((ep->min_flags & min_flags) == min_flags)
1838 return false;
1839 ep->min_flags &= min_flags;
1840 return true;
1841 }
1842 /* Give up if the limit on escape points is reached. */
1843 if ((int)escape_points.length () > param_modref_max_escape_points)
1844 {
1845 if (dump_file)
1846 fprintf (dump_file, "--param modref-max-escape-points limit reached\n");
1847 merge (0);
1848 return true;
1849 }
1850 escape_point new_ep = {call, arg, min_flags, direct};
1851 escape_points.safe_push (new_ep);
1852 return true;
1853}
1854
1855/* Merge in flags from F. */
1856bool
1857modref_lattice::merge (int f)
1858{
3350e59f
JH
1859 if (f & EAF_UNUSED)
1860 return false;
4526ec20
JH
1861 /* Check that the flags seem sane: if the function does not read the parameter
1862 it cannot access it indirectly. */
1863 gcc_checking_assert (!(f & EAF_NO_DIRECT_READ)
1864 || ((f & EAF_NO_INDIRECT_READ)
1865 && (f & EAF_NO_INDIRECT_CLOBBER)
1866 && (f & EAF_NO_INDIRECT_ESCAPE)
1867 && (f & EAF_NOT_RETURNED_INDIRECTLY)));
85ebbabd
JH
1868 if ((flags & f) != flags)
1869 {
1870 flags &= f;
4341b1b1
JH
1871 /* Prune obviously useless flags;
1872 we do not have ECF_FLAGS handy, which is not a big problem since
1873 we will do a final cleanup of flags before producing the summary.
1874 Merging should be fast so it can work well with dataflow. */
1875 flags = remove_useless_eaf_flags (flags, 0, false);
85ebbabd
JH
1876 if (!flags)
1877 escape_points.release ();
1878 return true;
1879 }
1880 return false;
1881}
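/* Illustrative sketch (exposition only): the lattice is ordered by flag
   removal, so merging only ever clears bits.  Starting from init (),
   which sets all tracked EAF_* flags (the optimistic value), a sequence
   like

     l.init ();
     l.merge (EAF_NO_DIRECT_CLOBBER | EAF_NO_DIRECT_ESCAPE);
     l.merge (0);

   ends in the bottom value 0, where nothing is known and the recorded
   escape points are released because they can no longer be useful.
   merge returns true exactly when some flag was dropped, which is what
   drives the iterative dataflow below to a fixed point.  */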
1882
1883/* Merge in WITH. Return true if anything changed. */
1884
1885bool
1886modref_lattice::merge (const modref_lattice &with)
1887{
1888 if (!with.known)
4898e958 1889 do_dataflow = true;
85ebbabd
JH
1890
1891 bool changed = merge (with.flags);
1892
1893 if (!flags)
1894 return changed;
1895 for (unsigned int i = 0; i < with.escape_points.length (); i++)
1896 changed |= add_escape_point (with.escape_points[i].call,
1897 with.escape_points[i].arg,
1898 with.escape_points[i].min_flags,
1899 with.escape_points[i].direct);
1900 return changed;
1901}
1902
1903/* Merge in the dereference of WITH. If IGNORE_STORES is true do not consider
1904 stores. Return true if anything changed. */
1905
1906bool
1907modref_lattice::merge_deref (const modref_lattice &with, bool ignore_stores)
1908{
1909 if (!with.known)
4898e958 1910 do_dataflow = true;
85ebbabd
JH
1911
1912 bool changed = merge (deref_flags (with.flags, ignore_stores));
1913
1914 if (!flags)
1915 return changed;
1916 for (unsigned int i = 0; i < with.escape_points.length (); i++)
9851a163
JH
1917 {
1918 int min_flags = with.escape_points[i].min_flags;
1919
1920 if (with.escape_points[i].direct)
1921 min_flags = deref_flags (min_flags, ignore_stores);
1922 else if (ignore_stores)
4341b1b1 1923 min_flags |= ignore_stores_eaf_flags;
9851a163
JH
1924 changed |= add_escape_point (with.escape_points[i].call,
1925 with.escape_points[i].arg,
1926 min_flags,
1927 false);
1928 }
85ebbabd
JH
1929 return changed;
1930}
1931
1932/* Merge in flags for direct load. */
1933
1934bool
1935modref_lattice::merge_direct_load ()
1936{
d70ef656 1937 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_READ));
85ebbabd
JH
1938}
1939
1940/* Merge in flags for direct store. */
1941
1942bool
1943modref_lattice::merge_direct_store ()
1944{
d70ef656 1945 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_CLOBBER));
85ebbabd
JH
1946}
1947
4898e958
JH
1948/* Analyzer of EAF flags.
1949 This is generally a dataflow problem over the SSA graph, however we only
1950 care about flags of a few selected ssa names (arguments, return slot and
1951 static chain). So we first call analyze_ssa_name on all relevant names
1952 and perform a DFS walk to discover SSA names where flags need to be
1953 determined. For acyclic graphs we try to determine final flags during
1954 this walk. Once a cycle or the recursion depth limit is hit, we enlist
1955 SSA names for dataflow, which is done by the propagate call.
1956
1957 After propagation the flags can be obtained using get_ssa_name_flags. */
18f0873d
JH
1958
1959class modref_eaf_analysis
1960{
1961public:
4898e958 1962 /* Mark NAME as relevant for analysis. */
18f0873d 1963 void analyze_ssa_name (tree name);
4898e958
JH
1964 /* Dataflow solver. */
1965 void propagate ();
18f0873d
JH
1966 /* Return flags computed earlier for NAME. */
1967 int get_ssa_name_flags (tree name)
1968 {
1969 int version = SSA_NAME_VERSION (name);
1970 gcc_checking_assert (m_lattice[version].known);
1971 return m_lattice[version].flags;
1972 }
1973 /* In IPA mode this will record all escape points
1974 determined for NAME to PARM_INDEX. FLAGS are the minimal
1975 flags known. */
1976 void record_escape_points (tree name, int parm_index, int flags);
1977 modref_eaf_analysis (bool ipa)
1978 {
1979 m_ipa = ipa;
1980 m_depth = 0;
1981 m_lattice.safe_grow_cleared (num_ssa_names, true);
1982 }
1983 ~modref_eaf_analysis ()
1984 {
1985 gcc_checking_assert (!m_depth);
4898e958 1986 if (m_ipa || m_names_to_propagate.length ())
18f0873d
JH
1987 for (unsigned int i = 0; i < num_ssa_names; i++)
1988 m_lattice[i].release ();
1989 }
1990private:
1991 /* If true, we produce analysis for IPA mode. In this case escape points are
1992 collected. */
1993 bool m_ipa;
1994 /* Depth of recursion of analyze_ssa_name. */
1995 int m_depth;
1996 /* Propagation lattice for individual ssa names. */
1997 auto_vec<modref_lattice> m_lattice;
4898e958
JH
1998 auto_vec<tree> m_deferred_names;
1999 auto_vec<int> m_names_to_propagate;
18f0873d
JH
2000
2001 void merge_with_ssa_name (tree dest, tree src, bool deref);
d70ef656
JH
2002 void merge_call_lhs_flags (gcall *call, int arg, tree name, bool direct,
2003 bool deref);
18f0873d 2004};
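/* Usage sketch (exposition only; analyze_parms below is the real driver):
   the caller marks the SSA names it cares about, runs the solver and only
   then reads the results:

     modref_eaf_analysis eaf_analysis (ipa);
     eaf_analysis.analyze_ssa_name (ssa_default_def (cfun, parm));
     eaf_analysis.propagate ();
     int flags = eaf_analysis.get_ssa_name_flags
		   (ssa_default_def (cfun, parm));

   get_ssa_name_flags must not be called before propagate, since lattices
   that took part in a cycle are not final until the dataflow finished.  */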
85ebbabd 2005
85ebbabd 2006
d70ef656 2007/* Call statements may return their parameters. Consider argument number
520d5ad3
JH
2008 ARG of USE_STMT and determine the flags that need to be cleared
2009 in case the pointer possibly indirectly referenced from ARG is returned.
d70ef656
JH
2010 If DIRECT is true consider direct returns and if INDIRECT consider
2011 indirect returns.
62af7d94
JH
2012 LATTICE, DEPTH and IPA are the same as in analyze_ssa_name.
2013 ARG is set to -1 for static chain. */
520d5ad3 2014
18f0873d
JH
2015void
2016modref_eaf_analysis::merge_call_lhs_flags (gcall *call, int arg,
d70ef656
JH
2017 tree name, bool direct,
2018 bool indirect)
520d5ad3 2019{
18f0873d
JH
2020 int index = SSA_NAME_VERSION (name);
2021
d70ef656
JH
2022 /* If value is not returned at all, do nothing. */
2023 if (!direct && !indirect)
2024 return;
2025
520d5ad3
JH
2026 /* If there is no return value, no flags are affected. */
2027 if (!gimple_call_lhs (call))
85ebbabd 2028 return;
520d5ad3
JH
2029
2030 /* If we know that the function returns a given argument and it is not ARG,
2031 the flags of ARG are not affected. */
62af7d94
JH
2032 if (arg >= 0)
2033 {
2034 int flags = gimple_call_return_flags (call);
2035 if ((flags & ERF_RETURNS_ARG)
2036 && (flags & ERF_RETURN_ARG_MASK) != arg)
2037 return;
2038 }
8da8ed43 2039
520d5ad3
JH
2040 /* If return value is SSA name determine its flags. */
2041 if (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME)
85ebbabd
JH
2042 {
2043 tree lhs = gimple_call_lhs (call);
d70ef656
JH
2044 if (direct)
2045 merge_with_ssa_name (name, lhs, false);
2046 if (indirect)
2047 merge_with_ssa_name (name, lhs, true);
85ebbabd 2048 }
520d5ad3 2049 /* In the case of memory store we can do nothing. */
d70ef656 2050 else if (!direct)
18f0873d 2051 m_lattice[index].merge (deref_flags (0, false));
520d5ad3 2052 else
18f0873d 2053 m_lattice[index].merge (0);
520d5ad3
JH
2054}
2055
62af7d94
JH
2056/* CALL_FLAGS are the EAF flags of the argument. Turn them
2057 into flags for the caller and update the LATTICE of the corresponding
2058 argument if needed. */
2059
2060static int
2061callee_to_caller_flags (int call_flags, bool ignore_stores,
2062 modref_lattice &lattice)
2063{
2064 /* call_flags describe the callee returning a value,
2065 which is not the same as the caller returning it. */
d70ef656
JH
2066 call_flags |= EAF_NOT_RETURNED_DIRECTLY
2067 | EAF_NOT_RETURNED_INDIRECTLY;
62af7d94
JH
2068 if (!ignore_stores && !(call_flags & EAF_UNUSED))
2069 {
2f3d43a3
JH
2070 /* If value escapes we are no longer able to track what happens
2071 with it because we can read it from the escaped location
2072 anytime. */
d70ef656 2073 if (!(call_flags & EAF_NO_DIRECT_ESCAPE))
2f3d43a3
JH
2074 lattice.merge (0);
2075 else if (!(call_flags & EAF_NO_INDIRECT_ESCAPE))
d70ef656 2076 lattice.merge (~(EAF_NOT_RETURNED_INDIRECTLY
4526ec20 2077 | EAF_NO_DIRECT_READ
2f3d43a3
JH
2078 | EAF_NO_INDIRECT_READ
2079 | EAF_NO_INDIRECT_CLOBBER
62af7d94
JH
2080 | EAF_UNUSED));
2081 }
2082 else
2083 call_flags |= ignore_stores_eaf_flags;
2084 return call_flags;
2085}
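/* Worked example (exposition only): if the callee's summary guarantees
   EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE for the argument and
   stores can not be ignored, callee_to_caller_flags only adds
   EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY and leaves
   LATTICE untouched.  If EAF_NO_DIRECT_ESCAPE is missing, the value may
   later be read back from wherever it escaped to, so LATTICE is dropped
   to 0.  If only EAF_NO_INDIRECT_ESCAPE is missing, the read guarantees
   together with the indirect clobber and indirect return guarantees are
   removed from LATTICE instead.  */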
2086
85ebbabd
JH
2087/* Analyze EAF flags for SSA name NAME and store the result to LATTICE.
2088 LATTICE is an array of modref_lattices.
2089 DEPTH is a recursion depth used to make debug output prettier.
2090 If IPA is true we analyze for IPA propagation (and thus call escape points
2091 are processed later). */
520d5ad3 2092
18f0873d
JH
2093void
2094modref_eaf_analysis::analyze_ssa_name (tree name)
520d5ad3
JH
2095{
2096 imm_use_iterator ui;
2097 gimple *use_stmt;
85ebbabd 2098 int index = SSA_NAME_VERSION (name);
520d5ad3
JH
2099
2100 /* See if value is already computed. */
4898e958 2101 if (m_lattice[index].known || m_lattice[index].do_dataflow)
85ebbabd 2102 return;
18f0873d 2103 if (m_lattice[index].open)
520d5ad3 2104 {
85ebbabd
JH
2105 if (dump_file)
2106 fprintf (dump_file,
4898e958 2107 "%*sCycle in SSA graph\n",
18f0873d 2108 m_depth * 4, "");
85ebbabd 2109 return;
520d5ad3 2110 }
4898e958
JH
2111 /* Recursion guard. */
2112 m_lattice[index].init ();
18f0873d 2113 if (m_depth == param_modref_max_depth)
520d5ad3
JH
2114 {
2115 if (dump_file)
2116 fprintf (dump_file,
4898e958 2117 "%*sMax recursion depth reached; postponing\n",
18f0873d 2118 m_depth * 4, "");
4898e958 2119 m_deferred_names.safe_push (name);
85ebbabd 2120 return;
520d5ad3 2121 }
520d5ad3
JH
2122
2123 if (dump_file)
2124 {
2125 fprintf (dump_file,
18f0873d 2126 "%*sAnalyzing flags of ssa name: ", m_depth * 4, "");
520d5ad3
JH
2127 print_generic_expr (dump_file, name);
2128 fprintf (dump_file, "\n");
2129 }
2130
2131 FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
2132 {
18f0873d 2133 if (m_lattice[index].flags == 0)
640296c3 2134 break;
520d5ad3
JH
2135 if (is_gimple_debug (use_stmt))
2136 continue;
2137 if (dump_file)
2138 {
18f0873d 2139 fprintf (dump_file, "%*s Analyzing stmt: ", m_depth * 4, "");
520d5ad3
JH
2140 print_gimple_stmt (dump_file, use_stmt, 0);
2141 }
4341b1b1
JH
2142 /* If we see a direct non-debug use, clear the unused bit.
2143 All dereferences should be accounted for below using deref_flags. */
18f0873d 2144 m_lattice[index].merge (~EAF_UNUSED);
520d5ad3 2145
26285af4
JH
2146 /* A gimple return may load the return value.
2147 Returning the name counts as a use by tree-ssa-structalias.c. */
520d5ad3
JH
2148 if (greturn *ret = dyn_cast <greturn *> (use_stmt))
2149 {
b8ef019a
JH
2150 /* Returning through return slot is seen as memory write earlier. */
2151 if (DECL_RESULT (current_function_decl)
2152 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
2153 ;
2154 else if (gimple_return_retval (ret) == name)
d70ef656 2155 m_lattice[index].merge (~(EAF_UNUSED | EAF_NOT_RETURNED_DIRECTLY
18f0873d 2156 | EAF_NOT_RETURNED_INDIRECTLY));
85ebbabd 2157 else if (memory_access_to (gimple_return_retval (ret), name))
8da8ed43 2158 {
18f0873d 2159 m_lattice[index].merge_direct_load ();
d70ef656
JH
2160 m_lattice[index].merge (~(EAF_UNUSED
2161 | EAF_NOT_RETURNED_INDIRECTLY));
8da8ed43 2162 }
520d5ad3
JH
2163 }
2164 /* Account for LHS store, arg loads and flags from callee function. */
2165 else if (gcall *call = dyn_cast <gcall *> (use_stmt))
2166 {
2167 tree callee = gimple_call_fndecl (call);
9b08f776
JH
2168
2169 /* IPA PTA internally treats calling a function as "writing" to
2170 the argument space of all functions the function pointer points to
2171 (PR101949). We cannot drop EAF_NOCLOBBER only when ipa-pta
2172 is on, since that would allow propagation of this from -fno-ipa-pta
2173 to -fipa-pta functions. */
2174 if (gimple_call_fn (use_stmt) == name)
d70ef656 2175 m_lattice[index].merge (~(EAF_NO_DIRECT_CLOBBER | EAF_UNUSED));
9b08f776 2176
520d5ad3 2177 /* Recursion would require a bit of propagation; give up for now. */
18f0873d 2178 if (callee && !m_ipa && recursive_call_p (current_function_decl,
85ebbabd 2179 callee))
18f0873d 2180 m_lattice[index].merge (0);
520d5ad3
JH
2181 else
2182 {
2183 int ecf_flags = gimple_call_flags (call);
2184 bool ignore_stores = ignore_stores_p (current_function_decl,
2185 ecf_flags);
85ebbabd
JH
2186 bool ignore_retval = ignore_retval_p (current_function_decl,
2187 ecf_flags);
520d5ad3
JH
2188
2189 /* Handle *name = func (...). */
2190 if (gimple_call_lhs (call)
2191 && memory_access_to (gimple_call_lhs (call), name))
59f38935 2192 {
18f0873d 2193 m_lattice[index].merge_direct_store ();
59f38935
JH
2194 /* The return slot optimization passes the address of
2195 LHS to the callee via a hidden parameter and this
2196 may make LHS escape. See PR 98499. */
2197 if (gimple_call_return_slot_opt_p (call)
2198 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call))))
62af7d94
JH
2199 {
2200 int call_flags = gimple_call_retslot_flags (call);
2201 bool isretslot = false;
2202
2203 if (DECL_RESULT (current_function_decl)
2204 && DECL_BY_REFERENCE
2205 (DECL_RESULT (current_function_decl)))
2206 isretslot = ssa_default_def
2207 (cfun,
2208 DECL_RESULT (current_function_decl))
2209 == name;
2210
2211 /* Passing returnslot to return slot is special because
2212 not_returned and escape have the same meaning.
2213 However passing arg to return slot is different. If
2214 the callee's return slot is returned it means that
2f3d43a3
JH
2215 arg is written to itself, which is an escape.
2216 Since we do not track the memory it is written to, we
2217 need to give up on analysing it. */
62af7d94
JH
2218 if (!isretslot)
2219 {
62af7d94 2220 if (!(call_flags & (EAF_NOT_RETURNED_DIRECTLY
d70ef656 2221 | EAF_UNUSED)))
2f3d43a3
JH
2222 m_lattice[index].merge (0);
2223 else gcc_checking_assert
2224 (call_flags & (EAF_NOT_RETURNED_INDIRECTLY
2225 | EAF_UNUSED));
62af7d94
JH
2226 call_flags = callee_to_caller_flags
2227 (call_flags, false,
2228 m_lattice[index]);
2229 }
2230 m_lattice[index].merge (call_flags);
2231 }
59f38935 2232 }
520d5ad3 2233
520d5ad3
JH
2234 if (gimple_call_chain (call)
2235 && (gimple_call_chain (call) == name))
62af7d94
JH
2236 {
2237 int call_flags = gimple_call_static_chain_flags (call);
d70ef656
JH
2238 if (!ignore_retval && !(call_flags & EAF_UNUSED))
2239 merge_call_lhs_flags
2240 (call, -1, name,
2241 !(call_flags & EAF_NOT_RETURNED_DIRECTLY),
2242 !(call_flags & EAF_NOT_RETURNED_INDIRECTLY));
62af7d94
JH
2243 call_flags = callee_to_caller_flags
2244 (call_flags, ignore_stores,
2245 m_lattice[index]);
2246 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
2247 m_lattice[index].merge (call_flags);
2248 }
85ebbabd
JH
2249
2250 /* Process internal functions right away. */
18f0873d 2251 bool record_ipa = m_ipa && !gimple_call_internal_p (call);
520d5ad3
JH
2252
2253 /* Handle all function parameters. */
85ebbabd 2254 for (unsigned i = 0;
18f0873d
JH
2255 i < gimple_call_num_args (call)
2256 && m_lattice[index].flags; i++)
520d5ad3
JH
2257 /* Name is directly passed to the callee. */
2258 if (gimple_call_arg (call, i) == name)
2259 {
62af7d94 2260 int call_flags = gimple_call_arg_flags (call, i);
d70ef656 2261 if (!ignore_retval && !(call_flags & EAF_UNUSED))
62af7d94
JH
2262 merge_call_lhs_flags
2263 (call, i, name,
d70ef656
JH
2264 !(call_flags & EAF_NOT_RETURNED_DIRECTLY),
2265 !(call_flags & EAF_NOT_RETURNED_INDIRECTLY));
85ebbabd 2266 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
520d5ad3 2267 {
62af7d94
JH
2268 call_flags = callee_to_caller_flags
2269 (call_flags, ignore_stores,
2270 m_lattice[index]);
85ebbabd 2271 if (!record_ipa)
18f0873d 2272 m_lattice[index].merge (call_flags);
c3c61674 2273 else
18f0873d 2274 m_lattice[index].add_escape_point (call, i,
b8ef019a 2275 call_flags, true);
520d5ad3
JH
2276 }
2277 }
2278 /* Name is dereferenced and passed to a callee. */
2279 else if (memory_access_to (gimple_call_arg (call, i), name))
2280 {
62af7d94
JH
2281 int call_flags = deref_flags
2282 (gimple_call_arg_flags (call, i), ignore_stores);
d70ef656
JH
2283 if (!ignore_retval && !(call_flags & EAF_UNUSED)
2284 && !(call_flags & EAF_NOT_RETURNED_DIRECTLY)
2285 && !(call_flags & EAF_NOT_RETURNED_INDIRECTLY))
2286 merge_call_lhs_flags (call, i, name, false, true);
520d5ad3 2287 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
18f0873d 2288 m_lattice[index].merge_direct_load ();
520d5ad3 2289 else
85ebbabd 2290 {
62af7d94
JH
2291 call_flags = callee_to_caller_flags
2292 (call_flags, ignore_stores,
2293 m_lattice[index]);
85ebbabd 2294 if (!record_ipa)
18f0873d 2295 m_lattice[index].merge (call_flags);
c3c61674 2296 else
18f0873d 2297 m_lattice[index].add_escape_point (call, i,
62af7d94 2298 call_flags, false);
85ebbabd 2299 }
520d5ad3
JH
2300 }
2301 }
520d5ad3
JH
2302 }
2303 else if (gimple_assign_load_p (use_stmt))
2304 {
2305 gassign *assign = as_a <gassign *> (use_stmt);
2306 /* Memory to memory copy. */
2307 if (gimple_store_p (assign))
2308 {
520d5ad3
JH
2309 /* Handle *lhs = *name.
2310
2311 We do not track memory locations, so assume that value
2312 is used arbitrarily. */
2313 if (memory_access_to (gimple_assign_rhs1 (assign), name))
18f0873d 2314 m_lattice[index].merge (deref_flags (0, false));
85ebbabd
JH
2315 /* Handle *name = *exp. */
2316 else if (memory_access_to (gimple_assign_lhs (assign), name))
18f0873d 2317 m_lattice[index].merge_direct_store ();
520d5ad3
JH
2318 }
2319 /* Handle lhs = *name. */
2320 else if (memory_access_to (gimple_assign_rhs1 (assign), name))
85ebbabd
JH
2321 {
2322 tree lhs = gimple_assign_lhs (assign);
18f0873d 2323 merge_with_ssa_name (name, lhs, true);
85ebbabd 2324 }
520d5ad3
JH
2325 }
2326 else if (gimple_store_p (use_stmt))
2327 {
2328 gassign *assign = dyn_cast <gassign *> (use_stmt);
2329
2330 /* Handle *lhs = name. */
2331 if (assign && gimple_assign_rhs1 (assign) == name)
2332 {
2333 if (dump_file)
2334 fprintf (dump_file, "%*s ssa name saved to memory\n",
18f0873d
JH
2335 m_depth * 4, "");
2336 m_lattice[index].merge (0);
520d5ad3
JH
2337 }
2338 /* Handle *name = exp. */
2339 else if (assign
2340 && memory_access_to (gimple_assign_lhs (assign), name))
0c9687d0
JH
2341 {
2342 /* In general we cannot ignore clobbers because they are
2343 barriers for code motion, however after inlining it is safe to
2344 do so because local optimization passes do not consider clobbers
18f0873d
JH
2345 from other functions.
2346 Similar logic is in ipa-pure-const.c. */
0c9687d0 2347 if (!cfun->after_inlining || !gimple_clobber_p (assign))
18f0873d 2348 m_lattice[index].merge_direct_store ();
0c9687d0 2349 }
520d5ad3
JH
2350 /* ASM statements etc. */
2351 else if (!assign)
2352 {
2353 if (dump_file)
18f0873d
JH
2354 fprintf (dump_file, "%*s Unhandled store\n", m_depth * 4, "");
2355 m_lattice[index].merge (0);
520d5ad3
JH
2356 }
2357 }
2358 else if (gassign *assign = dyn_cast <gassign *> (use_stmt))
2359 {
2360 enum tree_code code = gimple_assign_rhs_code (assign);
2361
2362 /* See if operation is a merge as considered by
2363 tree-ssa-structalias.c:find_func_aliases. */
2364 if (!truth_value_p (code)
2365 && code != POINTER_DIFF_EXPR
2366 && (code != POINTER_PLUS_EXPR
2367 || gimple_assign_rhs1 (assign) == name))
85ebbabd
JH
2368 {
2369 tree lhs = gimple_assign_lhs (assign);
18f0873d 2370 merge_with_ssa_name (name, lhs, false);
85ebbabd 2371 }
520d5ad3
JH
2372 }
2373 else if (gphi *phi = dyn_cast <gphi *> (use_stmt))
2374 {
85ebbabd 2375 tree result = gimple_phi_result (phi);
18f0873d 2376 merge_with_ssa_name (name, result, false);
520d5ad3
JH
2377 }
2378 /* Conditions are not considered escape points
2379 by tree-ssa-structalias. */
2380 else if (gimple_code (use_stmt) == GIMPLE_COND)
2381 ;
2382 else
2383 {
2384 if (dump_file)
18f0873d
JH
2385 fprintf (dump_file, "%*s Unhandled stmt\n", m_depth * 4, "");
2386 m_lattice[index].merge (0);
520d5ad3
JH
2387 }
2388
2389 if (dump_file)
2390 {
18f0873d 2391 fprintf (dump_file, "%*s current flags of ", m_depth * 4, "");
520d5ad3 2392 print_generic_expr (dump_file, name);
18f0873d 2393 m_lattice[index].dump (dump_file, m_depth * 4 + 4);
520d5ad3
JH
2394 }
2395 }
2396 if (dump_file)
2397 {
18f0873d 2398 fprintf (dump_file, "%*sflags of ssa name ", m_depth * 4, "");
520d5ad3 2399 print_generic_expr (dump_file, name);
18f0873d 2400 m_lattice[index].dump (dump_file, m_depth * 4 + 2);
520d5ad3 2401 }
18f0873d 2402 m_lattice[index].open = false;
4898e958
JH
2403 if (!m_lattice[index].do_dataflow)
2404 m_lattice[index].known = true;
18f0873d
JH
2405}
2406
2407/* Propagate info from SRC to DEST. If DEREF is true, assume that SRC
2408 is dereferenced. */
2409
2410void
2411modref_eaf_analysis::merge_with_ssa_name (tree dest, tree src, bool deref)
2412{
2413 int index = SSA_NAME_VERSION (dest);
2414 int src_index = SSA_NAME_VERSION (src);
2415
4898e958
JH
2416 /* Merging lattice with itself is a no-op. */
2417 if (!deref && src == dest)
2418 return;
2419
18f0873d
JH
2420 m_depth++;
2421 analyze_ssa_name (src);
2422 m_depth--;
2423 if (deref)
2424 m_lattice[index].merge_deref (m_lattice[src_index], false);
2425 else
2426 m_lattice[index].merge (m_lattice[src_index]);
4898e958
JH
2427
2428 /* If we failed to produce final solution add an edge to the dataflow
2429 graph. */
2430 if (!m_lattice[src_index].known)
2431 {
2432 modref_lattice::propagate_edge e = {index, deref};
2433
2434 if (!m_lattice[src_index].propagate_to.length ())
2435 m_names_to_propagate.safe_push (src_index);
2436 m_lattice[src_index].propagate_to.safe_push (e);
2437 m_lattice[src_index].changed = true;
2438 m_lattice[src_index].do_dataflow = true;
2439 if (dump_file)
2440 fprintf (dump_file,
2441 "%*sWill propgate from ssa_name %i to %i%s\n",
2442 m_depth * 4 + 4,
2443 "", src_index, index, deref ? " (deref)" : "");
2444 }
2445}
2446
2447/* In case we deferred some SSA names, reprocess them. In case some
2448 dataflow edges were introduced, do the actual iterative dataflow. */
2449
2450void
2451modref_eaf_analysis::propagate ()
2452{
2453 int iterations = 0;
2454 size_t i;
2455 int index;
2456 bool changed = true;
2457
2458 while (m_deferred_names.length ())
2459 {
2460 tree name = m_deferred_names.pop ();
2461 m_lattice[SSA_NAME_VERSION (name)].open = false;
2462 if (dump_file)
2463 fprintf (dump_file, "Analyzing deferred SSA name\n");
2464 analyze_ssa_name (name);
2465 }
2466
2467 if (!m_names_to_propagate.length ())
2468 return;
2469 if (dump_file)
2470 fprintf (dump_file, "Propagating EAF flags\n");
2471
2472 /* Compute reverse postorder. */
2473 auto_vec <int> rpo;
2474 struct stack_entry
2475 {
2476 int name;
2477 unsigned pos;
2478 };
2479 auto_vec <struct stack_entry> stack;
2480 int pos = m_names_to_propagate.length () - 1;
2481
2482 rpo.safe_grow (m_names_to_propagate.length (), true);
2483 stack.reserve_exact (m_names_to_propagate.length ());
2484
2485 /* We reuse the known flag for RPO DFS walk bookkeeping. */
2486 if (flag_checking)
2487 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
2488 gcc_assert (!m_lattice[index].known && m_lattice[index].changed);
2489
2490 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
2491 {
2492 if (!m_lattice[index].known)
2493 {
2494 stack_entry e = {index, 0};
2495
2496 stack.quick_push (e);
2497 m_lattice[index].known = true;
2498 }
2499 while (stack.length ())
2500 {
2501 bool found = false;
2502 int index1 = stack.last ().name;
2503
2504 while (stack.last ().pos < m_lattice[index1].propagate_to.length ())
2505 {
2506 int index2 = m_lattice[index1]
2507 .propagate_to[stack.last ().pos].ssa_name;
2508
2509 stack.last ().pos++;
2510 if (!m_lattice[index2].known
2511 && m_lattice[index2].propagate_to.length ())
2512 {
2513 stack_entry e = {index2, 0};
2514
2515 stack.quick_push (e);
2516 m_lattice[index2].known = true;
2517 found = true;
2518 break;
2519 }
2520 }
2521 if (!found
2522 && stack.last ().pos == m_lattice[index1].propagate_to.length ())
2523 {
2524 rpo[pos--] = index1;
2525 stack.pop ();
2526 }
2527 }
2528 }
2529
2530 /* Perform iterative dataflow. */
2531 while (changed)
2532 {
2533 changed = false;
2534 iterations++;
2535 if (dump_file)
2536 fprintf (dump_file, " iteration %i\n", iterations);
2537 FOR_EACH_VEC_ELT (rpo, i, index)
2538 {
2539 if (m_lattice[index].changed)
2540 {
2541 size_t j;
2542
2543 m_lattice[index].changed = false;
2544 if (dump_file)
2545 fprintf (dump_file, " Visiting ssa name %i\n", index);
2546 for (j = 0; j < m_lattice[index].propagate_to.length (); j++)
2547 {
2548 bool ch;
2549 int target = m_lattice[index].propagate_to[j].ssa_name;
2550 bool deref = m_lattice[index].propagate_to[j].deref;
2551
2552 if (dump_file)
2553 fprintf (dump_file, " Propagating flags of ssa name"
2554 " %i to %i%s\n",
2555 index, target, deref ? " (deref)" : "");
2556 m_lattice[target].known = true;
2557 if (!m_lattice[index].propagate_to[j].deref)
2558 ch = m_lattice[target].merge (m_lattice[index]);
2559 else
2560 ch = m_lattice[target].merge_deref (m_lattice[index],
2561 false);
2562 if (!ch)
2563 continue;
2564 if (dump_file)
2565 {
2566 fprintf (dump_file, " New lattice: ");
2567 m_lattice[target].dump (dump_file);
2568 }
d70ef656 2569 changed = true;
4898e958
JH
2570 m_lattice[target].changed = true;
2571 }
2572 }
2573 }
2574 }
2575 if (dump_file)
2576 fprintf (dump_file, "EAF flags propagated in %i iterations\n", iterations);
520d5ad3
JH
2577}
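/* Illustrative example (exposition only): if analyze_ssa_name discovered
   the edges a_1 -> b_2 and b_2 -> a_1 (a cycle, e.g. two PHIs feeding
   each other), neither lattice could be finalized during the DFS walk,
   so both names end up in m_names_to_propagate.  The solver above visits
   them in reverse postorder and keeps re-merging until no lattice
   changes; termination is guaranteed because merging only ever removes
   flags.  */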
2578
b8ef019a
JH
2579/* Record escape points of NAME for parameter PARM_INDEX; FLAGS are the flags known for it. */
2580
18f0873d
JH
2581void
2582modref_eaf_analysis::record_escape_points (tree name, int parm_index, int flags)
b8ef019a 2583{
18f0873d
JH
2584 modref_lattice &lattice = m_lattice[SSA_NAME_VERSION (name)];
2585
b8ef019a
JH
2586 if (lattice.escape_points.length ())
2587 {
2588 escape_point *ep;
2589 unsigned int ip;
2590 cgraph_node *node = cgraph_node::get (current_function_decl);
2591
18f0873d 2592 gcc_assert (m_ipa);
b8ef019a
JH
2593 FOR_EACH_VEC_ELT (lattice.escape_points, ip, ep)
2594 if ((ep->min_flags & flags) != flags)
2595 {
2596 cgraph_edge *e = node->get_edge (ep->call);
2597 struct escape_entry ee = {parm_index, ep->arg,
2598 ep->min_flags, ep->direct};
2599
2600 escape_summaries->get_create (e)->esc.safe_push (ee);
2601 }
2602 }
2603}
2604
7798ae1a
JH
2605/* Determine EAF flags for function parameters
2606 and fill in SUMMARY/SUMMARY_LTO. If IPA is true work in IPA mode
2607 where we also collect escape points.
2608 PAST_FLAGS, PAST_RETSLOT_FLAGS, PAST_STATIC_CHAIN_FLAGS can be
2609 used to preserve flags from a previous (IPA) run for cases where
2610 late optimizations changed the code in a way we can no longer analyze
2611 it easily. */
520d5ad3
JH
2612
2613static void
85ebbabd 2614analyze_parms (modref_summary *summary, modref_summary_lto *summary_lto,
7798ae1a
JH
2615 bool ipa, vec<eaf_flags_t> &past_flags,
2616 int past_retslot_flags, int past_static_chain_flags)
520d5ad3
JH
2617{
2618 unsigned int parm_index = 0;
2619 unsigned int count = 0;
85ebbabd 2620 int ecf_flags = flags_from_decl_or_type (current_function_decl);
b8ef019a 2621 tree retslot = NULL;
a70c0512 2622 tree static_chain = NULL;
85ebbabd 2623
b8ef019a
JH
2624 /* If there is a return slot, look up its SSA name. */
2625 if (DECL_RESULT (current_function_decl)
2626 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
2627 retslot = ssa_default_def (cfun, DECL_RESULT (current_function_decl));
a70c0512
JH
2628 if (cfun->static_chain_decl)
2629 static_chain = ssa_default_def (cfun, cfun->static_chain_decl);
b8ef019a 2630
520d5ad3
JH
2631 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2632 parm = TREE_CHAIN (parm))
2633 count++;
2634
a70c0512 2635 if (!count && !retslot && !static_chain)
520d5ad3
JH
2636 return;
2637
18f0873d 2638 modref_eaf_analysis eaf_analysis (ipa);
520d5ad3 2639
4898e958
JH
2640 /* Determine all SSA names we need to know flags for. */
2641 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2642 parm = TREE_CHAIN (parm))
2643 {
2644 tree name = ssa_default_def (cfun, parm);
2645 if (name)
2646 eaf_analysis.analyze_ssa_name (name);
2647 }
2648 if (retslot)
2649 eaf_analysis.analyze_ssa_name (retslot);
2650 if (static_chain)
2651 eaf_analysis.analyze_ssa_name (static_chain);
2652
2653 /* Do the dataflow. */
2654 eaf_analysis.propagate ();
2655
e2dd12ab
JH
2656 tree attr = lookup_attribute ("fn spec",
2657 TYPE_ATTRIBUTES
2658 (TREE_TYPE (current_function_decl)));
2659 attr_fnspec fnspec (attr
2660 ? TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))
2661 : "");
2662
2663
4898e958 2664 /* Store results to summaries. */
520d5ad3
JH
2665 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm; parm_index++,
2666 parm = TREE_CHAIN (parm))
2667 {
2668 tree name = ssa_default_def (cfun, parm);
3350e59f
JH
2669 if (!name || has_zero_uses (name))
2670 {
2671 /* We do not track non-SSA parameters,
2672 but we want to track unused gimple_regs. */
2673 if (!is_gimple_reg (parm))
2674 continue;
2675 if (summary)
2676 {
2677 if (parm_index >= summary->arg_flags.length ())
2678 summary->arg_flags.safe_grow_cleared (count, true);
2679 summary->arg_flags[parm_index] = EAF_UNUSED;
2680 }
2681 else if (summary_lto)
2682 {
2683 if (parm_index >= summary_lto->arg_flags.length ())
2684 summary_lto->arg_flags.safe_grow_cleared (count, true);
2685 summary_lto->arg_flags[parm_index] = EAF_UNUSED;
2686 }
2687 continue;
2688 }
18f0873d 2689 int flags = eaf_analysis.get_ssa_name_flags (name);
e2dd12ab
JH
2690 int attr_flags = fnspec.arg_eaf_flags (parm_index);
2691
2692 if (dump_file && (flags | attr_flags) != flags && !(flags & EAF_UNUSED))
2693 {
2694 fprintf (dump_file,
2695 " Flags for param %i combined with fnspec flags:",
2696 (int)parm_index);
2697 dump_eaf_flags (dump_file, attr_flags, false);
2698 fprintf (dump_file, " determined: ");
2699 dump_eaf_flags (dump_file, flags, true);
2700 }
2701 flags |= attr_flags;
85ebbabd 2702
4341b1b1
JH
2703 /* Eliminate useless flags so we do not end up storing unnecessary
2704 summaries. */
2705
2706 flags = remove_useless_eaf_flags
2707 (flags, ecf_flags,
2708 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
7798ae1a
JH
2709 if (past_flags.length () > parm_index)
2710 {
2711 int past = past_flags[parm_index];
2712 past = remove_useless_eaf_flags
2713 (past, ecf_flags,
2714 VOID_TYPE_P (TREE_TYPE
2715 (TREE_TYPE (current_function_decl))));
2716 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
2717 {
2718 fprintf (dump_file,
2719 " Flags for param %i combined with IPA pass:",
2720 (int)parm_index);
2721 dump_eaf_flags (dump_file, past, false);
e2dd12ab
JH
2722 fprintf (dump_file, " determined: ");
2723 dump_eaf_flags (dump_file, flags, true);
7798ae1a
JH
2724 }
2725 if (!(flags & EAF_UNUSED))
2726 flags |= past;
2727 }
520d5ad3
JH
2728
2729 if (flags)
2730 {
85ebbabd
JH
2731 if (summary)
2732 {
2733 if (parm_index >= summary->arg_flags.length ())
2734 summary->arg_flags.safe_grow_cleared (count, true);
2735 summary->arg_flags[parm_index] = flags;
2736 }
2737 else if (summary_lto)
2738 {
2739 if (parm_index >= summary_lto->arg_flags.length ())
2740 summary_lto->arg_flags.safe_grow_cleared (count, true);
2741 summary_lto->arg_flags[parm_index] = flags;
2742 }
18f0873d 2743 eaf_analysis.record_escape_points (name, parm_index, flags);
b8ef019a
JH
2744 }
2745 }
2746 if (retslot)
2747 {
18f0873d 2748 int flags = eaf_analysis.get_ssa_name_flags (retslot);
7798ae1a 2749 int past = past_retslot_flags;
85ebbabd 2750
b8ef019a 2751 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
7798ae1a
JH
2752 past = remove_useless_eaf_flags
2753 (past, ecf_flags,
2754 VOID_TYPE_P (TREE_TYPE
2755 (TREE_TYPE (current_function_decl))));
2756 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
2757 {
2758 fprintf (dump_file,
2759 " Retslot flags combined with IPA pass:");
2760 dump_eaf_flags (dump_file, past, false);
e2dd12ab 2761 fprintf (dump_file, " determined: ");
7798ae1a
JH
2762 dump_eaf_flags (dump_file, flags, true);
2763 }
2764 if (!(flags & EAF_UNUSED))
2765 flags |= past;
b8ef019a
JH
2766 if (flags)
2767 {
2768 if (summary)
2769 summary->retslot_flags = flags;
2770 if (summary_lto)
2771 summary_lto->retslot_flags = flags;
18f0873d 2772 eaf_analysis.record_escape_points (retslot,
1f3a3363 2773 MODREF_RETSLOT_PARM, flags);
520d5ad3
JH
2774 }
2775 }
a70c0512
JH
2776 if (static_chain)
2777 {
18f0873d 2778 int flags = eaf_analysis.get_ssa_name_flags (static_chain);
7798ae1a 2779 int past = past_static_chain_flags;
a70c0512
JH
2780
2781 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
7798ae1a
JH
2782 past = remove_useless_eaf_flags
2783 (past, ecf_flags,
2784 VOID_TYPE_P (TREE_TYPE
2785 (TREE_TYPE (current_function_decl))));
2786 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
2787 {
2788 fprintf (dump_file,
2789 " Static chain flags combined with IPA pass:");
2790 dump_eaf_flags (dump_file, past, false);
e2dd12ab 2791 fprintf (dump_file, " determined: ");
7798ae1a
JH
2792 dump_eaf_flags (dump_file, flags, true);
2793 }
2794 if (!(flags & EAF_UNUSED))
78dd0de9 2795 flags |= past;
a70c0512
JH
2796 if (flags)
2797 {
2798 if (summary)
2799 summary->static_chain_flags = flags;
2800 if (summary_lto)
2801 summary_lto->static_chain_flags = flags;
18f0873d 2802 eaf_analysis.record_escape_points (static_chain,
1f3a3363 2803 MODREF_STATIC_CHAIN_PARM,
18f0873d 2804 flags);
a70c0512
JH
2805 }
2806 }
520d5ad3
JH
2807}
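/* Worked example (exposition only, flags described qualitatively): for

     int foo (int *p) { return *p; }

   the only use of the default definition of P is the load _1 = *p whose
   result is returned.  The analysis above therefore concludes that the
   memory P points to is never written through P, that nothing reachable
   from P escapes and that P itself is not returned directly, while
   EAF_NO_DIRECT_READ can not be set because of the dereference.  The
   resulting flags end up in summary->arg_flags[0].  */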
2808
71dbabcc 2809/* Analyze function F. IPA indicates whether we're running in local mode
494bdadf
JH
2810 (false) or IPA mode (true).
2811 Return true if a cfg fixup is needed after the pass. */
d119f34c 2812
494bdadf 2813static bool
d119f34c
JH
2814analyze_function (function *f, bool ipa)
2815{
494bdadf 2816 bool fixup_cfg = false;
d119f34c 2817 if (dump_file)
67c935c8
JH
2818 fprintf (dump_file, "modref analyzing '%s' (ipa=%i)%s%s\n",
2819 function_name (f), ipa,
2820 TREE_READONLY (current_function_decl) ? " (const)" : "",
2821 DECL_PURE_P (current_function_decl) ? " (pure)" : "");
d119f34c
JH
2822
2823 /* Don't analyze this function if modref is disabled or the function is marked noipa. */
008e7397
JH
2824 if (!flag_ipa_modref
2825 || lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl)))
494bdadf 2826 return false;
d119f34c 2827
d119f34c
JH
2828 /* Compute no-LTO summaries when local optimization is going to happen. */
2829 bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
2830 || (in_lto_p && !flag_wpa
2831 && flag_incremental_link != INCREMENTAL_LINK_LTO));
d119f34c
JH
2832 /* Compute LTO when LTO streaming is going to happen. */
2833 bool lto = ipa && ((flag_lto && !in_lto_p)
2834 || flag_wpa
2835 || flag_incremental_link == INCREMENTAL_LINK_LTO);
71dbabcc
JH
2836 cgraph_node *fnode = cgraph_node::get (current_function_decl);
2837
2838 modref_summary *summary = NULL;
2839 modref_summary_lto *summary_lto = NULL;
2840
f6f704fd
JH
2841 bool past_flags_known = false;
2842 auto_vec <eaf_flags_t> past_flags;
2843 int past_retslot_flags = 0;
2844 int past_static_chain_flags = 0;
2845
71dbabcc
JH
2846 /* Initialize the summary.
2847 If we run in local mode there is possibly pre-existing summary from
2848 IPA pass. Dump it so it is easy to compare if mod-ref info has
2849 improved. */
2850 if (!ipa)
2851 {
2852 if (!optimization_summaries)
2853 optimization_summaries = modref_summaries::create_ggc (symtab);
2854 else /* Remove existing summary if we are re-running the pass. */
2855 {
2856 if (dump_file
56cb815b
JH
2857 && (summary
2858 = optimization_summaries->get (cgraph_node::get (f->decl)))
2859 != NULL
2860 && summary->loads)
71dbabcc
JH
2861 {
2862 fprintf (dump_file, "Past summary:\n");
2863 optimization_summaries->get
2864 (cgraph_node::get (f->decl))->dump (dump_file);
f6f704fd
JH
2865 past_flags.reserve_exact (summary->arg_flags.length ());
2866 past_flags.splice (summary->arg_flags);
2867 past_retslot_flags = summary->retslot_flags;
2868 past_static_chain_flags = summary->static_chain_flags;
2869 past_flags_known = true;
71dbabcc
JH
2870 }
2871 optimization_summaries->remove (cgraph_node::get (f->decl));
2872 }
2873 summary = optimization_summaries->get_create (cgraph_node::get (f->decl));
2874 gcc_checking_assert (nolto && !lto);
2875 }
8a2fd716 2876 /* In IPA mode we analyze every function precisely once. Assert that. */
71dbabcc
JH
2877 else
2878 {
2879 if (nolto)
2880 {
2881 if (!summaries)
2882 summaries = modref_summaries::create_ggc (symtab);
2883 else
2884 summaries->remove (cgraph_node::get (f->decl));
2885 summary = summaries->get_create (cgraph_node::get (f->decl));
2886 }
2887 if (lto)
2888 {
2889 if (!summaries_lto)
2890 summaries_lto = modref_summaries_lto::create_ggc (symtab);
2891 else
2892 summaries_lto->remove (cgraph_node::get (f->decl));
2893 summary_lto = summaries_lto->get_create (cgraph_node::get (f->decl));
2894 }
6cef01c3
JH
2895 if (!fnspec_summaries)
2896 fnspec_summaries = new fnspec_summaries_t (symtab);
85ebbabd
JH
2897 if (!escape_summaries)
2898 escape_summaries = new escape_summaries_t (symtab);
71dbabcc
JH
2899 }
2900
d119f34c
JH
2901
2902 /* Create and initialize summary for F.
2903 Note that summaries may be already allocated from previous
2904 run of the pass. */
2905 if (nolto)
2906 {
2907 gcc_assert (!summary->loads);
c9da53d6 2908 summary->loads = modref_records::create_ggc (param_modref_max_bases,
c33f4742
JH
2909 param_modref_max_refs,
2910 param_modref_max_accesses);
d119f34c 2911 gcc_assert (!summary->stores);
c9da53d6 2912 summary->stores = modref_records::create_ggc (param_modref_max_bases,
c33f4742
JH
2913 param_modref_max_refs,
2914 param_modref_max_accesses);
617695cd 2915 summary->writes_errno = false;
992644c3 2916 summary->side_effects = false;
a34edf9a
JH
2917 summary->nondeterministic = false;
2918 summary->calls_interposable = false;
d119f34c
JH
2919 }
2920 if (lto)
2921 {
71dbabcc
JH
2922 gcc_assert (!summary_lto->loads);
2923 summary_lto->loads = modref_records_lto::create_ggc
c9da53d6 2924 (param_modref_max_bases,
c33f4742
JH
2925 param_modref_max_refs,
2926 param_modref_max_accesses);
71dbabcc
JH
2927 gcc_assert (!summary_lto->stores);
2928 summary_lto->stores = modref_records_lto::create_ggc
c9da53d6 2929 (param_modref_max_bases,
c33f4742
JH
2930 param_modref_max_refs,
2931 param_modref_max_accesses);
6cef01c3 2932 summary_lto->writes_errno = false;
992644c3 2933 summary_lto->side_effects = false;
a34edf9a
JH
2934 summary_lto->nondeterministic = false;
2935 summary_lto->calls_interposable = false;
d119f34c 2936 }
520d5ad3 2937
7798ae1a
JH
2938 analyze_parms (summary, summary_lto, ipa,
2939 past_flags, past_retslot_flags, past_static_chain_flags);
520d5ad3 2940
67c935c8 2941 int ecf_flags = flags_from_decl_or_type (current_function_decl);
ada353b8 2942 auto_vec <gimple *, 32> recursive_calls;
d119f34c
JH
2943
2944 /* Analyze each statement in each basic block of the function. If the
2945 statement cannot be analyzed (for any reason), the entire function cannot
2946 be analyzed by modref. */
2947 basic_block bb;
2948 FOR_EACH_BB_FN (bb, f)
2949 {
2950 gimple_stmt_iterator si;
64f3e71c
JH
2951 bool always_executed
2952 = bb == single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
2953
aa49a058
AO
2954 for (si = gsi_start_nondebug_after_labels_bb (bb);
2955 !gsi_end_p (si); gsi_next_nondebug (&si))
d119f34c 2956 {
71dbabcc 2957 if (!analyze_stmt (summary, summary_lto,
64f3e71c
JH
2958 gsi_stmt (si), ipa, &recursive_calls,
2959 always_executed)
85ebbabd
JH
2960 || ((!summary || !summary->useful_p (ecf_flags, false))
2961 && (!summary_lto
2962 || !summary_lto->useful_p (ecf_flags, false))))
d119f34c 2963 {
520d5ad3
JH
2964 collapse_loads (summary, summary_lto);
2965 collapse_stores (summary, summary_lto);
2966 break;
d119f34c 2967 }
64f3e71c
JH
2968 if (always_executed
2969 && stmt_can_throw_external (cfun, gsi_stmt (si)))
2970 always_executed = false;
d119f34c
JH
2971 }
2972 }
2973
ada353b8
JH
2974 /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
2975 This needs to be done after all other side effects are computed. */
2976 if (!ipa)
2977 {
2978 bool changed = true;
5c85f295 2979 bool first = true;
ada353b8
JH
2980 while (changed)
2981 {
2982 changed = false;
2983 for (unsigned i = 0; i < recursive_calls.length (); i++)
2984 {
2985 changed |= merge_call_side_effects
2986 (summary, recursive_calls[i], summary,
2987 ignore_stores_p (current_function_decl,
2988 gimple_call_flags
56cb815b 2989 (recursive_calls[i])),
64f3e71c 2990 fnode, !first, false);
85ebbabd 2991 if (!summary->useful_p (ecf_flags, false))
ada353b8 2992 {
71dbabcc 2993 remove_summary (lto, nolto, ipa);
494bdadf 2994 return false;
ada353b8
JH
2995 }
2996 }
5c85f295 2997 first = false;
ada353b8
JH
2998 }
2999 }
e0040bc3 3000 if (summary && !summary->side_effects && !finite_function_p ())
494bdadf
JH
3001 summary->side_effects = true;
3002 if (summary_lto && !summary_lto->side_effects && !finite_function_p ())
3003 summary_lto->side_effects = true;
3004
3005 if (!ipa && flag_ipa_pure_const)
3006 {
1b62cddc 3007 if (!summary->stores->every_base && !summary->stores->bases
a34edf9a 3008 && !summary->nondeterministic)
494bdadf 3009 {
a34edf9a
JH
3010 if (!summary->loads->every_base && !summary->loads->bases
3011 && !summary->calls_interposable)
494bdadf
JH
3012 fixup_cfg = ipa_make_function_const
3013 (cgraph_node::get (current_function_decl),
3014 summary->side_effects, true);
3015 else
3016 fixup_cfg = ipa_make_function_pure
3017 (cgraph_node::get (current_function_decl),
3018 summary->side_effects, true);
3019 }
3020 }
71dbabcc
JH
3021 if (summary && !summary->useful_p (ecf_flags))
3022 {
3023 if (!ipa)
3024 optimization_summaries->remove (fnode);
3025 else
3026 summaries->remove (fnode);
3027 summary = NULL;
3028 }
e0040bc3 3029 if (summary)
5aa91072 3030 summary->finalize (current_function_decl);
71dbabcc
JH
3031 if (summary_lto && !summary_lto->useful_p (ecf_flags))
3032 {
3033 summaries_lto->remove (fnode);
3034 summary_lto = NULL;
3035 }
992644c3 3036
85ebbabd
JH
3037 if (ipa && !summary && !summary_lto)
3038 remove_modref_edge_summaries (fnode);
d119f34c
JH
3039
3040 if (dump_file)
3041 {
3042 fprintf (dump_file, " - modref done with result: tracked.\n");
71dbabcc
JH
3043 if (summary)
3044 summary->dump (dump_file);
3045 if (summary_lto)
3046 summary_lto->dump (dump_file);
85ebbabd 3047 dump_modref_edge_summaries (dump_file, fnode, 2);
f6f704fd
JH
3048 /* To simplify debugging, compare IPA and local solutions. */
3049 if (past_flags_known && summary)
3050 {
3051 size_t len = summary->arg_flags.length ();
3052
3053 if (past_flags.length () > len)
3054 len = past_flags.length ();
3055 for (size_t i = 0; i < len; i++)
3056 {
3057 int old_flags = i < past_flags.length () ? past_flags[i] : 0;
3058 int new_flags = i < summary->arg_flags.length ()
3059 ? summary->arg_flags[i] : 0;
3060 old_flags = remove_useless_eaf_flags
3061 (old_flags, flags_from_decl_or_type (current_function_decl),
3062 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3063 if (old_flags != new_flags)
3064 {
7798ae1a
JH
3065 if ((old_flags & ~new_flags) == 0
3066 || (new_flags & EAF_UNUSED))
f6f704fd
JH
3067 fprintf (dump_file, " Flags for param %i improved:",
3068 (int)i);
f6f704fd 3069 else
7798ae1a 3070 gcc_unreachable ();
f6f704fd
JH
3071 dump_eaf_flags (dump_file, old_flags, false);
3072 fprintf (dump_file, " -> ");
3073 dump_eaf_flags (dump_file, new_flags, true);
3074 }
3075 }
3076 past_retslot_flags = remove_useless_eaf_flags
3077 (past_retslot_flags,
3078 flags_from_decl_or_type (current_function_decl),
3079 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3080 if (past_retslot_flags != summary->retslot_flags)
3081 {
7798ae1a
JH
3082 if ((past_retslot_flags & ~summary->retslot_flags) == 0
3083 || (summary->retslot_flags & EAF_UNUSED))
f6f704fd 3084 fprintf (dump_file, " Flags for retslot improved:");
f6f704fd 3085 else
7798ae1a 3086 gcc_unreachable ();
f6f704fd
JH
3087 dump_eaf_flags (dump_file, past_retslot_flags, false);
3088 fprintf (dump_file, " -> ");
3089 dump_eaf_flags (dump_file, summary->retslot_flags, true);
3090 }
3091 past_static_chain_flags = remove_useless_eaf_flags
3092 (past_static_chain_flags,
3093 flags_from_decl_or_type (current_function_decl),
3094 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3095 if (past_static_chain_flags != summary->static_chain_flags)
3096 {
7798ae1a
JH
3097 if ((past_static_chain_flags & ~summary->static_chain_flags) == 0
3098 || (summary->static_chain_flags & EAF_UNUSED))
f6f704fd 3099 fprintf (dump_file, " Flags for static chain improved:");
f6f704fd 3100 else
7798ae1a 3101 gcc_unreachable ();
f6f704fd
JH
3102 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3103 fprintf (dump_file, " -> ");
3104 dump_eaf_flags (dump_file, summary->static_chain_flags, true);
3105 }
3106 }
3107 else if (past_flags_known && !summary)
3108 {
3109 for (size_t i = 0; i < past_flags.length (); i++)
3110 {
3111 int old_flags = past_flags[i];
3112 old_flags = remove_useless_eaf_flags
3113 (old_flags, flags_from_decl_or_type (current_function_decl),
3114 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3115 if (old_flags)
3116 {
3117 fprintf (dump_file, " Flags for param %i worsened:",
3118 (int)i);
3119 dump_eaf_flags (dump_file, old_flags, false);
3120 fprintf (dump_file, " -> \n");
3121 }
3122 }
3123 past_retslot_flags = remove_useless_eaf_flags
3124 (past_retslot_flags,
3125 flags_from_decl_or_type (current_function_decl),
3126 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3127 if (past_retslot_flags)
3128 {
3129 fprintf (dump_file, " Flags for retslot worsened:");
3130 dump_eaf_flags (dump_file, past_retslot_flags, false);
3131 fprintf (dump_file, " ->\n");
3132 }
3133 past_static_chain_flags = remove_useless_eaf_flags
3134 (past_static_chain_flags,
3135 flags_from_decl_or_type (current_function_decl),
3136 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3137 if (past_static_chain_flags)
3138 {
3139 fprintf (dump_file, " Flags for static chain worsened:");
3140 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3141 fprintf (dump_file, " ->\n");
3142 }
3143 }
d119f34c 3144 }
494bdadf 3145 return fixup_cfg;
d119f34c
JH
3146}
3147
3148/* Callback for generate_summary. */
3149
3150static void
3151modref_generate (void)
3152{
3153 struct cgraph_node *node;
3154 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3155 {
3156 function *f = DECL_STRUCT_FUNCTION (node->decl);
3157 if (!f)
3158 continue;
3159 push_cfun (f);
3160 analyze_function (f, true);
3161 pop_cfun ();
3162 }
3163}
3164
18f0873d
JH
3165} /* ANON namespace. */
3166
d70ef656
JH
3167/* Debugging helper. */
3168
3169void
3170debug_eaf_flags (int flags)
3171{
3172 dump_eaf_flags (stderr, flags, true);
3173}
3174
d119f34c
JH
3175/* Called when a new function is inserted to callgraph late. */
3176
3177void
3178modref_summaries::insert (struct cgraph_node *node, modref_summary *)
3179{
56cb815b
JH
3180 /* Local passes ought to be executed by the pass manager. */
3181 if (this == optimization_summaries)
71dbabcc
JH
3182 {
3183 optimization_summaries->remove (node);
56cb815b
JH
3184 return;
3185 }
1a90e99f
JH
3186 if (!DECL_STRUCT_FUNCTION (node->decl)
3187 || !opt_for_fn (node->decl, flag_ipa_modref))
56cb815b 3188 {
71dbabcc 3189 summaries->remove (node);
56cb815b 3190 return;
71dbabcc
JH
3191 }
3192 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
56cb815b 3193 analyze_function (DECL_STRUCT_FUNCTION (node->decl), true);
71dbabcc
JH
3194 pop_cfun ();
3195}
3196
3197/* Called when a new function is inserted to callgraph late. */
3198
3199void
3200modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
3201{
3202 /* We do not support adding new function when IPA information is already
3203 propagated. This is done only by SIMD cloning that is not very
3204 critical. */
3205 if (!DECL_STRUCT_FUNCTION (node->decl)
1a90e99f 3206 || !opt_for_fn (node->decl, flag_ipa_modref)
71dbabcc
JH
3207 || propagated)
3208 {
3209 summaries_lto->remove (node);
3210 return;
3211 }
d119f34c 3212 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
71dbabcc 3213 analyze_function (DECL_STRUCT_FUNCTION (node->decl), true);
d119f34c
JH
3214 pop_cfun ();
3215}
3216
3217/* Called when new clone is inserted to callgraph late. */
3218
3219void
56cb815b 3220modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
d119f34c
JH
3221 modref_summary *src_data,
3222 modref_summary *dst_data)
3223{
8a2fd716 3224 /* Do not duplicate optimization summaries; we do not handle parameter
56cb815b
JH
3225 transforms on them. */
3226 if (this == optimization_summaries)
d119f34c 3227 {
56cb815b
JH
3228 optimization_summaries->remove (dst);
3229 return;
d119f34c 3230 }
56cb815b
JH
3231 dst_data->stores = modref_records::create_ggc
3232 (src_data->stores->max_bases,
3233 src_data->stores->max_refs,
3234 src_data->stores->max_accesses);
3235 dst_data->stores->copy_from (src_data->stores);
3236 dst_data->loads = modref_records::create_ggc
3237 (src_data->loads->max_bases,
3238 src_data->loads->max_refs,
3239 src_data->loads->max_accesses);
3240 dst_data->loads->copy_from (src_data->loads);
64f3e71c
JH
3241 dst_data->kills.reserve_exact (src_data->kills.length ());
3242 dst_data->kills.splice (src_data->kills);
617695cd 3243 dst_data->writes_errno = src_data->writes_errno;
992644c3 3244 dst_data->side_effects = src_data->side_effects;
a34edf9a
JH
3245 dst_data->nondeterministic = src_data->nondeterministic;
3246 dst_data->calls_interposable = src_data->calls_interposable;
5962efe9
JH
3247 if (src_data->arg_flags.length ())
3248 dst_data->arg_flags = src_data->arg_flags.copy ();
b8ef019a 3249 dst_data->retslot_flags = src_data->retslot_flags;
a70c0512 3250 dst_data->static_chain_flags = src_data->static_chain_flags;
71dbabcc
JH
3251}
3252
3253/* Called when new clone is inserted to callgraph late. */
3254
3255void
3256modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
3257 modref_summary_lto *src_data,
3258 modref_summary_lto *dst_data)
3259{
8a2fd716 3260 /* Be sure that no further cloning happens after ipa-modref. If it does
fe90c504
JH
3261 we will need to update signatures for possible param changes. */
3262 gcc_checking_assert (!((modref_summaries_lto *)summaries_lto)->propagated);
56cb815b
JH
3263 dst_data->stores = modref_records_lto::create_ggc
3264 (src_data->stores->max_bases,
3265 src_data->stores->max_refs,
3266 src_data->stores->max_accesses);
3267 dst_data->stores->copy_from (src_data->stores);
3268 dst_data->loads = modref_records_lto::create_ggc
3269 (src_data->loads->max_bases,
3270 src_data->loads->max_refs,
3271 src_data->loads->max_accesses);
3272 dst_data->loads->copy_from (src_data->loads);
6cef01c3 3273 dst_data->writes_errno = src_data->writes_errno;
992644c3 3274 dst_data->side_effects = src_data->side_effects;
a34edf9a
JH
3275 dst_data->nondeterministic = src_data->nondeterministic;
3276 dst_data->calls_interposable = src_data->calls_interposable;
5962efe9
JH
3277 if (src_data->arg_flags.length ())
3278 dst_data->arg_flags = src_data->arg_flags.copy ();
b8ef019a 3279 dst_data->retslot_flags = src_data->retslot_flags;
a70c0512 3280 dst_data->static_chain_flags = src_data->static_chain_flags;
d119f34c
JH
3281}
3282
3283namespace
3284{
3285/* Definition of the modref pass on GIMPLE. */
3286const pass_data pass_data_modref = {
3287 GIMPLE_PASS,
3288 "modref",
3289 OPTGROUP_IPA,
3290 TV_TREE_MODREF,
3291 (PROP_cfg | PROP_ssa),
3292 0,
3293 0,
3294 0,
3295 0,
3296};
3297
3298class pass_modref : public gimple_opt_pass
3299{
3300 public:
3301 pass_modref (gcc::context *ctxt)
3302 : gimple_opt_pass (pass_data_modref, ctxt) {}
3303
d119f34c
JH
3304 /* opt_pass methods: */
3305 opt_pass *clone ()
3306 {
3307 return new pass_modref (m_ctxt);
3308 }
3309 virtual bool gate (function *)
3310 {
3311 return flag_ipa_modref;
3312 }
3313 virtual unsigned int execute (function *);
3314};
3315
3316/* Encode TT to the output block OB using the summary streaming API. */
3317
3318static void
3319write_modref_records (modref_records_lto *tt, struct output_block *ob)
3320{
3321 streamer_write_uhwi (ob, tt->max_bases);
3322 streamer_write_uhwi (ob, tt->max_refs);
c33f4742 3323 streamer_write_uhwi (ob, tt->max_accesses);
d119f34c
JH
3324
3325 streamer_write_uhwi (ob, tt->every_base);
3326 streamer_write_uhwi (ob, vec_safe_length (tt->bases));
3327 size_t i;
3328 modref_base_node <tree> *base_node;
3329 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
3330 {
3331 stream_write_tree (ob, base_node->base, true);
3332
3333 streamer_write_uhwi (ob, base_node->every_ref);
3334 streamer_write_uhwi (ob, vec_safe_length (base_node->refs));
c33f4742 3335
d119f34c
JH
3336 size_t j;
3337 modref_ref_node <tree> *ref_node;
3338 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
3339 {
3340 stream_write_tree (ob, ref_node->ref, true);
c33f4742
JH
3341 streamer_write_uhwi (ob, ref_node->every_access);
3342 streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));
3343
3344 size_t k;
3345 modref_access_node *access_node;
3346 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
c34db4b6 3347 {
56cb815b 3348 streamer_write_hwi (ob, access_node->parm_index);
c34db4b6
JH
3349 if (access_node->parm_index != -1)
3350 {
3351 streamer_write_uhwi (ob, access_node->parm_offset_known);
3352 if (access_node->parm_offset_known)
3353 {
3354 streamer_write_poly_int64 (ob, access_node->parm_offset);
3355 streamer_write_poly_int64 (ob, access_node->offset);
3356 streamer_write_poly_int64 (ob, access_node->size);
3357 streamer_write_poly_int64 (ob, access_node->max_size);
3358 }
3359 }
3360 }
d119f34c
JH
3361 }
3362 }
3363}
3364
3365/* Read a modref_tree from the input block IB using the data from DATA_IN.
 3366   This assumes that the tree was encoded using write_modref_records.
 3367   Either nolto_ret or lto_ret is initialized by the tree depending on whether
46a27415 3368 LTO streaming is expected or not. */
d119f34c 3369
18f0873d 3370static void
d119f34c
JH
3371read_modref_records (lto_input_block *ib, struct data_in *data_in,
3372 modref_records **nolto_ret,
3373 modref_records_lto **lto_ret)
3374{
3375 size_t max_bases = streamer_read_uhwi (ib);
3376 size_t max_refs = streamer_read_uhwi (ib);
c33f4742 3377 size_t max_accesses = streamer_read_uhwi (ib);
d119f34c 3378
71dbabcc 3379 if (lto_ret)
c33f4742
JH
3380 *lto_ret = modref_records_lto::create_ggc (max_bases, max_refs,
3381 max_accesses);
71dbabcc 3382 if (nolto_ret)
c33f4742
JH
3383 *nolto_ret = modref_records::create_ggc (max_bases, max_refs,
3384 max_accesses);
71dbabcc 3385 gcc_checking_assert (lto_ret || nolto_ret);
d119f34c
JH
3386
3387 size_t every_base = streamer_read_uhwi (ib);
3388 size_t nbase = streamer_read_uhwi (ib);
3389
3390 gcc_assert (!every_base || nbase == 0);
3391 if (every_base)
3392 {
71dbabcc 3393 if (nolto_ret)
d119f34c 3394 (*nolto_ret)->collapse ();
71dbabcc 3395 if (lto_ret)
d119f34c
JH
3396 (*lto_ret)->collapse ();
3397 }
3398 for (size_t i = 0; i < nbase; i++)
3399 {
3400 tree base_tree = stream_read_tree (ib, data_in);
3401 modref_base_node <alias_set_type> *nolto_base_node = NULL;
3402 modref_base_node <tree> *lto_base_node = NULL;
3403
 3404	  /* At stream-in time we have LTO alias info.  Check if we streamed in
 3405	     something obviously unnecessary.  Do not glob types by alias sets;
 3406	     it is not 100% clear that ltrans types will get merged the same way.
 3407	     Types may get refined based on ODR type conflicts.  */
3408 if (base_tree && !get_alias_set (base_tree))
3409 {
3410 if (dump_file)
3411 {
3412 fprintf (dump_file, "Streamed in alias set 0 type ");
3413 print_generic_expr (dump_file, base_tree);
3414 fprintf (dump_file, "\n");
3415 }
3416 base_tree = NULL;
3417 }
3418
71dbabcc 3419 if (nolto_ret)
d119f34c
JH
3420 nolto_base_node = (*nolto_ret)->insert_base (base_tree
3421 ? get_alias_set (base_tree)
e28ac73a 3422 : 0, 0);
71dbabcc 3423 if (lto_ret)
e28ac73a 3424 lto_base_node = (*lto_ret)->insert_base (base_tree, 0);
d119f34c
JH
3425 size_t every_ref = streamer_read_uhwi (ib);
3426 size_t nref = streamer_read_uhwi (ib);
3427
3428 gcc_assert (!every_ref || nref == 0);
3429 if (every_ref)
3430 {
3431 if (nolto_base_node)
3432 nolto_base_node->collapse ();
3433 if (lto_base_node)
3434 lto_base_node->collapse ();
3435 }
3436 for (size_t j = 0; j < nref; j++)
3437 {
3438 tree ref_tree = stream_read_tree (ib, data_in);
3439
3440 if (ref_tree && !get_alias_set (ref_tree))
3441 {
3442 if (dump_file)
3443 {
3444 fprintf (dump_file, "Streamed in alias set 0 type ");
3445 print_generic_expr (dump_file, ref_tree);
3446 fprintf (dump_file, "\n");
3447 }
c33f4742 3448 ref_tree = NULL;
d119f34c
JH
3449 }
3450
c33f4742
JH
3451 modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
3452 modref_ref_node <tree> *lto_ref_node = NULL;
3453
d119f34c 3454 if (nolto_base_node)
c33f4742
JH
3455 nolto_ref_node
3456 = nolto_base_node->insert_ref (ref_tree
3457 ? get_alias_set (ref_tree) : 0,
3458 max_refs);
d119f34c 3459 if (lto_base_node)
c33f4742
JH
3460 lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);
3461
3462 size_t every_access = streamer_read_uhwi (ib);
3463 size_t naccesses = streamer_read_uhwi (ib);
3464
3465 if (nolto_ref_node)
3466 nolto_ref_node->every_access = every_access;
3467 if (lto_ref_node)
3468 lto_ref_node->every_access = every_access;
3469
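	  /* Decode the accesses.  The offset, size and max_size fields were
	     streamed only when the access is tied to a parameter with a known
	     offset, mirroring write_modref_records.  */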
3470 for (size_t k = 0; k < naccesses; k++)
3471 {
56cb815b 3472 int parm_index = streamer_read_hwi (ib);
c34db4b6
JH
3473 bool parm_offset_known = false;
3474 poly_int64 parm_offset = 0;
3475 poly_int64 offset = 0;
3476 poly_int64 size = -1;
3477 poly_int64 max_size = -1;
3478
3479 if (parm_index != -1)
3480 {
3481 parm_offset_known = streamer_read_uhwi (ib);
3482 if (parm_offset_known)
3483 {
3484 parm_offset = streamer_read_poly_int64 (ib);
3485 offset = streamer_read_poly_int64 (ib);
3486 size = streamer_read_poly_int64 (ib);
3487 max_size = streamer_read_poly_int64 (ib);
3488 }
3489 }
3490 modref_access_node a = {offset, size, max_size, parm_offset,
5c85f295 3491 parm_index, parm_offset_known, false};
c33f4742 3492 if (nolto_ref_node)
5c85f295 3493 nolto_ref_node->insert_access (a, max_accesses, false);
c33f4742 3494 if (lto_ref_node)
5c85f295 3495 lto_ref_node->insert_access (a, max_accesses, false);
c33f4742 3496 }
d119f34c
JH
3497 }
3498 }
71dbabcc 3499 if (lto_ret)
c33f4742 3500 (*lto_ret)->cleanup ();
71dbabcc 3501 if (nolto_ret)
c33f4742 3502 (*nolto_ret)->cleanup ();
d119f34c
JH
3503}
3504
85ebbabd
JH
3505/* Write ESUM to BP. */
3506
3507static void
3508modref_write_escape_summary (struct bitpack_d *bp, escape_summary *esum)
3509{
3510 if (!esum)
3511 {
3512 bp_pack_var_len_unsigned (bp, 0);
3513 return;
3514 }
3515 bp_pack_var_len_unsigned (bp, esum->esc.length ());
3516 unsigned int i;
3517 escape_entry *ee;
3518 FOR_EACH_VEC_ELT (esum->esc, i, ee)
3519 {
b8ef019a 3520 bp_pack_var_len_int (bp, ee->parm_index);
85ebbabd
JH
3521 bp_pack_var_len_unsigned (bp, ee->arg);
3522 bp_pack_var_len_unsigned (bp, ee->min_flags);
3523 bp_pack_value (bp, ee->direct, 1);
3524 }
3525}
3526
3527/* Read escape summary for E from BP. */
3528
3529static void
3530modref_read_escape_summary (struct bitpack_d *bp, cgraph_edge *e)
3531{
3532 unsigned int n = bp_unpack_var_len_unsigned (bp);
3533 if (!n)
3534 return;
3535 escape_summary *esum = escape_summaries->get_create (e);
3536 esum->esc.reserve_exact (n);
3537 for (unsigned int i = 0; i < n; i++)
3538 {
3539 escape_entry ee;
b8ef019a 3540 ee.parm_index = bp_unpack_var_len_int (bp);
85ebbabd
JH
3541 ee.arg = bp_unpack_var_len_unsigned (bp);
3542 ee.min_flags = bp_unpack_var_len_unsigned (bp);
3543 ee.direct = bp_unpack_value (bp, 1);
3544 esum->esc.quick_push (ee);
3545 }
3546}
3547
d119f34c
JH
3548/* Callback for write_summary. */
3549
3550static void
3551modref_write ()
3552{
3553 struct output_block *ob = create_output_block (LTO_section_ipa_modref);
3554 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3555 unsigned int count = 0;
3556 int i;
3557
71dbabcc 3558 if (!summaries_lto)
d119f34c
JH
3559 {
3560 streamer_write_uhwi (ob, 0);
3561 streamer_write_char_stream (ob->main_stream, 0);
3562 produce_asm (ob, NULL);
3563 destroy_output_block (ob);
3564 return;
3565 }
3566
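  /* First count the nodes with useful summaries; the count is streamed
     before the per-node records.  */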
3567 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3568 {
3569 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3570 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
71dbabcc 3571 modref_summary_lto *r;
d119f34c
JH
3572
3573 if (cnode && cnode->definition && !cnode->alias
71dbabcc
JH
3574 && (r = summaries_lto->get (cnode))
3575 && r->useful_p (flags_from_decl_or_type (cnode->decl)))
d119f34c
JH
3576 count++;
3577 }
3578 streamer_write_uhwi (ob, count);
3579
3580 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3581 {
3582 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3583 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
3584
3585 if (cnode && cnode->definition && !cnode->alias)
3586 {
71dbabcc 3587 modref_summary_lto *r = summaries_lto->get (cnode);
d119f34c 3588
71dbabcc 3589 if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
d119f34c
JH
3590 continue;
3591
3592 streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));
3593
85ebbabd
JH
3594 streamer_write_uhwi (ob, r->arg_flags.length ());
3595 for (unsigned int i = 0; i < r->arg_flags.length (); i++)
8da8ed43 3596 streamer_write_uhwi (ob, r->arg_flags[i]);
b8ef019a 3597 streamer_write_uhwi (ob, r->retslot_flags);
a70c0512 3598 streamer_write_uhwi (ob, r->static_chain_flags);
85ebbabd 3599
56cb815b
JH
3600 write_modref_records (r->loads, ob);
3601 write_modref_records (r->stores, ob);
6cef01c3
JH
3602
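	  /* Pack the flag bits and, outside of WPA, the per-edge fnspec and
	     escape summaries into one bitpack; the order here must match the
	     unpacking order in read_section.  */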
3603 struct bitpack_d bp = bitpack_create (ob->main_stream);
3604 bp_pack_value (&bp, r->writes_errno, 1);
992644c3 3605 bp_pack_value (&bp, r->side_effects, 1);
a34edf9a
JH
3606 bp_pack_value (&bp, r->nondeterministic, 1);
3607 bp_pack_value (&bp, r->calls_interposable, 1);
6cef01c3
JH
3608 if (!flag_wpa)
3609 {
3610 for (cgraph_edge *e = cnode->indirect_calls;
3611 e; e = e->next_callee)
3612 {
3613 class fnspec_summary *sum = fnspec_summaries->get (e);
3614 bp_pack_value (&bp, sum != NULL, 1);
3615 if (sum)
3616 bp_pack_string (ob, &bp, sum->fnspec, true);
85ebbabd
JH
3617 class escape_summary *esum = escape_summaries->get (e);
3618 modref_write_escape_summary (&bp,esum);
6cef01c3
JH
3619 }
3620 for (cgraph_edge *e = cnode->callees; e; e = e->next_callee)
3621 {
3622 class fnspec_summary *sum = fnspec_summaries->get (e);
3623 bp_pack_value (&bp, sum != NULL, 1);
3624 if (sum)
3625 bp_pack_string (ob, &bp, sum->fnspec, true);
85ebbabd
JH
3626 class escape_summary *esum = escape_summaries->get (e);
3627 modref_write_escape_summary (&bp,esum);
6cef01c3
JH
3628 }
3629 }
3630 streamer_write_bitpack (&bp);
d119f34c
JH
3631 }
3632 }
3633 streamer_write_char_stream (ob->main_stream, 0);
3634 produce_asm (ob, NULL);
3635 destroy_output_block (ob);
3636}
3637
3638static void
3639read_section (struct lto_file_decl_data *file_data, const char *data,
3640 size_t len)
3641{
3642 const struct lto_function_header *header
3643 = (const struct lto_function_header *) data;
3644 const int cfg_offset = sizeof (struct lto_function_header);
3645 const int main_offset = cfg_offset + header->cfg_size;
3646 const int string_offset = main_offset + header->main_size;
3647 struct data_in *data_in;
3648 unsigned int i;
3649 unsigned int f_count;
3650
3651 lto_input_block ib ((const char *) data + main_offset, header->main_size,
3652 file_data->mode_table);
3653
3654 data_in
3655 = lto_data_in_create (file_data, (const char *) data + string_offset,
3656 header->string_size, vNULL);
3657 f_count = streamer_read_uhwi (&ib);
3658 for (i = 0; i < f_count; i++)
3659 {
3660 struct cgraph_node *node;
3661 lto_symtab_encoder_t encoder;
3662
3663 unsigned int index = streamer_read_uhwi (&ib);
3664 encoder = file_data->symtab_node_encoder;
3665 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
3666 index));
3667
71dbabcc
JH
3668 modref_summary *modref_sum = summaries
3669 ? summaries->get_create (node) : NULL;
3670 modref_summary_lto *modref_sum_lto = summaries_lto
3671 ? summaries_lto->get_create (node)
3672 : NULL;
71dbabcc
JH
3673 if (optimization_summaries)
3674 modref_sum = optimization_summaries->get_create (node);
3675
ea937e7d 3676 if (modref_sum)
992644c3
JH
3677 {
3678 modref_sum->writes_errno = false;
3679 modref_sum->side_effects = false;
a34edf9a
JH
3680 modref_sum->nondeterministic = false;
3681 modref_sum->calls_interposable = false;
992644c3 3682 }
6cef01c3 3683 if (modref_sum_lto)
992644c3
JH
3684 {
3685 modref_sum_lto->writes_errno = false;
3686 modref_sum_lto->side_effects = false;
a34edf9a
JH
3687 modref_sum_lto->nondeterministic = false;
3688 modref_sum_lto->calls_interposable = false;
992644c3 3689 }
ea937e7d 3690
71dbabcc
JH
3691 gcc_assert (!modref_sum || (!modref_sum->loads
3692 && !modref_sum->stores));
3693 gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads
3694 && !modref_sum_lto->stores));
85ebbabd
JH
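      /* Read the per-argument EAF flags followed by the return slot and
	 static chain flags, in the same order modref_write streamed them.  */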
3695 unsigned int args = streamer_read_uhwi (&ib);
3696 if (args && modref_sum)
3697 modref_sum->arg_flags.reserve_exact (args);
3698 if (args && modref_sum_lto)
3699 modref_sum_lto->arg_flags.reserve_exact (args);
3700 for (unsigned int i = 0; i < args; i++)
3701 {
8da8ed43 3702 eaf_flags_t flags = streamer_read_uhwi (&ib);
85ebbabd
JH
3703 if (modref_sum)
3704 modref_sum->arg_flags.quick_push (flags);
3705 if (modref_sum_lto)
3706 modref_sum_lto->arg_flags.quick_push (flags);
3707 }
b8ef019a
JH
3708 eaf_flags_t flags = streamer_read_uhwi (&ib);
3709 if (modref_sum)
3710 modref_sum->retslot_flags = flags;
3711 if (modref_sum_lto)
3712 modref_sum_lto->retslot_flags = flags;
a70c0512
JH
3713
3714 flags = streamer_read_uhwi (&ib);
3715 if (modref_sum)
3716 modref_sum->static_chain_flags = flags;
3717 if (modref_sum_lto)
3718 modref_sum_lto->static_chain_flags = flags;
3719
56cb815b
JH
3720 read_modref_records (&ib, data_in,
3721 modref_sum ? &modref_sum->loads : NULL,
3722 modref_sum_lto ? &modref_sum_lto->loads : NULL);
3723 read_modref_records (&ib, data_in,
3724 modref_sum ? &modref_sum->stores : NULL,
3725 modref_sum_lto ? &modref_sum_lto->stores : NULL);
6cef01c3
JH
3726 struct bitpack_d bp = streamer_read_bitpack (&ib);
3727 if (bp_unpack_value (&bp, 1))
3728 {
3729 if (modref_sum)
3730 modref_sum->writes_errno = true;
3731 if (modref_sum_lto)
3732 modref_sum_lto->writes_errno = true;
3733 }
992644c3
JH
3734 if (bp_unpack_value (&bp, 1))
3735 {
3736 if (modref_sum)
3737 modref_sum->side_effects = true;
3738 if (modref_sum_lto)
3739 modref_sum_lto->side_effects = true;
3740 }
a34edf9a
JH
3741 if (bp_unpack_value (&bp, 1))
3742 {
3743 if (modref_sum)
3744 modref_sum->nondeterministic = true;
3745 if (modref_sum_lto)
3746 modref_sum_lto->nondeterministic = true;
3747 }
3748 if (bp_unpack_value (&bp, 1))
3749 {
3750 if (modref_sum)
3751 modref_sum->calls_interposable = true;
3752 if (modref_sum_lto)
3753 modref_sum_lto->calls_interposable = true;
3754 }
6cef01c3
JH
3755 if (!flag_ltrans)
3756 {
3757 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
3758 {
3759 if (bp_unpack_value (&bp, 1))
3760 {
3761 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3762 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3763 }
85ebbabd 3764 modref_read_escape_summary (&bp, e);
6cef01c3
JH
3765 }
3766 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
3767 {
3768 if (bp_unpack_value (&bp, 1))
3769 {
3770 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3771 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3772 }
85ebbabd 3773 modref_read_escape_summary (&bp, e);
6cef01c3
JH
3774 }
3775 }
e0040bc3 3776 if (flag_ltrans)
5aa91072 3777 modref_sum->finalize (node->decl);
d119f34c
JH
3778 if (dump_file)
3779 {
3780 fprintf (dump_file, "Read modref for %s\n",
3781 node->dump_name ());
71dbabcc
JH
3782 if (modref_sum)
3783 modref_sum->dump (dump_file);
3784 if (modref_sum_lto)
3785 modref_sum_lto->dump (dump_file);
85ebbabd 3786 dump_modref_edge_summaries (dump_file, node, 4);
d119f34c 3787 }
d119f34c
JH
3788 }
3789
3790 lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
3791 len);
3792 lto_data_in_delete (data_in);
3793}
3794
3795/* Callback for read_summary. */
3796
3797static void
3798modref_read (void)
3799{
3800 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3801 struct lto_file_decl_data *file_data;
3802 unsigned int j = 0;
3803
71dbabcc
JH
3804 gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto);
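  /* At ltrans time only the optimization summaries are needed; otherwise
     create the LTO streaming summaries and/or the local summaries depending
     on the WPA and fat-LTO configuration checked below.  */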
3805 if (flag_ltrans)
3806 optimization_summaries = modref_summaries::create_ggc (symtab);
3807 else
3808 {
3809 if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
3810 summaries_lto = modref_summaries_lto::create_ggc (symtab);
3811 if (!flag_wpa
3812 || (flag_incremental_link == INCREMENTAL_LINK_LTO
3813 && flag_fat_lto_objects))
3814 summaries = modref_summaries::create_ggc (symtab);
6cef01c3
JH
3815 if (!fnspec_summaries)
3816 fnspec_summaries = new fnspec_summaries_t (symtab);
85ebbabd
JH
3817 if (!escape_summaries)
3818 escape_summaries = new escape_summaries_t (symtab);
71dbabcc 3819 }
d119f34c
JH
3820
3821 while ((file_data = file_data_vec[j++]))
3822 {
3823 size_t len;
3824 const char *data = lto_get_summary_section_data (file_data,
3825 LTO_section_ipa_modref,
3826 &len);
3827 if (data)
3828 read_section (file_data, data, len);
3829 else
 3830	      /* Fatal error here.  We do not want to support compiling ltrans units
 3831		 with a different version of the compiler or different flags than
 3832		 the WPA unit, so this should never happen.  */
3833 fatal_error (input_location,
3834 "IPA modref summary is missing in input file");
3835 }
3836}
3837
85ebbabd
JH
3838/* Recompute arg_flags for param adjustments in INFO. */
3839
3840static void
8da8ed43 3841remap_arg_flags (auto_vec <eaf_flags_t> &arg_flags, clone_info *info)
85ebbabd 3842{
8da8ed43 3843 auto_vec<eaf_flags_t> old = arg_flags.copy ();
85ebbabd
JH
3844 int max = -1;
3845 size_t i;
3846 ipa_adjusted_param *p;
3847
3848 arg_flags.release ();
3849
3850 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
3851 {
3852 int o = info->param_adjustments->get_original_index (i);
3853 if (o >= 0 && (int)old.length () > o && old[o])
3854 max = i;
3855 }
5962efe9 3856 if (max >= 0)
85ebbabd
JH
3857 arg_flags.safe_grow_cleared (max + 1, true);
3858 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
3859 {
3860 int o = info->param_adjustments->get_original_index (i);
3861 if (o >= 0 && (int)old.length () > o && old[o])
3862 arg_flags[i] = old[o];
3863 }
3864}
3865
c8fd2be1
JH
3866/* If signature changed, update the summary. */
3867
fe90c504
JH
3868static void
3869update_signature (struct cgraph_node *node)
c8fd2be1 3870{
ae7a23a3
JH
3871 clone_info *info = clone_info::get (node);
3872 if (!info || !info->param_adjustments)
fe90c504
JH
3873 return;
3874
3875 modref_summary *r = optimization_summaries
3876 ? optimization_summaries->get (node) : NULL;
3877 modref_summary_lto *r_lto = summaries_lto
3878 ? summaries_lto->get (node) : NULL;
3879 if (!r && !r_lto)
3880 return;
c8fd2be1
JH
3881 if (dump_file)
3882 {
3883 fprintf (dump_file, "Updating summary for %s from:\n",
3884 node->dump_name ());
85ebbabd
JH
3885 if (r)
3886 r->dump (dump_file);
3887 if (r_lto)
3888 r_lto->dump (dump_file);
c8fd2be1
JH
3889 }
3890
3891 size_t i, max = 0;
3892 ipa_adjusted_param *p;
3893
ae7a23a3 3894 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
c8fd2be1 3895 {
ae7a23a3 3896 int idx = info->param_adjustments->get_original_index (i);
c8fd2be1
JH
3897 if (idx > (int)max)
3898 max = idx;
3899 }
3900
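  /* Build a map from original parameter indices to the new ones;
     parameters that were removed stay mapped to MODREF_UNKNOWN_PARM.  */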
3901 auto_vec <int, 32> map;
3902
5d2cedaa 3903 map.reserve (max + 1);
c8fd2be1 3904 for (i = 0; i <= max; i++)
992644c3 3905 map.quick_push (MODREF_UNKNOWN_PARM);
ae7a23a3 3906 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
c8fd2be1 3907 {
ae7a23a3 3908 int idx = info->param_adjustments->get_original_index (i);
c8fd2be1 3909 if (idx >= 0)
2f61125f 3910 map[idx] = i;
c8fd2be1 3911 }
fe90c504
JH
3912 if (r)
3913 {
3914 r->loads->remap_params (&map);
3915 r->stores->remap_params (&map);
64f3e71c
JH
 3916	  /* TODO: Once we do IPA kills analysis, update the table here.  */
3917 r->kills.release ();
85ebbabd
JH
3918 if (r->arg_flags.length ())
3919 remap_arg_flags (r->arg_flags, info);
fe90c504
JH
3920 }
3921 if (r_lto)
3922 {
3923 r_lto->loads->remap_params (&map);
3924 r_lto->stores->remap_params (&map);
64f3e71c
JH
 3925	  /* TODO: Once we do IPA kills analysis, update the table here.  */
3926 r_lto->kills.release ();
85ebbabd
JH
3927 if (r_lto->arg_flags.length ())
3928 remap_arg_flags (r_lto->arg_flags, info);
fe90c504 3929 }
c8fd2be1
JH
3930 if (dump_file)
3931 {
3932 fprintf (dump_file, "to:\n");
fe90c504 3933 if (r)
6cef01c3 3934 r->dump (dump_file);
fe90c504 3935 if (r_lto)
6cef01c3 3936 r_lto->dump (dump_file);
c8fd2be1 3937 }
e0040bc3 3938 if (r)
5aa91072 3939 r->finalize (node->decl);
fe90c504 3940 return;
c8fd2be1
JH
3941}
3942
d119f34c
JH
3943/* Definition of the modref IPA pass. */
3944const pass_data pass_data_ipa_modref =
3945{
3946 IPA_PASS, /* type */
3947 "modref", /* name */
3948 OPTGROUP_IPA, /* optinfo_flags */
3949 TV_IPA_MODREF, /* tv_id */
3950 0, /* properties_required */
3951 0, /* properties_provided */
3952 0, /* properties_destroyed */
3953 0, /* todo_flags_start */
3954 ( TODO_dump_symtab ), /* todo_flags_finish */
3955};
3956
3957class pass_ipa_modref : public ipa_opt_pass_d
3958{
3959public:
3960 pass_ipa_modref (gcc::context *ctxt)
3961 : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
3962 modref_generate, /* generate_summary */
3963 modref_write, /* write_summary */
3964 modref_read, /* read_summary */
3965 modref_write, /* write_optimization_summary */
3966 modref_read, /* read_optimization_summary */
3967 NULL, /* stmt_fixup */
3968 0, /* function_transform_todo_flags_start */
fe90c504 3969 NULL, /* function_transform */
d119f34c
JH
3970 NULL) /* variable_transform */
3971 {}
3972
3973 /* opt_pass methods: */
3974 opt_pass *clone () { return new pass_ipa_modref (m_ctxt); }
3975 virtual bool gate (function *)
3976 {
3977 return true;
3978 }
3979 virtual unsigned int execute (function *);
3980
3981};
3982
3983}
3984
3985unsigned int pass_modref::execute (function *f)
3986{
494bdadf
JH
3987 if (analyze_function (f, false))
3988 return execute_fixup_cfg ();
d119f34c
JH
3989 return 0;
3990}
3991
3992gimple_opt_pass *
3993make_pass_modref (gcc::context *ctxt)
3994{
3995 return new pass_modref (ctxt);
3996}
3997
3998ipa_opt_pass_d *
3999make_pass_ipa_modref (gcc::context *ctxt)
4000{
4001 return new pass_ipa_modref (ctxt);
4002}
4003
18f0873d
JH
4004namespace {
4005
d119f34c
JH
 4006/* Skip edges from and to nodes without modref summaries.
 4007   Ignore unavailable symbols.  */
4008
4009static bool
4010ignore_edge (struct cgraph_edge *e)
4011{
87d75a11
JH
4012 /* We merge summaries of inline clones into summaries of functions they
4013 are inlined to. For that reason the complete function bodies must
 4014	     act as a unit.  */
4015 if (!e->inline_failed)
4016 return false;
d119f34c
JH
4017 enum availability avail;
4018 cgraph_node *callee = e->callee->function_or_virtual_thunk_symbol
4019 (&avail, e->caller);
4020
4021 return (avail <= AVAIL_INTERPOSABLE
56cb815b 4022 || ((!optimization_summaries || !optimization_summaries->get (callee))
494bdadf 4023 && (!summaries_lto || !summaries_lto->get (callee))));
d119f34c
JH
4024}
4025
8a2fd716 4026/* Compute parm_map for CALLEE_EDGE. */
d119f34c 4027
6cef01c3 4028static bool
c34db4b6 4029compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
ada353b8
JH
4030{
4031 class ipa_edge_args *args;
4032 if (ipa_node_params_sum
4033 && !callee_edge->call_stmt_cannot_inline_p
a4a3cdd0 4034 && (args = ipa_edge_args_sum->get (callee_edge)) != NULL)
ada353b8
JH
4035 {
4036 int i, count = ipa_get_cs_argument_count (args);
4037 class ipa_node_params *caller_parms_info, *callee_pi;
4038 class ipa_call_summary *es
4039 = ipa_call_summaries->get (callee_edge);
4040 cgraph_node *callee
4041 = callee_edge->callee->function_or_virtual_thunk_symbol
4042 (NULL, callee_edge->caller);
4043
a4a3cdd0
MJ
4044 caller_parms_info
4045 = ipa_node_params_sum->get (callee_edge->caller->inlined_to
4046 ? callee_edge->caller->inlined_to
4047 : callee_edge->caller);
4048 callee_pi = ipa_node_params_sum->get (callee);
ada353b8 4049
520d5ad3 4050 (*parm_map).safe_grow_cleared (count, true);
ada353b8
JH
4051
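      /* For every argument of the call, use the jump functions to find the
	 caller parameter (and offset) it is based on; arguments that cannot
	 be tracked keep parm_index -1.  */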
4052 for (i = 0; i < count; i++)
4053 {
4054 if (es && es->param[i].points_to_local_or_readonly_memory)
4055 {
1f3a3363 4056 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
ada353b8
JH
4057 continue;
4058 }
4059
4060 struct ipa_jump_func *jf
4061 = ipa_get_ith_jump_func (args, i);
899c10c9 4062 if (jf && callee_pi)
ada353b8
JH
4063 {
4064 tree cst = ipa_value_from_jfunc (caller_parms_info,
4065 jf,
4066 ipa_get_type
4067 (callee_pi, i));
4068 if (cst && points_to_local_or_readonly_memory_p (cst))
4069 {
1f3a3363 4070 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
ada353b8
JH
4071 continue;
4072 }
4073 }
4074 if (jf && jf->type == IPA_JF_PASS_THROUGH)
4075 {
c34db4b6 4076 (*parm_map)[i].parm_index
56cb815b 4077 = ipa_get_jf_pass_through_formal_id (jf);
4d90edb9
JH
4078 if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
4079 {
4080 (*parm_map)[i].parm_offset_known = true;
4081 (*parm_map)[i].parm_offset = 0;
4082 }
4083 else if (ipa_get_jf_pass_through_operation (jf)
4084 == POINTER_PLUS_EXPR
4085 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
4086 &(*parm_map)[i].parm_offset))
4087 (*parm_map)[i].parm_offset_known = true;
4088 else
4089 (*parm_map)[i].parm_offset_known = false;
ada353b8
JH
4090 continue;
4091 }
4092 if (jf && jf->type == IPA_JF_ANCESTOR)
c34db4b6
JH
4093 {
4094 (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
4095 (*parm_map)[i].parm_offset_known = true;
c8fd2be1
JH
4096 gcc_checking_assert
4097 (!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)));
4098 (*parm_map)[i].parm_offset
4099 = ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT;
85ebbabd 4100 }
ada353b8 4101 else
c34db4b6 4102 (*parm_map)[i].parm_index = -1;
ada353b8
JH
4103 }
4104 if (dump_file)
4105 {
4106 fprintf (dump_file, " Parm map: ");
4107 for (i = 0; i < count; i++)
c34db4b6 4108 fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
ada353b8
JH
4109 fprintf (dump_file, "\n");
4110 }
6cef01c3 4111 return true;
ada353b8 4112 }
6cef01c3 4113 return false;
ada353b8
JH
4114}
4115
85ebbabd
JH
4116/* Map used to translate escape infos. */
4117
4118struct escape_map
4119{
4120 int parm_index;
4121 bool direct;
4122};
4123
b8ef019a 4124/* Update escape map for E. */
85ebbabd
JH
4125
4126static void
4127update_escape_summary_1 (cgraph_edge *e,
9851a163
JH
4128 vec <vec <escape_map>> &map,
4129 bool ignore_stores)
85ebbabd
JH
4130{
4131 escape_summary *sum = escape_summaries->get (e);
4132 if (!sum)
4133 return;
4134 auto_vec <escape_entry> old = sum->esc.copy ();
4135 sum->esc.release ();
4136
4137 unsigned int i;
4138 escape_entry *ee;
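  /* Translate every escape entry to the caller's parameter numbering;
     entries whose parameter has no mapping in MAP are dropped.  */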
4139 FOR_EACH_VEC_ELT (old, i, ee)
4140 {
4141 unsigned int j;
4142 struct escape_map *em;
b8ef019a
JH
4143 /* TODO: We do not have jump functions for return slots, so we
 4144	     never propagate them to the outer function.  */
4145 if (ee->parm_index >= (int)map.length ()
4146 || ee->parm_index < 0)
85ebbabd
JH
4147 continue;
4148 FOR_EACH_VEC_ELT (map[ee->parm_index], j, em)
4149 {
9851a163
JH
4150 int min_flags = ee->min_flags;
4151 if (ee->direct && !em->direct)
4152 min_flags = deref_flags (min_flags, ignore_stores);
85ebbabd 4153 struct escape_entry entry = {em->parm_index, ee->arg,
b8ef019a 4154 ee->min_flags,
85ebbabd
JH
4155 ee->direct & em->direct};
4156 sum->esc.safe_push (entry);
4157 }
4158 }
4159 if (!sum->esc.length ())
4160 escape_summaries->remove (e);
4161}
4162
 4163/* Update escape map for NODE.  */
4164
4165static void
4166update_escape_summary (cgraph_node *node,
9851a163
JH
4167 vec <vec <escape_map>> &map,
4168 bool ignore_stores)
85ebbabd
JH
4169{
4170 if (!escape_summaries)
4171 return;
4172 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
9851a163 4173 update_escape_summary_1 (e, map, ignore_stores);
85ebbabd
JH
4174 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
4175 {
4176 if (!e->inline_failed)
9851a163 4177 update_escape_summary (e->callee, map, ignore_stores);
85ebbabd 4178 else
9851a163 4179 update_escape_summary_1 (e, map, ignore_stores);
85ebbabd
JH
4180 }
4181}
4182
6cef01c3
JH
4183/* Get parameter type from DECL. This is only safe for special cases
 4184   like builtins we create fnspecs for, because the type match is checked
4185 at fnspec creation time. */
d119f34c 4186
6cef01c3
JH
4187static tree
4188get_parm_type (tree decl, unsigned int i)
ada353b8 4189{
6cef01c3 4190 tree t = TYPE_ARG_TYPES (TREE_TYPE (decl));
ada353b8 4191
6cef01c3
JH
4192 for (unsigned int p = 0; p < i; p++)
4193 t = TREE_CHAIN (t);
4194 return TREE_VALUE (t);
4195}
4196
4197/* Return access mode for argument I of call E with FNSPEC. */
4198
4199static modref_access_node
4200get_access_for_fnspec (cgraph_edge *e, attr_fnspec &fnspec,
4201 unsigned int i, modref_parm_map &map)
4202{
4203 tree size = NULL_TREE;
4204 unsigned int size_arg;
4205
4206 if (!fnspec.arg_specified_p (i))
4207 ;
4208 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
ada353b8 4209 {
6cef01c3
JH
4210 cgraph_node *node = e->caller->inlined_to
4211 ? e->caller->inlined_to : e->caller;
a4a3cdd0
MJ
4212 ipa_node_params *caller_parms_info = ipa_node_params_sum->get (node);
4213 ipa_edge_args *args = ipa_edge_args_sum->get (e);
6cef01c3
JH
4214 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, size_arg);
4215
4216 if (jf)
4217 size = ipa_value_from_jfunc (caller_parms_info, jf,
4218 get_parm_type (e->callee->decl, size_arg));
ada353b8 4219 }
6cef01c3
JH
4220 else if (fnspec.arg_access_size_given_by_type_p (i))
4221 size = TYPE_SIZE_UNIT (get_parm_type (e->callee->decl, i));
4222 modref_access_node a = {0, -1, -1,
4223 map.parm_offset, map.parm_index,
5c85f295 4224 map.parm_offset_known, 0};
6cef01c3
JH
4225 poly_int64 size_hwi;
4226 if (size
4227 && poly_int_tree_p (size, &size_hwi)
4228 && coeffs_in_range_p (size_hwi, 0,
4229 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
ada353b8 4230 {
6cef01c3
JH
4231 a.size = -1;
4232 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
ada353b8 4233 }
6cef01c3
JH
4234 return a;
4235}
4236
 4237/* Call E in NODE with ECF_FLAGS has no summary; update CUR_SUMMARY and
 4238   CUR_SUMMARY_LTO accordingly.  Return true if something changed.  */
4239
4240static bool
4241propagate_unknown_call (cgraph_node *node,
4242 cgraph_edge *e, int ecf_flags,
85ebbabd 4243 modref_summary *cur_summary,
8d3abf42
JH
4244 modref_summary_lto *cur_summary_lto,
4245 bool nontrivial_scc)
6cef01c3
JH
4246{
4247 bool changed = false;
6cef01c3
JH
4248 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4249 auto_vec <modref_parm_map, 32> parm_map;
992644c3
JH
4250 bool looping;
4251
4252 if (e->callee
4253 && builtin_safe_for_const_function_p (&looping, e->callee->decl))
4254 {
8d3abf42 4255 if (looping && cur_summary && !cur_summary->side_effects)
992644c3
JH
4256 {
4257 cur_summary->side_effects = true;
4258 changed = true;
4259 }
8d3abf42 4260 if (looping && cur_summary_lto && !cur_summary_lto->side_effects)
992644c3
JH
4261 {
4262 cur_summary_lto->side_effects = true;
4263 changed = true;
4264 }
4265 return changed;
4266 }
4267
8d3abf42
JH
4268 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
4269 || (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
4270 || nontrivial_scc)
992644c3
JH
4271 {
4272 if (cur_summary && !cur_summary->side_effects)
4273 {
4274 cur_summary->side_effects = true;
4275 changed = true;
4276 }
4277 if (cur_summary_lto && !cur_summary_lto->side_effects)
4278 {
4279 cur_summary_lto->side_effects = true;
4280 changed = true;
4281 }
a34edf9a
JH
4282 if (cur_summary && !cur_summary->nondeterministic
4283 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4284 {
4285 cur_summary->nondeterministic = true;
4286 changed = true;
4287 }
4288 if (cur_summary_lto && !cur_summary_lto->nondeterministic
4289 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4290 {
4291 cur_summary_lto->nondeterministic = true;
4292 changed = true;
4293 }
992644c3 4294 }
8d3abf42
JH
4295 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
4296 return changed;
992644c3 4297
6cef01c3
JH
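  /* If a fnspec is known for the call, record only the loads and stores it
     permits instead of collapsing the whole summary.  */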
4298 if (fnspec_sum
4299 && compute_parm_map (e, &parm_map))
4300 {
4301 attr_fnspec fnspec (fnspec_sum->fnspec);
4302
4303 gcc_checking_assert (fnspec.known_p ());
4304 if (fnspec.global_memory_read_p ())
4305 collapse_loads (cur_summary, cur_summary_lto);
4306 else
4307 {
4308 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
4309 for (unsigned i = 0; i < parm_map.length () && t;
4310 i++, t = TREE_CHAIN (t))
4311 if (!POINTER_TYPE_P (TREE_VALUE (t)))
4312 ;
4313 else if (!fnspec.arg_specified_p (i)
4314 || fnspec.arg_maybe_read_p (i))
4315 {
4316 modref_parm_map map = parm_map[i];
1f3a3363 4317 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
6cef01c3 4318 continue;
1f3a3363 4319 if (map.parm_index == MODREF_UNKNOWN_PARM)
6cef01c3
JH
4320 {
4321 collapse_loads (cur_summary, cur_summary_lto);
4322 break;
4323 }
4324 if (cur_summary)
4325 changed |= cur_summary->loads->insert
5c85f295 4326 (0, 0, get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4327 if (cur_summary_lto)
4328 changed |= cur_summary_lto->loads->insert
5c85f295 4329 (0, 0, get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4330 }
4331 }
4332 if (ignore_stores_p (node->decl, ecf_flags))
4333 ;
4334 else if (fnspec.global_memory_written_p ())
4335 collapse_stores (cur_summary, cur_summary_lto);
4336 else
4337 {
4338 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
4339 for (unsigned i = 0; i < parm_map.length () && t;
4340 i++, t = TREE_CHAIN (t))
4341 if (!POINTER_TYPE_P (TREE_VALUE (t)))
4342 ;
4343 else if (!fnspec.arg_specified_p (i)
4344 || fnspec.arg_maybe_written_p (i))
4345 {
4346 modref_parm_map map = parm_map[i];
1f3a3363 4347 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
6cef01c3 4348 continue;
1f3a3363 4349 if (map.parm_index == MODREF_UNKNOWN_PARM)
6cef01c3
JH
4350 {
4351 collapse_stores (cur_summary, cur_summary_lto);
4352 break;
4353 }
4354 if (cur_summary)
4355 changed |= cur_summary->stores->insert
5c85f295 4356 (0, 0, get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4357 if (cur_summary_lto)
4358 changed |= cur_summary_lto->stores->insert
5c85f295 4359 (0, 0, get_access_for_fnspec (e, fnspec, i, map), false);
6cef01c3
JH
4360 }
4361 }
4362 if (fnspec.errno_maybe_written_p () && flag_errno_math)
4363 {
4364 if (cur_summary && !cur_summary->writes_errno)
4365 {
4366 cur_summary->writes_errno = true;
4367 changed = true;
4368 }
4369 if (cur_summary_lto && !cur_summary_lto->writes_errno)
4370 {
4371 cur_summary_lto->writes_errno = true;
4372 changed = true;
4373 }
4374 }
4375 return changed;
4376 }
85ebbabd
JH
4377 if (dump_file)
4378 fprintf (dump_file, " collapsing loads\n");
4379 changed |= collapse_loads (cur_summary, cur_summary_lto);
4380 if (!ignore_stores_p (node->decl, ecf_flags))
6cef01c3
JH
4381 {
4382 if (dump_file)
85ebbabd
JH
4383 fprintf (dump_file, " collapsing stores\n");
4384 changed |= collapse_stores (cur_summary, cur_summary_lto);
6cef01c3 4385 }
85ebbabd 4386 return changed;
ada353b8 4387}
d119f34c 4388
85ebbabd
JH
 4389/* Maybe remove summaries of NODE pointed to by CUR_SUMMARY_PTR
4390 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
4391
4392static void
4393remove_useless_summaries (cgraph_node *node,
4394 modref_summary **cur_summary_ptr,
4395 modref_summary_lto **cur_summary_lto_ptr,
4396 int ecf_flags)
4397{
4398 if (*cur_summary_ptr && !(*cur_summary_ptr)->useful_p (ecf_flags, false))
4399 {
4400 optimization_summaries->remove (node);
4401 *cur_summary_ptr = NULL;
4402 }
4403 if (*cur_summary_lto_ptr
4404 && !(*cur_summary_lto_ptr)->useful_p (ecf_flags, false))
4405 {
4406 summaries_lto->remove (node);
4407 *cur_summary_lto_ptr = NULL;
4408 }
4409}
4410
4411/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4412 and propagate loads/stores. */
ada353b8 4413
494bdadf 4414static bool
ada353b8
JH
4415modref_propagate_in_scc (cgraph_node *component_node)
4416{
4417 bool changed = true;
5c85f295 4418 bool first = true;
ada353b8
JH
4419 int iteration = 0;
4420
4421 while (changed)
4422 {
8d3abf42
JH
4423 bool nontrivial_scc
4424 = ((struct ipa_dfs_info *) component_node->aux)->next_cycle;
ada353b8
JH
4425 changed = false;
4426 for (struct cgraph_node *cur = component_node; cur;
d119f34c
JH
4427 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4428 {
ada353b8 4429 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
71dbabcc
JH
4430 modref_summary *cur_summary = optimization_summaries
4431 ? optimization_summaries->get (node)
4432 : NULL;
4433 modref_summary_lto *cur_summary_lto = summaries_lto
4434 ? summaries_lto->get (node)
4435 : NULL;
4436
4437 if (!cur_summary && !cur_summary_lto)
ada353b8
JH
4438 continue;
4439
85ebbabd
JH
4440 int cur_ecf_flags = flags_from_decl_or_type (node->decl);
4441
ada353b8
JH
4442 if (dump_file)
4443 fprintf (dump_file, " Processing %s%s%s\n",
4444 cur->dump_name (),
4445 TREE_READONLY (cur->decl) ? " (const)" : "",
4446 DECL_PURE_P (cur->decl) ? " (pure)" : "");
d119f34c 4447
d119f34c
JH
4448 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
4449 {
6cef01c3 4450 if (dump_file)
8d3abf42 4451 fprintf (dump_file, " Indirect call\n");
85ebbabd 4452 if (propagate_unknown_call
6cef01c3 4453 (node, e, e->indirect_info->ecf_flags,
8d3abf42
JH
4454 cur_summary, cur_summary_lto,
4455 nontrivial_scc))
85ebbabd
JH
4456 {
4457 changed = true;
4458 remove_useless_summaries (node, &cur_summary,
4459 &cur_summary_lto,
4460 cur_ecf_flags);
4461 if (!cur_summary && !cur_summary_lto)
4462 break;
4463 }
d119f34c
JH
4464 }
4465
71dbabcc 4466 if (!cur_summary && !cur_summary_lto)
ada353b8
JH
4467 continue;
4468
d119f34c
JH
4469 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
4470 callee_edge = callee_edge->next_callee)
4471 {
4472 int flags = flags_from_decl_or_type (callee_edge->callee->decl);
71dbabcc
JH
4473 modref_summary *callee_summary = NULL;
4474 modref_summary_lto *callee_summary_lto = NULL;
d119f34c
JH
4475 struct cgraph_node *callee;
4476
8d3abf42
JH
4477 if (!callee_edge->inline_failed
4478 || ((flags & (ECF_CONST | ECF_NOVOPS))
4479 && !(flags & ECF_LOOPING_CONST_OR_PURE)))
d119f34c
JH
4480 continue;
4481
d119f34c
JH
4482 /* Get the callee and its summary. */
4483 enum availability avail;
4484 callee = callee_edge->callee->function_or_virtual_thunk_symbol
4485 (&avail, cur);
4486
ada353b8
JH
4487 /* It is not necessary to re-process calls outside of the
4488 SCC component. */
4489 if (iteration > 0
4490 && (!callee->aux
4491 || ((struct ipa_dfs_info *)cur->aux)->scc_no
4492 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
4493 continue;
4494
4495 if (dump_file)
4496 fprintf (dump_file, " Call to %s\n",
4497 callee_edge->callee->dump_name ());
d119f34c
JH
4498
4499 bool ignore_stores = ignore_stores_p (cur->decl, flags);
4500
71dbabcc 4501 if (avail <= AVAIL_INTERPOSABLE)
d119f34c 4502 {
6cef01c3
JH
4503 if (dump_file)
4504 fprintf (dump_file, " Call target interposable"
4505 " or not available\n");
4506 changed |= propagate_unknown_call
4507 (node, callee_edge, flags,
8d3abf42
JH
4508 cur_summary, cur_summary_lto,
4509 nontrivial_scc);
6cef01c3
JH
4510 if (!cur_summary && !cur_summary_lto)
4511 break;
4512 continue;
71dbabcc
JH
4513 }
4514
4515 /* We don't know anything about CALLEE, hence we cannot tell
4516 anything about the entire component. */
4517
4518 if (cur_summary
4519 && !(callee_summary = optimization_summaries->get (callee)))
4520 {
6cef01c3
JH
4521 if (dump_file)
4522 fprintf (dump_file, " No call target summary\n");
4523 changed |= propagate_unknown_call
4524 (node, callee_edge, flags,
8d3abf42
JH
4525 cur_summary, NULL,
4526 nontrivial_scc);
71dbabcc
JH
4527 }
4528 if (cur_summary_lto
4529 && !(callee_summary_lto = summaries_lto->get (callee)))
4530 {
6cef01c3
JH
4531 if (dump_file)
4532 fprintf (dump_file, " No call target summary\n");
4533 changed |= propagate_unknown_call
4534 (node, callee_edge, flags,
8d3abf42
JH
4535 NULL, cur_summary_lto,
4536 nontrivial_scc);
d119f34c
JH
4537 }
4538
8d3abf42
JH
4539 if (callee_summary && !cur_summary->side_effects
4540 && (callee_summary->side_effects
4541 || callee_edge->recursive_p ()))
4542 {
4543 cur_summary->side_effects = true;
4544 changed = true;
4545 }
4546 if (callee_summary_lto && !cur_summary_lto->side_effects
4547 && (callee_summary_lto->side_effects
4548 || callee_edge->recursive_p ()))
4549 {
4550 cur_summary_lto->side_effects = true;
4551 changed = true;
4552 }
a34edf9a
JH
4553 if (callee_summary && !cur_summary->nondeterministic
4554 && callee_summary->nondeterministic
4555 && !ignore_nondeterminism_p (cur->decl, flags))
4556 {
4557 cur_summary->nondeterministic = true;
4558 changed = true;
4559 }
4560 if (callee_summary_lto && !cur_summary_lto->nondeterministic
4561 && callee_summary_lto->nondeterministic
4562 && !ignore_nondeterminism_p (cur->decl, flags))
4563 {
4564 cur_summary_lto->nondeterministic = true;
4565 changed = true;
4566 }
8d3abf42
JH
4567 if (flags & (ECF_CONST | ECF_NOVOPS))
4568 continue;
4569
ada353b8
JH
 4570	      /* We cannot safely optimize based on the summary of the callee if it
 4571		 does not always bind to the current def: it is possible that
 4572		 a memory load was optimized out earlier, which may not happen in
 4573		 the interposed variant.  */
4574 if (!callee_edge->binds_to_current_def_p ())
4575 {
a34edf9a
JH
4576 if (cur_summary && !cur_summary->calls_interposable)
4577 {
4578 cur_summary->calls_interposable = true;
4579 changed = true;
4580 }
4581 if (cur_summary_lto && !cur_summary_lto->calls_interposable)
4582 {
4583 cur_summary_lto->calls_interposable = true;
4584 changed = true;
4585 }
ada353b8
JH
4586 if (dump_file)
4587 fprintf (dump_file, " May not bind local;"
4588 " collapsing loads\n");
4589 }
4590
4591
c34db4b6 4592 auto_vec <modref_parm_map, 32> parm_map;
1f3a3363
JH
4593 modref_parm_map chain_map;
4594 /* TODO: Once we get jump functions for static chains we could
4595 compute this. */
4596 chain_map.parm_index = MODREF_UNKNOWN_PARM;
ada353b8
JH
4597
4598 compute_parm_map (callee_edge, &parm_map);
c33f4742 4599
d119f34c 4600 /* Merge in callee's information. */
71dbabcc
JH
4601 if (callee_summary)
4602 {
56cb815b 4603 changed |= cur_summary->loads->merge
1f3a3363
JH
4604 (callee_summary->loads, &parm_map,
4605 &chain_map, !first);
56cb815b 4606 if (!ignore_stores)
6cef01c3
JH
4607 {
4608 changed |= cur_summary->stores->merge
5c85f295 4609 (callee_summary->stores, &parm_map,
1f3a3363 4610 &chain_map, !first);
6cef01c3
JH
4611 if (!cur_summary->writes_errno
4612 && callee_summary->writes_errno)
4613 {
4614 cur_summary->writes_errno = true;
4615 changed = true;
4616 }
4617 }
71dbabcc
JH
4618 }
4619 if (callee_summary_lto)
4620 {
56cb815b 4621 changed |= cur_summary_lto->loads->merge
5c85f295 4622 (callee_summary_lto->loads, &parm_map,
1f3a3363 4623 &chain_map, !first);
56cb815b 4624 if (!ignore_stores)
6cef01c3
JH
4625 {
4626 changed |= cur_summary_lto->stores->merge
5c85f295 4627 (callee_summary_lto->stores, &parm_map,
1f3a3363 4628 &chain_map, !first);
6cef01c3
JH
4629 if (!cur_summary_lto->writes_errno
4630 && callee_summary_lto->writes_errno)
4631 {
4632 cur_summary_lto->writes_errno = true;
4633 changed = true;
4634 }
4635 }
71dbabcc 4636 }
85ebbabd
JH
4637 if (changed)
4638 remove_useless_summaries (node, &cur_summary,
4639 &cur_summary_lto,
4640 cur_ecf_flags);
4641 if (!cur_summary && !cur_summary_lto)
4642 break;
ada353b8 4643 if (dump_file && changed)
71dbabcc
JH
4644 {
4645 if (cur_summary)
4646 cur_summary->dump (dump_file);
4647 if (cur_summary_lto)
4648 cur_summary_lto->dump (dump_file);
85ebbabd 4649 dump_modref_edge_summaries (dump_file, node, 4);
71dbabcc 4650 }
d119f34c
JH
4651 }
4652 }
ada353b8 4653 iteration++;
5c85f295 4654 first = false;
ada353b8 4655 }
ada353b8 4656 if (dump_file)
85ebbabd
JH
4657 fprintf (dump_file,
4658 "Propagation finished in %i iterations\n", iteration);
494bdadf
JH
4659 bool pureconst = false;
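  /* After the fixpoint is reached, functions that do not store to global
     memory (and possibly do not load from it) can be promoted to pure or
     const.  */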
4660 for (struct cgraph_node *cur = component_node; cur;
4661 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4662 if (!cur->inlined_to && opt_for_fn (cur->decl, flag_ipa_pure_const))
4663 {
4664 modref_summary *summary = optimization_summaries
4665 ? optimization_summaries->get (cur)
4666 : NULL;
4667 modref_summary_lto *summary_lto = summaries_lto
4668 ? summaries_lto->get (cur)
4669 : NULL;
1b62cddc 4670 if (summary && !summary->stores->every_base && !summary->stores->bases
a34edf9a 4671 && !summary->nondeterministic)
494bdadf 4672 {
a34edf9a
JH
4673 if (!summary->loads->every_base && !summary->loads->bases
4674 && !summary->calls_interposable)
494bdadf
JH
4675 pureconst |= ipa_make_function_const
4676 (cur, summary->side_effects, false);
4677 else
4678 pureconst |= ipa_make_function_pure
4679 (cur, summary->side_effects, false);
4680 }
4681 if (summary_lto && !summary_lto->stores->every_base
a34edf9a 4682 && !summary_lto->stores->bases && !summary_lto->nondeterministic)
494bdadf 4683 {
a34edf9a
JH
4684 if (!summary_lto->loads->every_base && !summary_lto->loads->bases
4685 && !summary_lto->calls_interposable)
494bdadf
JH
4686 pureconst |= ipa_make_function_const
4687 (cur, summary_lto->side_effects, false);
4688 else
4689 pureconst |= ipa_make_function_pure
4690 (cur, summary_lto->side_effects, false);
4691 }
4692 }
4693 return pureconst;
85ebbabd
JH
4694}
4695
4696/* Dump results of propagation in SCC rooted in COMPONENT_NODE. */
4697
4698static void
4699modref_propagate_dump_scc (cgraph_node *component_node)
4700{
4701 for (struct cgraph_node *cur = component_node; cur;
4702 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4703 if (!cur->inlined_to)
4704 {
4705 modref_summary *cur_summary = optimization_summaries
4706 ? optimization_summaries->get (cur)
4707 : NULL;
4708 modref_summary_lto *cur_summary_lto = summaries_lto
4709 ? summaries_lto->get (cur)
4710 : NULL;
4711
4712 fprintf (dump_file, "Propagated modref for %s%s%s\n",
4713 cur->dump_name (),
4714 TREE_READONLY (cur->decl) ? " (const)" : "",
4715 DECL_PURE_P (cur->decl) ? " (pure)" : "");
4716 if (optimization_summaries)
4717 {
4718 if (cur_summary)
4719 cur_summary->dump (dump_file);
4720 else
4721 fprintf (dump_file, " Not tracked\n");
4722 }
4723 if (summaries_lto)
4724 {
4725 if (cur_summary_lto)
4726 cur_summary_lto->dump (dump_file);
4727 else
4728 fprintf (dump_file, " Not tracked (lto)\n");
4729 }
4730 }
4731}
4732
4733/* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
4734 and SUMMARY_LTO to CUR_SUMMARY_LTO.
4735 Return true if something changed. */
4736
4737static bool
4738modref_merge_call_site_flags (escape_summary *sum,
4739 modref_summary *cur_summary,
4740 modref_summary_lto *cur_summary_lto,
4741 modref_summary *summary,
4742 modref_summary_lto *summary_lto,
4341b1b1 4743 tree caller,
f6f704fd
JH
4744 cgraph_edge *e,
4745 int caller_ecf_flags,
4746 int callee_ecf_flags,
4747 bool binds_to_current_def)
85ebbabd
JH
4748{
4749 escape_entry *ee;
4750 unsigned int i;
4751 bool changed = false;
f6f704fd 4752 bool ignore_stores = ignore_stores_p (caller, callee_ecf_flags);
85ebbabd
JH
4753
4754 /* If we have no useful info to propagate. */
4755 if ((!cur_summary || !cur_summary->arg_flags.length ())
4756 && (!cur_summary_lto || !cur_summary_lto->arg_flags.length ()))
4757 return false;
4758
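  /* For every value escaping into the call, intersect the caller's flags of
     the corresponding parameter with the flags the callee guarantees for the
     argument it escapes to.  */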
4759 FOR_EACH_VEC_ELT (sum->esc, i, ee)
ada353b8 4760 {
85ebbabd
JH
4761 int flags = 0;
4762 int flags_lto = 0;
f6f704fd 4763	      /* Returning the value is already accounted for during local propagation.  */
d70ef656
JH
4764 int implicit_flags = EAF_NOT_RETURNED_DIRECTLY
4765 | EAF_NOT_RETURNED_INDIRECTLY;
85ebbabd
JH
4766
4767 if (summary && ee->arg < summary->arg_flags.length ())
4768 flags = summary->arg_flags[ee->arg];
4769 if (summary_lto
4770 && ee->arg < summary_lto->arg_flags.length ())
4771 flags_lto = summary_lto->arg_flags[ee->arg];
4772 if (!ee->direct)
4773 {
4774 flags = deref_flags (flags, ignore_stores);
4775 flags_lto = deref_flags (flags_lto, ignore_stores);
4776 }
f6f704fd
JH
4777 if (ignore_stores)
4778 implicit_flags |= ignore_stores_eaf_flags;
4779 if (callee_ecf_flags & ECF_PURE)
4780 implicit_flags |= implicit_pure_eaf_flags;
4781 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
4782 implicit_flags |= implicit_const_eaf_flags;
4783 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4784 if (fnspec_sum)
85ebbabd 4785 {
f6f704fd 4786 attr_fnspec fnspec (fnspec_sum->fnspec);
e2dd12ab 4787 implicit_flags |= fnspec.arg_eaf_flags (ee->arg);
f6f704fd
JH
4788 }
4789 if (!ee->direct)
4790 implicit_flags = deref_flags (implicit_flags, ignore_stores);
4791 flags |= implicit_flags;
4792 flags_lto |= implicit_flags;
4793 if (!binds_to_current_def && (flags || flags_lto))
4794 {
4795 flags = interposable_eaf_flags (flags, implicit_flags);
4796 flags_lto = interposable_eaf_flags (flags_lto, implicit_flags);
85ebbabd 4797 }
3350e59f 4798 if (!(flags & EAF_UNUSED)
b8ef019a 4799 && cur_summary && ee->parm_index < (int)cur_summary->arg_flags.length ())
85ebbabd 4800 {
1f3a3363 4801 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
b8ef019a 4802 ? cur_summary->retslot_flags
1f3a3363 4803 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
a70c0512 4804 ? cur_summary->static_chain_flags
b8ef019a 4805 : cur_summary->arg_flags[ee->parm_index];
85ebbabd
JH
4806 if ((f & flags) != f)
4807 {
4341b1b1 4808 f = remove_useless_eaf_flags
f6f704fd 4809 (f & flags, caller_ecf_flags,
4341b1b1 4810 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
85ebbabd
JH
4811 changed = true;
4812 }
4813 }
3350e59f
JH
4814 if (!(flags_lto & EAF_UNUSED)
4815 && cur_summary_lto
b8ef019a 4816 && ee->parm_index < (int)cur_summary_lto->arg_flags.length ())
85ebbabd 4817 {
1f3a3363 4818 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
b8ef019a 4819 ? cur_summary_lto->retslot_flags
1f3a3363 4820 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
a70c0512 4821 ? cur_summary_lto->static_chain_flags
b8ef019a 4822 : cur_summary_lto->arg_flags[ee->parm_index];
85ebbabd
JH
4823 if ((f & flags_lto) != f)
4824 {
4341b1b1 4825 f = remove_useless_eaf_flags
f6f704fd 4826 (f & flags_lto, caller_ecf_flags,
4341b1b1 4827 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
85ebbabd
JH
4828 changed = true;
4829 }
4830 }
4831 }
4832 return changed;
4833}
4834
4835/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4836 and propagate arg flags. */
4837
4838static void
4839modref_propagate_flags_in_scc (cgraph_node *component_node)
4840{
4841 bool changed = true;
4842 int iteration = 0;
4843
4844 while (changed)
4845 {
4846 changed = false;
ada353b8 4847 for (struct cgraph_node *cur = component_node; cur;
d119f34c 4848 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
85ebbabd
JH
4849 {
4850 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
4851 modref_summary *cur_summary = optimization_summaries
4852 ? optimization_summaries->get (node)
4853 : NULL;
4854 modref_summary_lto *cur_summary_lto = summaries_lto
4855 ? summaries_lto->get (node)
4856 : NULL;
4857
4858 if (!cur_summary && !cur_summary_lto)
4859 continue;
f6f704fd 4860 int caller_ecf_flags = flags_from_decl_or_type (cur->decl);
85ebbabd
JH
4861
4862 if (dump_file)
4863 fprintf (dump_file, " Processing %s%s%s\n",
ada353b8
JH
4864 cur->dump_name (),
4865 TREE_READONLY (cur->decl) ? " (const)" : "",
4866 DECL_PURE_P (cur->decl) ? " (pure)" : "");
85ebbabd
JH
4867
4868 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
4869 {
4870 escape_summary *sum = escape_summaries->get (e);
4871
4872 if (!sum || (e->indirect_info->ecf_flags
4873 & (ECF_CONST | ECF_NOVOPS)))
4874 continue;
4875
4876 changed |= modref_merge_call_site_flags
4877 (sum, cur_summary, cur_summary_lto,
4341b1b1 4878 NULL, NULL,
f6f704fd
JH
4879 node->decl,
4880 e,
4881 caller_ecf_flags,
4882 e->indirect_info->ecf_flags,
4883 false);
85ebbabd
JH
4884 }
4885
4886 if (!cur_summary && !cur_summary_lto)
4887 continue;
4888
4889 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
4890 callee_edge = callee_edge->next_callee)
4891 {
4341b1b1
JH
4892 int ecf_flags = flags_from_decl_or_type
4893 (callee_edge->callee->decl);
85ebbabd
JH
4894 modref_summary *callee_summary = NULL;
4895 modref_summary_lto *callee_summary_lto = NULL;
4896 struct cgraph_node *callee;
4897
4341b1b1 4898 if (ecf_flags & (ECF_CONST | ECF_NOVOPS)
85ebbabd
JH
4899 || !callee_edge->inline_failed)
4900 continue;
4901 /* Get the callee and its summary. */
4902 enum availability avail;
4903 callee = callee_edge->callee->function_or_virtual_thunk_symbol
4904 (&avail, cur);
4905
4906 /* It is not necessary to re-process calls outside of the
4907 SCC component. */
4908 if (iteration > 0
4909 && (!callee->aux
4910 || ((struct ipa_dfs_info *)cur->aux)->scc_no
4911 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
4912 continue;
4913
4914 escape_summary *sum = escape_summaries->get (callee_edge);
4915 if (!sum)
4916 continue;
4917
4918 if (dump_file)
4919 fprintf (dump_file, " Call to %s\n",
4920 callee_edge->callee->dump_name ());
4921
4922 if (avail <= AVAIL_INTERPOSABLE
4923 || callee_edge->call_stmt_cannot_inline_p)
4924 ;
4925 else
4926 {
4927 if (cur_summary)
4928 callee_summary = optimization_summaries->get (callee);
4929 if (cur_summary_lto)
4930 callee_summary_lto = summaries_lto->get (callee);
4931 }
4932 changed |= modref_merge_call_site_flags
4933 (sum, cur_summary, cur_summary_lto,
4934 callee_summary, callee_summary_lto,
f6f704fd
JH
4935 node->decl,
4936 callee_edge,
4937 caller_ecf_flags,
4938 ecf_flags,
4939 callee->binds_to_current_def_p ());
85ebbabd
JH
4940 if (dump_file && changed)
4941 {
4942 if (cur_summary)
4943 cur_summary->dump (dump_file);
4944 if (cur_summary_lto)
4945 cur_summary_lto->dump (dump_file);
4946 }
4947 }
4948 }
4949 iteration++;
4950 }
4951 if (dump_file)
4952 fprintf (dump_file,
4953 "Propagation of flags finished in %i iterations\n", iteration);
ada353b8
JH
4954}
4955
18f0873d
JH
4956} /* ANON namespace. */
4957
4958/* Call EDGE was inlined; merge summary from callee to the caller. */
4959
4960void
4961ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
4962{
4963 if (!summaries && !summaries_lto)
4964 return;
4965
4966 struct cgraph_node *to = (edge->caller->inlined_to
4967 ? edge->caller->inlined_to : edge->caller);
4968 class modref_summary *to_info = summaries ? summaries->get (to) : NULL;
4969 class modref_summary_lto *to_info_lto = summaries_lto
4970 ? summaries_lto->get (to) : NULL;
4971
4972 if (!to_info && !to_info_lto)
4973 {
4974 if (summaries)
4975 summaries->remove (edge->callee);
4976 if (summaries_lto)
4977 summaries_lto->remove (edge->callee);
4978 remove_modref_edge_summaries (edge->callee);
4979 return;
4980 }
4981
4982 class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
4983 : NULL;
4984 class modref_summary_lto *callee_info_lto
4985 = summaries_lto ? summaries_lto->get (edge->callee) : NULL;
4986 int flags = flags_from_decl_or_type (edge->callee->decl);
4987 bool ignore_stores = ignore_stores_p (edge->caller->decl, flags);
4988
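  /* If the callee has no summary, the caller must conservatively assume the
     inlined body may load or store anything that the ECF flags do not rule
     out.  */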
4989 if (!callee_info && to_info)
4990 {
4991 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
4992 to_info->loads->collapse ();
4993 if (!ignore_stores)
4994 to_info->stores->collapse ();
4995 }
4996 if (!callee_info_lto && to_info_lto)
4997 {
4998 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
4999 to_info_lto->loads->collapse ();
5000 if (!ignore_stores)
5001 to_info_lto->stores->collapse ();
5002 }
5003 if (callee_info || callee_info_lto)
5004 {
5005 auto_vec <modref_parm_map, 32> parm_map;
1f3a3363
JH
5006 modref_parm_map chain_map;
5007 /* TODO: Once we get jump functions for static chains we could
5008 compute this. */
5009 chain_map.parm_index = MODREF_UNKNOWN_PARM;
18f0873d
JH
5010
5011 compute_parm_map (edge, &parm_map);
5012
5013 if (!ignore_stores)
5014 {
5015 if (to_info && callee_info)
1f3a3363
JH
5016 to_info->stores->merge (callee_info->stores, &parm_map,
5017 &chain_map, false);
18f0873d
JH
5018 if (to_info_lto && callee_info_lto)
5019 to_info_lto->stores->merge (callee_info_lto->stores, &parm_map,
1f3a3363 5020 &chain_map, false);
18f0873d
JH
5021 }
5022 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
5023 {
5024 if (to_info && callee_info)
1f3a3363
JH
5025 to_info->loads->merge (callee_info->loads, &parm_map,
5026 &chain_map, false);
18f0873d
JH
5027 if (to_info_lto && callee_info_lto)
5028 to_info_lto->loads->merge (callee_info_lto->loads, &parm_map,
1f3a3363 5029 &chain_map, false);
18f0873d
JH
5030 }
5031 }
5032
5033 /* Now merge escape summaries.
 5034     For every escape to the callee we need to merge the callee flags
 5035     and remap the callee's escapes.  */
5036 class escape_summary *sum = escape_summaries->get (edge);
5037 int max_escape = -1;
5038 escape_entry *ee;
5039 unsigned int i;
5040
5041 if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
5042 FOR_EACH_VEC_ELT (sum->esc, i, ee)
5043 if ((int)ee->arg > max_escape)
5044 max_escape = ee->arg;
5045
5046 auto_vec <vec <struct escape_map>, 32> emap (max_escape + 1);
5047 emap.safe_grow (max_escape + 1, true);
5048 for (i = 0; (int)i < max_escape + 1; i++)
5049 emap[i] = vNULL;
5050
5051 if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
5052 FOR_EACH_VEC_ELT (sum->esc, i, ee)
5053 {
5054 bool needed = false;
5055 /* TODO: We do not have jump functions for return slots, so we
 5056	 never propagate them to the outer function.  */
5057 if (ee->parm_index < 0)
5058 continue;
5059 if (to_info && (int)to_info->arg_flags.length () > ee->parm_index)
5060 {
5061 int flags = callee_info
5062 && callee_info->arg_flags.length () > ee->arg
5063 ? callee_info->arg_flags[ee->arg] : 0;
5064 if (!ee->direct)
5065 flags = deref_flags (flags, ignore_stores);
5066 else if (ignore_stores)
5067 flags |= ignore_stores_eaf_flags;
5068 flags |= ee->min_flags;
5069 to_info->arg_flags[ee->parm_index] &= flags;
5070 if (to_info->arg_flags[ee->parm_index])
5071 needed = true;
5072 }
5073 if (to_info_lto
62af7d94 5074 && (int)to_info_lto->arg_flags.length () > ee->parm_index)
5075 {
5076 int flags = callee_info_lto
5077 && callee_info_lto->arg_flags.length () > ee->arg
5078 ? callee_info_lto->arg_flags[ee->arg] : 0;
5079 if (!ee->direct)
5080 flags = deref_flags (flags, ignore_stores);
5081 else if (ignore_stores)
5082 flags |= ignore_stores_eaf_flags;
5083 flags |= ee->min_flags;
5084 to_info_lto->arg_flags[ee->parm_index] &= flags;
5085 if (to_info_lto->arg_flags[ee->parm_index])
5086 needed = true;
5087 }
5088 struct escape_map entry = {ee->parm_index, ee->direct};
5089 if (needed)
5090 emap[ee->arg].safe_push (entry);
5091 }
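  /* Added note: re-attribute the callee's recorded escapes to the caller's
     parameters and drop the escape summary of the now-inlined edge.  */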
5092 update_escape_summary (edge->callee, emap, ignore_stores);
5093 for (i = 0; (int)i < max_escape + 1; i++)
5094 emap[i].release ();
5095 if (sum)
5096 escape_summaries->remove (edge);
5097
5098 if (summaries)
5099 {
5100 if (to_info && !to_info->useful_p (flags))
5101 {
5102 if (dump_file)
5103 fprintf (dump_file, "Removed mod-ref summary for %s\n",
5104 to->dump_name ());
5105 summaries->remove (to);
5106 to_info = NULL;
5107 }
5108 else if (to_info && dump_file)
5109 {
5110 if (dump_file)
5111 fprintf (dump_file, "Updated mod-ref summary for %s\n",
5112 to->dump_name ());
5113 to_info->dump (dump_file);
5114 }
5115 if (callee_info)
5116 summaries->remove (edge->callee);
5117 }
5118 if (summaries_lto)
5119 {
 5120 if (to_info_lto && !to_info_lto->useful_p (flags))
 5121 {
 5122 if (dump_file)
 5123 fprintf (dump_file, "Removed mod-ref summary for %s\n",
 5124 to->dump_name ());
 5125 summaries_lto->remove (to);
 5126 to_info_lto = NULL;
 5127 }
 5128 else if (to_info_lto && dump_file)
 5129 {
 5130 if (dump_file)
 5131 fprintf (dump_file, "Updated mod-ref summary for %s\n",
 5132 to->dump_name ());
 5133 to_info_lto->dump (dump_file);
 5134 }
5135 if (callee_info_lto)
5136 summaries_lto->remove (edge->callee);
5137 }
5138 if (!to_info && !to_info_lto)
5139 remove_modref_edge_summaries (to);
5140 return;
5141}
5142
 5143 /* Run the IPA pass. This takes each function's summaries and call edges and
 5144 constructs new summaries representing their transitive closure, so that the
 5145 summary of an analyzed function describes the loads and stores done by the
 5146 function itself and by any function it calls. */
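/* Illustrative example (added): if A calls B and B calls C, the propagated
   summary of A also covers the loads and stores performed by B and C, with
   accesses remapped through the respective call chains where the parameter
   mapping is known.  */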
5147
5148unsigned int
5149pass_ipa_modref::execute (function *)
5150{
71dbabcc 5151 if (!summaries && !summaries_lto)
ada353b8 5152 return 0;
494bdadf 5153 bool pureconst = false;
ada353b8 5154
5155 if (optimization_summaries)
5156 ggc_delete (optimization_summaries);
5157 optimization_summaries = summaries;
5158 summaries = NULL;
5159
5160 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
5161 symtab->cgraph_count);
5162 int order_pos;
5163 order_pos = ipa_reduced_postorder (order, true, ignore_edge);
5164 int i;
5165
5166 /* Iterate over all strongly connected components in post-order. */
5167 for (i = 0; i < order_pos; i++)
5168 {
5169 /* Get the component's representative. That's just any node in the
5170 component from which we can traverse the entire component. */
5171 struct cgraph_node *component_node = order[i];
5172
5173 if (dump_file)
5174 fprintf (dump_file, "\n\nStart of SCC component\n");
5175
494bdadf 5176 pureconst |= modref_propagate_in_scc (component_node);
85ebbabd 5177 modref_propagate_flags_in_scc (component_node);
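      /* Added note: after propagation, finalize each summary in the component
	 so that derived properties (such as whether global memory is read or
	 written) are recomputed from the merged access trees.  */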
5178 if (optimization_summaries)
5179 for (struct cgraph_node *cur = component_node; cur;
5180 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
5181 if (modref_summary *sum = optimization_summaries->get (cur))
5aa91072 5182 sum->finalize (cur->decl);
5183 if (dump_file)
5184 modref_propagate_dump_scc (component_node);
d119f34c 5185 }
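  /* Added note: keep summaries in sync with functions whose signatures were
     changed by IPA transformations (e.g. param adjustments of clones).  */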
5186 cgraph_node *node;
5187 FOR_EACH_FUNCTION (node)
5188 update_signature (node);
5189 if (summaries_lto)
5190 ((modref_summaries_lto *)summaries_lto)->propagated = true;
d119f34c 5191 ipa_free_postorder_info ();
a0e6e49d 5192 free (order);
5193 delete fnspec_summaries;
5194 fnspec_summaries = NULL;
5195 delete escape_summaries;
5196 escape_summaries = NULL;
5197
 5198 /* If we possibly made constructors const/pure, we may need to remove
 5199 them. */
5200 return pureconst ? TODO_remove_functions : 0;
5201}
5202
5203/* Summaries must stay alive until end of compilation. */
5204
5205void
5206ipa_modref_c_finalize ()
5207{
5208 if (optimization_summaries)
5209 ggc_delete (optimization_summaries);
5210 optimization_summaries = NULL;
71dbabcc 5211 if (summaries_lto)
5212 ggc_delete (summaries_lto);
5213 summaries_lto = NULL;
5214 if (fnspec_summaries)
5215 delete fnspec_summaries;
5216 fnspec_summaries = NULL;
5217 if (escape_summaries)
5218 delete escape_summaries;
5219 escape_summaries = NULL;
5220}
5221
d119f34c 5222#include "gt-ipa-modref.h"