/* gcc/ipa-modref.cc -- from the GCC source tree (thirdparty/gcc.git).  */
1 /* Search for references that a functions loads or stores.
2 Copyright (C) 2020-2023 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls.
24
25 This file contains a tree pass and an IPA pass. Both performs the same
26 analysis however tree pass is executed during early and late optimization
27 passes to propagate info downwards in the compilation order. IPA pass
28 propagates across the callgraph and is able to handle recursion and works on
29 whole program during link-time analysis.
30
31 LTO mode differs from the local mode by not recording alias sets but types
32 that are translated to alias sets later. This is necessary in order stream
33 the information because the alias sets are rebuild at stream-in time and may
34 not correspond to ones seen during analysis. For this reason part of
35 analysis is duplicated.
36
37 The following information is computed
38 1) load/store access tree described in ipa-modref-tree.h
39 This is used by tree-ssa-alias to disambiguate load/stores
40 2) EAF flags used by points-to analysis (in tree-ssa-structalias).
41 and defined in tree-core.h.
42 and stored to optimization_summaries.
43
44 There are multiple summaries computed and used during the propagation:
45 - summaries holds summaries from analysis to IPA propagation
46 time.
47 - summaries_lto is same as summaries but holds them in a format
48 that can be streamed (as described above).
49 - fnspec_summary holds fnspec strings for call. This is
50 necessary because gimple_call_fnspec performs additional
51 analysis except for looking callee fndecl.
52 - escape_summary holds escape points for given call edge.
53 That is a vector recording what function parameters
54 may escape to a function call (and with what parameter index). */
55
56 #include "config.h"
57 #include "system.h"
58 #include "coretypes.h"
59 #include "backend.h"
60 #include "tree.h"
61 #include "gimple.h"
62 #include "alloc-pool.h"
63 #include "tree-pass.h"
64 #include "gimple-iterator.h"
65 #include "tree-dfa.h"
66 #include "cgraph.h"
67 #include "ipa-utils.h"
68 #include "symbol-summary.h"
69 #include "gimple-pretty-print.h"
70 #include "gimple-walk.h"
71 #include "print-tree.h"
72 #include "tree-streamer.h"
73 #include "alias.h"
74 #include "calls.h"
75 #include "ipa-modref-tree.h"
76 #include "ipa-modref.h"
77 #include "value-range.h"
78 #include "ipa-prop.h"
79 #include "ipa-fnsummary.h"
80 #include "attr-fnspec.h"
81 #include "symtab-clones.h"
82 #include "gimple-ssa.h"
83 #include "tree-phinodes.h"
84 #include "tree-ssa-operands.h"
85 #include "ssa-iterators.h"
86 #include "stringpool.h"
87 #include "tree-ssanames.h"
88 #include "attribs.h"
89 #include "tree-cfg.h"
90 #include "tree-eh.h"
91
92
93 namespace {
94
95 /* We record fnspec specifiers for call edges since they depends on actual
96 gimple statements. */
97
/* Per-call-edge summary holding the fnspec string of the call.
   Owned and managed by fnspec_summaries_t below.  */

class fnspec_summary
{
public:
  /* Malloc'ed fnspec string, or NULL when none has been recorded.  */
  char *fnspec;

  fnspec_summary ()
  : fnspec (NULL)
  {
  }

  ~fnspec_summary ()
  {
    /* free (NULL) is a no-op, so an unset fnspec is safe here.  */
    free (fnspec);
  }
};
113
114 /* Summary holding fnspec string for a given call. */
115
116 class fnspec_summaries_t : public call_summary <fnspec_summary *>
117 {
118 public:
119 fnspec_summaries_t (symbol_table *symtab)
120 : call_summary <fnspec_summary *> (symtab) {}
121 /* Hook that is called by summary when an edge is duplicated. */
122 void duplicate (cgraph_edge *,
123 cgraph_edge *,
124 fnspec_summary *src,
125 fnspec_summary *dst) final override
126 {
127 dst->fnspec = xstrdup (src->fnspec);
128 }
129 };
130
/* Global table of per-edge fnspec summaries; NULL until allocated by the
   modref pass machinery (outside this chunk).  */
static fnspec_summaries_t *fnspec_summaries = NULL;
132
/* Escape summary holds a vector of param indexes that escape to
   a given call.  */
struct escape_entry
{
  /* Parameter that escapes at a given call.  */
  int parm_index;
  /* Argument it escapes to.  */
  unsigned int arg;
  /* Minimal flags known about the argument.  */
  eaf_flags_t min_flags;
  /* Does it escape directly or indirectly?  */
  bool direct;
};
146
147 /* Dump EAF flags. */
148
149 static void
150 dump_eaf_flags (FILE *out, int flags, bool newline = true)
151 {
152 if (flags & EAF_UNUSED)
153 fprintf (out, " unused");
154 if (flags & EAF_NO_DIRECT_CLOBBER)
155 fprintf (out, " no_direct_clobber");
156 if (flags & EAF_NO_INDIRECT_CLOBBER)
157 fprintf (out, " no_indirect_clobber");
158 if (flags & EAF_NO_DIRECT_ESCAPE)
159 fprintf (out, " no_direct_escape");
160 if (flags & EAF_NO_INDIRECT_ESCAPE)
161 fprintf (out, " no_indirect_escape");
162 if (flags & EAF_NOT_RETURNED_DIRECTLY)
163 fprintf (out, " not_returned_directly");
164 if (flags & EAF_NOT_RETURNED_INDIRECTLY)
165 fprintf (out, " not_returned_indirectly");
166 if (flags & EAF_NO_DIRECT_READ)
167 fprintf (out, " no_direct_read");
168 if (flags & EAF_NO_INDIRECT_READ)
169 fprintf (out, " no_indirect_read");
170 if (newline)
171 fprintf (out, "\n");
172 }
173
174 struct escape_summary
175 {
176 auto_vec <escape_entry> esc;
177 void dump (FILE *out)
178 {
179 for (unsigned int i = 0; i < esc.length (); i++)
180 {
181 fprintf (out, " parm %i arg %i %s min:",
182 esc[i].parm_index,
183 esc[i].arg,
184 esc[i].direct ? "(direct)" : "(indirect)");
185 dump_eaf_flags (out, esc[i].min_flags, false);
186 }
187 fprintf (out, "\n");
188 }
189 };
190
/* Call summary mapping call edges to their escape_summary.  */

class escape_summaries_t : public call_summary <escape_summary *>
{
public:
  escape_summaries_t (symbol_table *symtab)
  : call_summary <escape_summary *> (symtab) {}
  /* Hook that is called by summary when an edge is duplicated.
     Copies the escape vector so each edge owns its own data.  */
  void duplicate (cgraph_edge *,
		  cgraph_edge *,
		  escape_summary *src,
		  escape_summary *dst) final override
  {
    dst->esc = src->esc.copy ();
  }
};
205
/* Global table of per-edge escape summaries; NULL until allocated by the
   modref pass machinery (outside this chunk).  */
static escape_summaries_t *escape_summaries = NULL;
207
208 } /* ANON namespace: GTY annotated summaries can not be anonymous. */
209
210
/* Class (from which there is one global instance) that holds modref summaries
   for all analyzed functions.  */

class GTY((user)) modref_summaries
  : public fast_function_summary <modref_summary *, va_gc>
{
public:
  modref_summaries (symbol_table *symtab)
  : fast_function_summary <modref_summary *, va_gc> (symtab) {}
  /* Hook run when a summary is inserted for a new node.  */
  void insert (cgraph_node *, modref_summary *state) final override;
  /* Hook run when a node and its summary are duplicated (e.g. cloning).  */
  void duplicate (cgraph_node *src_node,
		  cgraph_node *dst_node,
		  modref_summary *src_data,
		  modref_summary *dst_data) final override;
  /* Allocate the summary table in GGC memory; no destructor is
     registered (ggc_alloc_no_dtor).  */
  static modref_summaries *create_ggc (symbol_table *symtab)
  {
    return new (ggc_alloc_no_dtor<modref_summaries> ())
	   modref_summaries (symtab);
  }
};
231
232 class modref_summary_lto;
233
/* Class (from which there is one global instance) that holds the LTO
   variants of modref summaries for all analyzed functions.  */

class GTY((user)) modref_summaries_lto
  : public fast_function_summary <modref_summary_lto *, va_gc>
{
public:
  modref_summaries_lto (symbol_table *symtab)
  : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
    propagated (false) {}
  /* Hook run when a summary is inserted for a new node.  */
  void insert (cgraph_node *, modref_summary_lto *state) final override;
  /* Hook run when a node and its summary are duplicated.  */
  void duplicate (cgraph_node *src_node,
		  cgraph_node *dst_node,
		  modref_summary_lto *src_data,
		  modref_summary_lto *dst_data) final override;
  /* Allocate the summary table in GGC memory; no destructor is
     registered (ggc_alloc_no_dtor).  */
  static modref_summaries_lto *create_ggc (symbol_table *symtab)
  {
    return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
	   modref_summaries_lto (symtab);
  }
  /* Starts false; flipped by the IPA propagation code (outside this
     chunk) — NOTE(review): semantics inferred from the name.  */
  bool propagated;
};
256
/* Global variable holding all modref summaries
   (from analysis to IPA propagation time).  */

static GTY(()) fast_function_summary <modref_summary *, va_gc>
	 *summaries;

/* Global variable holding all modref optimization summaries
   (from IPA propagation time or used by local optimization pass).  */

static GTY(()) fast_function_summary <modref_summary *, va_gc>
	 *optimization_summaries;

/* LTO summaries hold info from analysis to LTO streaming or from LTO
   stream-in through propagation to LTO stream-out.  */

static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
	 *summaries_lto;
274
/* Summary for a single function which this pass produces.
   All flags start cleared; trees are allocated lazily.  */

modref_summary::modref_summary ()
  : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
    writes_errno (false), side_effects (false), nondeterministic (false),
    calls_interposable (false), global_memory_read (false),
    global_memory_written (false), try_dse (false)
{
}
284
/* Release the load and store access trees, if allocated.  */

modref_summary::~modref_summary ()
{
  if (loads)
    ggc_delete (loads);
  if (stores)
    ggc_delete (stores);
}
292
293 /* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
294 useful to track. If returns_void is true moreover clear
295 EAF_NOT_RETURNED. */
296 static int
297 remove_useless_eaf_flags (int eaf_flags, int ecf_flags, bool returns_void)
298 {
299 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
300 eaf_flags &= ~implicit_const_eaf_flags;
301 else if (ecf_flags & ECF_PURE)
302 eaf_flags &= ~implicit_pure_eaf_flags;
303 else if ((ecf_flags & ECF_NORETURN) || returns_void)
304 eaf_flags &= ~(EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY);
305 return eaf_flags;
306 }
307
308 /* Return true if FLAGS holds some useful information. */
309
310 static bool
311 eaf_flags_useful_p (vec <eaf_flags_t> &flags, int ecf_flags)
312 {
313 for (unsigned i = 0; i < flags.length (); i++)
314 if (remove_useless_eaf_flags (flags[i], ecf_flags, false))
315 return true;
316 return false;
317 }
318
/* Return true if summary is potentially useful for optimization.
   If CHECK_FLAGS is false assume that arg_flags are useful.

   NOTE: this has side effects — useless arg_flags and kills vectors
   are released here.  */

bool
modref_summary::useful_p (int ecf_flags, bool check_flags)
{
  if (arg_flags.length () && !check_flags)
    return true;
  if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
    return true;
  /* Argument flags carry no information; drop them.  */
  arg_flags.release ();
  if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
    return true;
  if (check_flags
      && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
    return true;
  if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
    return ((!side_effects || !nondeterministic)
	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
  if (loads && !loads->every_base)
    return true;
  else
    /* Without precise load info the kill list apparently carries no
       useful information; release it.  */
    kills.release ();
  if (ecf_flags & ECF_PURE)
    return ((!side_effects || !nondeterministic)
	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
  return stores && !stores->every_base;
}
347
/* Single function summary used for LTO.  */

typedef modref_tree <tree> modref_records_lto;
struct GTY(()) modref_summary_lto
{
  /* Load and stores in functions using types rather then alias sets.

     This is necessary to make the information streamable for LTO but is also
     more verbose and thus more likely to hit the limits.  */
  modref_records_lto *loads;
  modref_records_lto *stores;
  /* Accesses known to be overwritten before any read (not streamed).  */
  auto_vec<modref_access_node> GTY((skip)) kills;
  /* Per-argument EAF flags (not streamed).  */
  auto_vec<eaf_flags_t> GTY((skip)) arg_flags;
  eaf_flags_t retslot_flags;
  eaf_flags_t static_chain_flags;
  unsigned writes_errno : 1;
  unsigned side_effects : 1;
  unsigned nondeterministic : 1;
  unsigned calls_interposable : 1;

  modref_summary_lto ();
  ~modref_summary_lto ();
  void dump (FILE *);
  bool useful_p (int ecf_flags, bool check_flags = true);
};
373
/* Summary for a single function which this pass produces.
   All flags start cleared; trees are allocated lazily.  */

modref_summary_lto::modref_summary_lto ()
  : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
    writes_errno (false), side_effects (false), nondeterministic (false),
    calls_interposable (false)
{
}
382
/* Release the load and store access trees, if allocated.  */

modref_summary_lto::~modref_summary_lto ()
{
  if (loads)
    ggc_delete (loads);
  if (stores)
    ggc_delete (stores);
}
390
391
/* Return true if lto summary is potentially useful for optimization.
   If CHECK_FLAGS is false assume that arg_flags are useful.

   Mirrors modref_summary::useful_p and, like it, releases useless
   arg_flags and kills vectors as a side effect.  */

bool
modref_summary_lto::useful_p (int ecf_flags, bool check_flags)
{
  if (arg_flags.length () && !check_flags)
    return true;
  if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
    return true;
  /* Argument flags carry no information; drop them.  */
  arg_flags.release ();
  if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
    return true;
  if (check_flags
      && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
    return true;
  if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
    return ((!side_effects || !nondeterministic)
	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
  if (loads && !loads->every_base)
    return true;
  else
    kills.release ();
  if (ecf_flags & ECF_PURE)
    return ((!side_effects || !nondeterministic)
	    && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
  return stores && !stores->every_base;
}
420
/* Dump records TT (base -> ref -> access tree keyed by alias sets)
   to OUT.  */

static void
dump_records (modref_records *tt, FILE *out)
{
  /* every_base means the tree collapsed to "may touch anything".  */
  if (tt->every_base)
    {
      fprintf (out, " Every base\n");
      return;
    }
  size_t i;
  modref_base_node <alias_set_type> *n;
  FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
    {
      fprintf (out, " Base %i: alias set %i\n", (int)i, n->base);
      if (n->every_ref)
	{
	  fprintf (out, " Every ref\n");
	  continue;
	}
      size_t j;
      modref_ref_node <alias_set_type> *r;
      FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
	{
	  fprintf (out, " Ref %i: alias set %i\n", (int)j, r->ref);
	  if (r->every_access)
	    {
	      fprintf (out, " Every access\n");
	      continue;
	    }
	  size_t k;
	  modref_access_node *a;
	  FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
	    {
	      fprintf (out, " access:");
	      a->dump (out);
	    }
	}
    }
}
461
462 /* Dump records TT to OUT. */
463
464 static void
465 dump_lto_records (modref_records_lto *tt, FILE *out)
466 {
467 if (tt->every_base)
468 {
469 fprintf (out, " Every base\n");
470 return;
471 }
472 size_t i;
473 modref_base_node <tree> *n;
474 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
475 {
476 fprintf (out, " Base %i:", (int)i);
477 print_generic_expr (dump_file, n->base);
478 fprintf (out, " (alias set %i)\n",
479 n->base ? get_alias_set (n->base) : 0);
480 if (n->every_ref)
481 {
482 fprintf (out, " Every ref\n");
483 continue;
484 }
485 size_t j;
486 modref_ref_node <tree> *r;
487 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
488 {
489 fprintf (out, " Ref %i:", (int)j);
490 print_generic_expr (dump_file, r->ref);
491 fprintf (out, " (alias set %i)\n",
492 r->ref ? get_alias_set (r->ref) : 0);
493 if (r->every_access)
494 {
495 fprintf (out, " Every access\n");
496 continue;
497 }
498 size_t k;
499 modref_access_node *a;
500 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
501 {
502 fprintf (out, " access:");
503 a->dump (out);
504 }
505 }
506 }
507 }
508
/* Dump all escape points and fnspec strings recorded for the call edges
   of NODE (recursing into inlined callees) to OUT, indented by DEPTH.  */

static void
dump_modref_edge_summaries (FILE *out, cgraph_node *node, int depth)
{
  int i = 0;
  if (!escape_summaries)
    return;
  for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
    {
      class escape_summary *sum = escape_summaries->get (e);
      if (sum)
	{
	  fprintf (out, "%*sIndirect call %i in %s escapes:",
		   depth, "", i, node->dump_name ());
	  sum->dump (out);
	}
      i++;
    }
  for (cgraph_edge *e = node->callees; e; e = e->next_callee)
    {
      /* Inlined (not failed) edges hide further edges in the callee body;
	 recurse to show them too.  */
      if (!e->inline_failed)
	dump_modref_edge_summaries (out, e->callee, depth + 1);
      class escape_summary *sum = escape_summaries->get (e);
      if (sum)
	{
	  fprintf (out, "%*sCall %s->%s escapes:", depth, "",
		   node->dump_name (), e->callee->dump_name ());
	  sum->dump (out);
	}
      /* NOTE(review): fnspec_summaries is assumed non-NULL whenever
	 escape_summaries is — presumably both are allocated together.  */
      class fnspec_summary *fsum = fnspec_summaries->get (e);
      if (fsum)
	{
	  fprintf (out, "%*sCall %s->%s fnspec: %s\n", depth, "",
		   node->dump_name (), e->callee->dump_name (),
		   fsum->fnspec);
	}
    }
}
548
/* Remove all call edge summaries associated with NODE,
   recursing into inlined callees.  */

static void
remove_modref_edge_summaries (cgraph_node *node)
{
  if (!escape_summaries)
    return;
  for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
    escape_summaries->remove (e);
  for (cgraph_edge *e = node->callees; e; e = e->next_callee)
    {
      if (!e->inline_failed)
	remove_modref_edge_summaries (e->callee);
      escape_summaries->remove (e);
      fnspec_summaries->remove (e);
    }
}
566
/* Dump summary to OUT: access trees, kills, flag bits and per-parameter
   EAF flags.  */

void
modref_summary::dump (FILE *out)
{
  if (loads)
    {
      fprintf (out, " loads:\n");
      dump_records (loads, out);
    }
  if (stores)
    {
      fprintf (out, " stores:\n");
      dump_records (stores, out);
    }
  if (kills.length ())
    {
      fprintf (out, " kills:\n");
      for (auto kill : kills)
	{
	  fprintf (out, " ");
	  kill.dump (out);
	}
    }
  if (writes_errno)
    fprintf (out, " Writes errno\n");
  if (side_effects)
    fprintf (out, " Side effects\n");
  if (nondeterministic)
    fprintf (out, " Nondeterministic\n");
  if (calls_interposable)
    fprintf (out, " Calls interposable\n");
  if (global_memory_read)
    fprintf (out, " Global memory read\n");
  if (global_memory_written)
    fprintf (out, " Global memory written\n");
  if (try_dse)
    fprintf (out, " Try dse\n");
  if (arg_flags.length ())
    {
      /* Only parameters with some flags set are printed.  */
      for (unsigned int i = 0; i < arg_flags.length (); i++)
	if (arg_flags[i])
	  {
	    fprintf (out, " parm %i flags:", i);
	    dump_eaf_flags (out, arg_flags[i]);
	  }
    }
  if (retslot_flags)
    {
      fprintf (out, " Retslot flags:");
      dump_eaf_flags (out, retslot_flags);
    }
  if (static_chain_flags)
    {
      fprintf (out, " Static chain flags:");
      dump_eaf_flags (out, static_chain_flags);
    }
}
625
/* Dump LTO summary to OUT.  Unlike modref_summary::dump, loads and
   stores are printed unconditionally.  */

void
modref_summary_lto::dump (FILE *out)
{
  fprintf (out, " loads:\n");
  dump_lto_records (loads, out);
  fprintf (out, " stores:\n");
  dump_lto_records (stores, out);
  if (kills.length ())
    {
      fprintf (out, " kills:\n");
      for (auto kill : kills)
	{
	  fprintf (out, " ");
	  kill.dump (out);
	}
    }
  if (writes_errno)
    fprintf (out, " Writes errno\n");
  if (side_effects)
    fprintf (out, " Side effects\n");
  if (nondeterministic)
    fprintf (out, " Nondeterministic\n");
  if (calls_interposable)
    fprintf (out, " Calls interposable\n");
  if (arg_flags.length ())
    {
      /* Only parameters with some flags set are printed.  */
      for (unsigned int i = 0; i < arg_flags.length (); i++)
	if (arg_flags[i])
	  {
	    fprintf (out, " parm %i flags:", i);
	    dump_eaf_flags (out, arg_flags[i]);
	  }
    }
  if (retslot_flags)
    {
      fprintf (out, " Retslot flags:");
      dump_eaf_flags (out, retslot_flags);
    }
  if (static_chain_flags)
    {
      fprintf (out, " Static chain flags:");
      dump_eaf_flags (out, static_chain_flags);
    }
}
672
673 /* Called after summary is produced and before it is used by local analysis.
674 Can be called multiple times in case summary needs to update signature.
675 FUN is decl of function summary is attached to. */
676 void
677 modref_summary::finalize (tree fun)
678 {
679 global_memory_read = !loads || loads->global_access_p ();
680 global_memory_written = !stores || stores->global_access_p ();
681
682 /* We can do DSE if we know function has no side effects and
683 we can analyze all stores. Disable dse if there are too many
684 stores to try. */
685 if (side_effects || global_memory_written || writes_errno)
686 try_dse = false;
687 else
688 {
689 try_dse = true;
690 size_t i, j, k;
691 int num_tests = 0, max_tests
692 = opt_for_fn (fun, param_modref_max_tests);
693 modref_base_node <alias_set_type> *base_node;
694 modref_ref_node <alias_set_type> *ref_node;
695 modref_access_node *access_node;
696 FOR_EACH_VEC_SAFE_ELT (stores->bases, i, base_node)
697 {
698 if (base_node->every_ref)
699 {
700 try_dse = false;
701 break;
702 }
703 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
704 {
705 if (base_node->every_ref)
706 {
707 try_dse = false;
708 break;
709 }
710 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
711 if (num_tests++ > max_tests
712 || !access_node->parm_offset_known)
713 {
714 try_dse = false;
715 break;
716 }
717 if (!try_dse)
718 break;
719 }
720 if (!try_dse)
721 break;
722 }
723 }
724 if (loads->every_base)
725 load_accesses = 1;
726 else
727 {
728 load_accesses = 0;
729 for (auto base_node : loads->bases)
730 {
731 if (base_node->every_ref)
732 load_accesses++;
733 else
734 for (auto ref_node : base_node->refs)
735 if (ref_node->every_access)
736 load_accesses++;
737 else
738 load_accesses += ref_node->accesses->length ();
739 }
740 }
741 }
742
/* Get function summary for FUNC if it exists, return NULL otherwise.
   Returns NULL too when FUNC's ultimate alias target is interposable.  */

modref_summary *
get_modref_function_summary (cgraph_node *func)
{
  /* Avoid creation of the summary too early (e.g. when front-end calls us).  */
  if (!optimization_summaries)
    return NULL;

  /* A single function body may be represented by multiple symbols with
     different visibility.  For example, if FUNC is an interposable alias,
     we don't want to return anything, even if we have summary for the target
     function.  */
  enum availability avail;
  func = func->ultimate_alias_target
		 (&avail, current_function_decl ?
			  cgraph_node::get (current_function_decl) : NULL);
  if (avail <= AVAIL_INTERPOSABLE)
    return NULL;

  modref_summary *r = optimization_summaries->get (func);
  return r;
}
766
/* Get function summary for CALL if it exists, return NULL otherwise.
   If non-null set interposed to indicate whether function may not
   bind to current def.  In this case sometimes loads from function
   needs to be ignored.  */

modref_summary *
get_modref_function_summary (gcall *call, bool *interposed)
{
  tree callee = gimple_call_fndecl (call);
  /* Indirect calls have no fndecl; nothing to look up.  */
  if (!callee)
    return NULL;
  struct cgraph_node *node = cgraph_node::get (callee);
  if (!node)
    return NULL;
  modref_summary *r = get_modref_function_summary (node);
  if (interposed && r)
    *interposed = r->calls_interposable
		  || !node->binds_to_current_def_p ();
  return r;
}
787
788
789 namespace {
790
791 /* Return true if ECF flags says that nondeterminism can be ignored. */
792
793 static bool
794 ignore_nondeterminism_p (tree caller, int flags)
795 {
796 if (flags & (ECF_CONST | ECF_PURE))
797 return true;
798 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
799 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
800 return true;
801 return false;
802 }
803
804 /* Return true if ECF flags says that return value can be ignored. */
805
806 static bool
807 ignore_retval_p (tree caller, int flags)
808 {
809 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
810 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
811 return true;
812 return false;
813 }
814
815 /* Return true if ECF flags says that stores can be ignored. */
816
817 static bool
818 ignore_stores_p (tree caller, int flags)
819 {
820 if (flags & (ECF_PURE | ECF_CONST | ECF_NOVOPS))
821 return true;
822 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
823 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
824 return true;
825 return false;
826 }
827
/* Determine parm_map for PTR which is supposed to be a pointer:
   which parameter (if any) OP is based on and at what offset.  */

modref_parm_map
parm_map_for_ptr (tree op)
{
  bool offset_known;
  poly_int64 offset;
  struct modref_parm_map parm_map;
  gcall *call;

  parm_map.parm_offset_known = false;
  parm_map.parm_offset = 0;

  /* Strip constant pointer adjustments, accumulating the offset.  */
  offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
  if (TREE_CODE (op) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (op)
      && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
    {
      int index = 0;

      if (cfun->static_chain_decl
	  && op == ssa_default_def (cfun, cfun->static_chain_decl))
	index = MODREF_STATIC_CHAIN_PARM;
      else
	/* Linear walk over DECL_ARGUMENTS to find the parameter's
	   position.  */
	for (tree t = DECL_ARGUMENTS (current_function_decl);
	     t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
	  index++;
      parm_map.parm_index = index;
      parm_map.parm_offset_known = offset_known;
      parm_map.parm_offset = offset;
    }
  else if (points_to_local_or_readonly_memory_p (op))
    parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
  /* Memory allocated in the function is not visible to caller before the
     call and thus we do not need to record it as load/stores/kills.  */
  else if (TREE_CODE (op) == SSA_NAME
	   && (call = dyn_cast<gcall *>(SSA_NAME_DEF_STMT (op))) != NULL
	   && gimple_call_flags (call) & ECF_MALLOC)
    parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
  else
    parm_map.parm_index = MODREF_UNKNOWN_PARM;
  return parm_map;
}
871
872 /* Return true if ARG with EAF flags FLAGS can not make any caller's parameter
873 used (if LOAD is true we check loads, otherwise stores). */
874
875 static bool
876 verify_arg (tree arg, int flags, bool load)
877 {
878 if (flags & EAF_UNUSED)
879 return true;
880 if (load && (flags & EAF_NO_DIRECT_READ))
881 return true;
882 if (!load
883 && (flags & (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
884 == (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
885 return true;
886 if (is_gimple_constant (arg))
887 return true;
888 if (DECL_P (arg) && TREE_READONLY (arg))
889 return true;
890 if (TREE_CODE (arg) == ADDR_EXPR)
891 {
892 tree t = get_base_address (TREE_OPERAND (arg, 0));
893 if (is_gimple_constant (t))
894 return true;
895 if (DECL_P (t)
896 && (TREE_READONLY (t) || TREE_CODE (t) == FUNCTION_DECL))
897 return true;
898 }
899 return false;
900 }
901
/* Return true if STMT may access memory that is pointed to by parameters
   of caller and which is not seen as an escape by PTA.
   CALLEE_ECF_FLAGS are ECF flags of callee.  If LOAD is true then by access
   we mean load, otherwise we mean store.  */

static bool
may_access_nonescaping_parm_p (gcall *call, int callee_ecf_flags, bool load)
{
  /* Flags implied for every argument by the callee's ECF flags.  */
  int implicit_flags = 0;

  if (ignore_stores_p (current_function_decl, callee_ecf_flags))
    implicit_flags |= ignore_stores_eaf_flags;
  if (callee_ecf_flags & ECF_PURE)
    implicit_flags |= implicit_pure_eaf_flags;
  if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
    implicit_flags |= implicit_const_eaf_flags;
  /* Check the static chain and every explicit argument; any one that
     verify_arg cannot rule out means the call may access it.  */
  if (gimple_call_chain (call)
      && !verify_arg (gimple_call_chain (call),
		      gimple_call_static_chain_flags (call) | implicit_flags,
		      load))
    return true;
  for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
    if (!verify_arg (gimple_call_arg (call, i),
		     gimple_call_arg_flags (call, i) | implicit_flags,
		     load))
      return true;
  return false;
}
930
931
/* Analyze memory accesses (loads, stores and kills) performed
   by the function.  Set also side_effects, calls_interposable
   and nondeterminism flags.  */

class modref_access_analysis
{
public:
  modref_access_analysis (bool ipa, modref_summary *summary,
			  modref_summary_lto *summary_lto)
  : m_summary (summary), m_summary_lto (summary_lto), m_ipa (ipa)
  {
  }
  /* Entry point; walks the function body (defined outside this chunk).  */
  void analyze ();
private:
  bool set_side_effects ();
  bool set_nondeterministic ();
  /* Build an access node describing REF.  */
  static modref_access_node get_access (ao_ref *ref);
  static void record_access (modref_records *, ao_ref *, modref_access_node &);
  static void record_access_lto (modref_records_lto *, ao_ref *,
				 modref_access_node &a);
  bool record_access_p (tree);
  bool record_unknown_load ();
  bool record_unknown_store ();
  bool record_global_memory_load ();
  bool record_global_memory_store ();
  bool merge_call_side_effects (gimple *, modref_summary *,
				cgraph_node *, bool);
  modref_access_node get_access_for_fnspec (gcall *, attr_fnspec &,
					    unsigned int, modref_parm_map &);
  void process_fnspec (gcall *);
  void analyze_call (gcall *);
  static bool analyze_load (gimple *, tree, tree, void *);
  static bool analyze_store (gimple *, tree, tree, void *);
  void analyze_stmt (gimple *, bool);
  void propagate ();

  /* Summary being computed.
     We work either with m_summary or m_summary_lto.  Never on both.  */
  modref_summary *m_summary;
  modref_summary_lto *m_summary_lto;
  /* Recursive calls needs simplistic dataflow after analysis finished.
     Collect all calls into this vector during analysis and later process
     them in propagate.  */
  auto_vec <gimple *, 32> m_recursive_calls;
  /* ECF flags of function being analyzed.  */
  int m_ecf_flags;
  /* True if IPA propagation will be done later.  */
  bool m_ipa;
  /* Set true if statement currently analyze is known to be
     executed each time function is called.  */
  bool m_always_executed;
};
984
985 /* Set side_effects flag and return if something changed. */
986
987 bool
988 modref_access_analysis::set_side_effects ()
989 {
990 bool changed = false;
991
992 if (m_summary && !m_summary->side_effects)
993 {
994 m_summary->side_effects = true;
995 changed = true;
996 }
997 if (m_summary_lto && !m_summary_lto->side_effects)
998 {
999 m_summary_lto->side_effects = true;
1000 changed = true;
1001 }
1002 return changed;
1003 }
1004
1005 /* Set nondeterministic flag and return if something changed. */
1006
1007 bool
1008 modref_access_analysis::set_nondeterministic ()
1009 {
1010 bool changed = false;
1011
1012 if (m_summary && !m_summary->nondeterministic)
1013 {
1014 m_summary->side_effects = m_summary->nondeterministic = true;
1015 changed = true;
1016 }
1017 if (m_summary_lto && !m_summary_lto->nondeterministic)
1018 {
1019 m_summary_lto->side_effects = m_summary_lto->nondeterministic = true;
1020 changed = true;
1021 }
1022 return changed;
1023 }
1024
/* Construct modref_access_node from REF, mapping the base pointer to a
   caller parameter where possible.  */

modref_access_node
modref_access_analysis::get_access (ao_ref *ref)
{
  tree base;

  base = ao_ref_base (ref);
  modref_access_node a = {ref->offset, ref->size, ref->max_size,
			  0, MODREF_UNKNOWN_PARM, false, 0};
  if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree memref = base;
      modref_parm_map m = parm_map_for_ptr (TREE_OPERAND (base, 0));

      a.parm_index = m.parm_index;
      if (a.parm_index != MODREF_UNKNOWN_PARM && TREE_CODE (memref) == MEM_REF)
	{
	  /* Combine the MEM_REF's constant offset with the offset the
	     pointer already had relative to the parameter; the result is
	     only known if both parts are.  */
	  a.parm_offset_known
	     = wi::to_poly_wide (TREE_OPERAND
				 (memref, 1)).to_shwi (&a.parm_offset);
	  if (a.parm_offset_known && m.parm_offset_known)
	    a.parm_offset += m.parm_offset;
	  else
	    a.parm_offset_known = false;
	}
    }
  else
    a.parm_index = MODREF_UNKNOWN_PARM;
  return a;
}
1056
/* Record access into the modref_records data structure.  */

void
modref_access_analysis::record_access (modref_records *tt,
				       ao_ref *ref,
				       modref_access_node &a)
{
  /* With strict aliasing disabled (either globally or for IPA) alias
     sets carry no information, so record everything as set 0.  */
  alias_set_type base_set = !flag_strict_aliasing
			    || !flag_ipa_strict_aliasing ? 0
			    : ao_ref_base_alias_set (ref);
  alias_set_type ref_set = !flag_strict_aliasing
			   || !flag_ipa_strict_aliasing ? 0
			   : (ao_ref_alias_set (ref));
  if (dump_file)
    {
      fprintf (dump_file, " - Recording base_set=%i ref_set=%i ",
	       base_set, ref_set);
      a.dump (dump_file);
    }
  tt->insert (current_function_decl, base_set, ref_set, a, false);
}
1078
/* IPA (LTO) version of record_access: instead of alias sets, record the
   types from which the alias sets would be computed, since alias sets are
   rebuilt at stream-in time and are not stable across LTO streaming.  */

void
modref_access_analysis::record_access_lto (modref_records_lto *tt, ao_ref *ref,
					   modref_access_node &a)
{
  /* get_alias_set sometimes use different type to compute the alias set
     than TREE_TYPE (base).  Do same adjustments.  */
  tree base_type = NULL_TREE, ref_type = NULL_TREE;
  if (flag_strict_aliasing && flag_ipa_strict_aliasing)
    {
      tree base;

      /* Strip handled components to get at the innermost base.  */
      base = ref->ref;
      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);

      base_type = reference_alias_ptr_type_1 (&base);

      if (!base_type)
	base_type = TREE_TYPE (base);
      else
	/* Ref-all pointers alias everything; record that as no type.  */
	base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
		    ? NULL_TREE : TREE_TYPE (base_type);

      tree ref_expr = ref->ref;
      ref_type = reference_alias_ptr_type_1 (&ref_expr);

      if (!ref_type)
	ref_type = TREE_TYPE (ref_expr);
      else
	ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
		   ? NULL_TREE : TREE_TYPE (ref_type);

      /* Sanity check that we are in sync with what get_alias_set does.  */
      gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
			   || get_alias_set (base_type)
			      == ao_ref_base_alias_set (ref));
      gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
			   || get_alias_set (ref_type)
			      == ao_ref_alias_set (ref));

      /* Do not bother to record types that have no meaningful alias set.
	 Also skip variably modified types since these go to local streams.  */
      if (base_type && (!get_alias_set (base_type)
			|| variably_modified_type_p (base_type, NULL_TREE)))
	base_type = NULL_TREE;
      if (ref_type && (!get_alias_set (ref_type)
		       || variably_modified_type_p (ref_type, NULL_TREE)))
	ref_type = NULL_TREE;
    }
  if (dump_file)
    {
      fprintf (dump_file, " - Recording base type:");
      print_generic_expr (dump_file, base_type);
      fprintf (dump_file, " (alias set %i) ref type:",
	       base_type ? get_alias_set (base_type) : 0);
      print_generic_expr (dump_file, ref_type);
      fprintf (dump_file, " (alias set %i) ",
	       ref_type ? get_alias_set (ref_type) : 0);
      a.dump (dump_file);
    }

  tt->insert (current_function_decl, base_type, ref_type, a, false);
}
1144
1145 /* Returns true if and only if we should store the access to EXPR.
1146 Some accesses, e.g. loads from automatic variables, are not interesting. */
1147
1148 bool
1149 modref_access_analysis::record_access_p (tree expr)
1150 {
1151 if (TREE_THIS_VOLATILE (expr))
1152 {
1153 if (dump_file)
1154 fprintf (dump_file, " (volatile; marking nondeterministic) ");
1155 set_nondeterministic ();
1156 }
1157 if (cfun->can_throw_non_call_exceptions
1158 && tree_could_throw_p (expr))
1159 {
1160 if (dump_file)
1161 fprintf (dump_file, " (can throw; marking side effects) ");
1162 set_side_effects ();
1163 }
1164
1165 if (refs_local_or_readonly_memory_p (expr))
1166 {
1167 if (dump_file)
1168 fprintf (dump_file, " - Read-only or local, ignoring.\n");
1169 return false;
1170 }
1171 return true;
1172 }
1173
1174 /* Collapse loads and return true if something changed. */
1175
1176 bool
1177 modref_access_analysis::record_unknown_load ()
1178 {
1179 bool changed = false;
1180
1181 if (m_summary && !m_summary->loads->every_base)
1182 {
1183 m_summary->loads->collapse ();
1184 changed = true;
1185 }
1186 if (m_summary_lto && !m_summary_lto->loads->every_base)
1187 {
1188 m_summary_lto->loads->collapse ();
1189 changed = true;
1190 }
1191 return changed;
1192 }
1193
1194 /* Collapse loads and return true if something changed. */
1195
1196 bool
1197 modref_access_analysis::record_unknown_store ()
1198 {
1199 bool changed = false;
1200
1201 if (m_summary && !m_summary->stores->every_base)
1202 {
1203 m_summary->stores->collapse ();
1204 changed = true;
1205 }
1206 if (m_summary_lto && !m_summary_lto->stores->every_base)
1207 {
1208 m_summary_lto->stores->collapse ();
1209 changed = true;
1210 }
1211 return changed;
1212 }
1213
1214 /* Record unknown load from global memory. */
1215
1216 bool
1217 modref_access_analysis::record_global_memory_load ()
1218 {
1219 bool changed = false;
1220 modref_access_node a = {0, -1, -1,
1221 0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
1222
1223 if (m_summary && !m_summary->loads->every_base)
1224 changed |= m_summary->loads->insert (current_function_decl, 0, 0, a, false);
1225 if (m_summary_lto && !m_summary_lto->loads->every_base)
1226 changed |= m_summary_lto->loads->insert (current_function_decl,
1227 0, 0, a, false);
1228 return changed;
1229 }
1230
1231 /* Record unknown store from global memory. */
1232
1233 bool
1234 modref_access_analysis::record_global_memory_store ()
1235 {
1236 bool changed = false;
1237 modref_access_node a = {0, -1, -1,
1238 0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
1239
1240 if (m_summary && !m_summary->stores->every_base)
1241 changed |= m_summary->stores->insert (current_function_decl,
1242 0, 0, a, false);
1243 if (m_summary_lto && !m_summary_lto->stores->every_base)
1244 changed |= m_summary_lto->stores->insert (current_function_decl,
1245 0, 0, a, false);
1246 return changed;
1247 }
1248
/* Merge side effects of call STMT to function with CALLEE_SUMMARY.
   Return true if something changed.
   CALLEE_NODE is used to check whether the callee binds to the current
   definition; if not, calls_interposable must be set.
   If RECORD_ADJUSTMENTS is true cap number of adjustments to
   a given access to make dataflow finite.  */

bool
modref_access_analysis::merge_call_side_effects
	 (gimple *stmt, modref_summary *callee_summary,
	  cgraph_node *callee_node, bool record_adjustments)
{
  gcall *call = as_a <gcall *> (stmt);
  int flags = gimple_call_flags (call);

  /* Nothing to do for non-looping const functions.  */
  if ((flags & (ECF_CONST | ECF_NOVOPS))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return false;

  bool changed = false;

  if (dump_file)
    fprintf (dump_file, " - Merging side effects of %s\n",
	     callee_node->dump_name ());

  /* Merge side effects and non-determinism.
     PURE/CONST flags makes functions deterministic and if there is
     no LOOPING_CONST_OR_PURE they also have no side effects.  */
  if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
      || (flags & ECF_LOOPING_CONST_OR_PURE))
    {
      if (!m_summary->side_effects && callee_summary->side_effects)
	{
	  if (dump_file)
	    fprintf (dump_file, " - merging side effects.\n");
	  m_summary->side_effects = true;
	  changed = true;
	}
      if (!m_summary->nondeterministic && callee_summary->nondeterministic
	  && !ignore_nondeterminism_p (current_function_decl, flags))
	{
	  if (dump_file)
	    fprintf (dump_file, " - merging nondeterministic.\n");
	  m_summary->nondeterministic = true;
	  changed = true;
	}
    }

  /* For const functions we are done.  */
  if (flags & (ECF_CONST | ECF_NOVOPS))
    return changed;

  /* Merge calls_interposable flags.  */
  if (!m_summary->calls_interposable && callee_summary->calls_interposable)
    {
      if (dump_file)
	fprintf (dump_file, " - merging calls interposable.\n");
      m_summary->calls_interposable = true;
      changed = true;
    }

  /* A callee that may be replaced at link time makes the call
     interposable.  */
  if (!callee_node->binds_to_current_def_p () && !m_summary->calls_interposable)
    {
      if (dump_file)
	fprintf (dump_file, " - May be interposed.\n");
      m_summary->calls_interposable = true;
      changed = true;
    }

  /* Now merge the actual load, store and kill vectors.  For this we need
     to compute map translating new parameters to old.  */
  if (dump_file)
    fprintf (dump_file, " Parm map:");

  auto_vec <modref_parm_map, 32> parm_map;
  parm_map.safe_grow_cleared (gimple_call_num_args (call), true);
  for (unsigned i = 0; i < gimple_call_num_args (call); i++)
    {
      parm_map[i] = parm_map_for_ptr (gimple_call_arg (call, i));
      if (dump_file)
	{
	  fprintf (dump_file, " %i", parm_map[i].parm_index);
	  if (parm_map[i].parm_offset_known)
	    {
	      fprintf (dump_file, " offset:");
	      print_dec ((poly_int64_pod)parm_map[i].parm_offset,
			 dump_file, SIGNED);
	    }
	}
    }

  /* The static chain is mapped separately from the regular arguments.  */
  modref_parm_map chain_map;
  if (gimple_call_chain (call))
    {
      chain_map = parm_map_for_ptr (gimple_call_chain (call));
      if (dump_file)
	{
	  fprintf (dump_file, "static chain %i", chain_map.parm_index);
	  if (chain_map.parm_offset_known)
	    {
	      fprintf (dump_file, " offset:");
	      print_dec ((poly_int64_pod)chain_map.parm_offset,
			 dump_file, SIGNED);
	    }
	}
    }
  if (dump_file)
    fprintf (dump_file, "\n");

  /* Kills can be merged in only if we know the function is going to be
     always executed.  */
  if (m_always_executed
      && callee_summary->kills.length ()
      && (!cfun->can_throw_non_call_exceptions
	  || !stmt_could_throw_p (cfun, call)))
    {
      /* Watch for self recursive updates.  */
      auto_vec<modref_access_node, 32> saved_kills;

      saved_kills.reserve_exact (callee_summary->kills.length ());
      saved_kills.splice (callee_summary->kills);
      for (auto kill : saved_kills)
	{
	  if (kill.parm_index >= (int)parm_map.length ())
	    continue;
	  modref_parm_map &m
		  = kill.parm_index == MODREF_STATIC_CHAIN_PARM
		    ? chain_map
		    : parm_map[kill.parm_index];
	  /* Kills can only be translated when the target is a tracked
	     parameter with a known offset.  */
	  if (m.parm_index == MODREF_LOCAL_MEMORY_PARM
	      || m.parm_index == MODREF_UNKNOWN_PARM
	      || m.parm_index == MODREF_RETSLOT_PARM
	      || !m.parm_offset_known)
	    continue;
	  modref_access_node n = kill;
	  n.parm_index = m.parm_index;
	  n.parm_offset += m.parm_offset;
	  if (modref_access_node::insert_kill (m_summary->kills, n,
					       record_adjustments))
	    changed = true;
	}
    }

  /* Merge in loads.  */
  changed |= m_summary->loads->merge (current_function_decl,
				      callee_summary->loads,
				      &parm_map, &chain_map,
				      record_adjustments,
				      !may_access_nonescaping_parm_p
					 (call, flags, true));
  /* Merge in stores.  */
  if (!ignore_stores_p (current_function_decl, flags))
    {
      changed |= m_summary->stores->merge (current_function_decl,
					   callee_summary->stores,
					   &parm_map, &chain_map,
					   record_adjustments,
					   !may_access_nonescaping_parm_p
					      (call, flags, false));
      if (!m_summary->writes_errno
	  && callee_summary->writes_errno)
	{
	  m_summary->writes_errno = true;
	  changed = true;
	}
    }
  return changed;
}
1417
/* Return access node for argument I of CALL with FNSPEC.  MAP gives the
   translation of the argument pointer into the caller's parameter space.  */

modref_access_node
modref_access_analysis::get_access_for_fnspec (gcall *call, attr_fnspec &fnspec,
					       unsigned int i,
					       modref_parm_map &map)
{
  tree size = NULL_TREE;
  unsigned int size_arg;

  /* Determine maximal access size if the fnspec provides it, either via
     another argument or via the pointed-to type of the argument.  */
  if (!fnspec.arg_specified_p (i))
    ;
  else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
    size = gimple_call_arg (call, size_arg);
  else if (fnspec.arg_access_size_given_by_type_p (i))
    {
      tree callee = gimple_call_fndecl (call);
      tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));

      /* Walk the argument type list to the I-th entry.  */
      for (unsigned int p = 0; p < i; p++)
	t = TREE_CHAIN (t);
      size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
    }
  /* Offset and size within the pointed-to memory start out unknown; only
     the parameter mapping is filled in.  */
  modref_access_node a = {0, -1, -1,
			  map.parm_offset, map.parm_index,
			  map.parm_offset_known, 0};
  poly_int64 size_hwi;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0,
			    HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      /* Exact size of the access stays unknown, but it is bounded by
	 SIZE bytes (converted to bits).  */
      a.size = -1;
      a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
    }
  return a;
}
/* Apply side effects of call CALL to the current summaries using the
   callee's fnspec string (if any).  This is used when no modref summary
   is available for the callee.  */

void
modref_access_analysis::process_fnspec (gcall *call)
{
  int flags = gimple_call_flags (call);

  /* PURE/CONST flags makes functions deterministic and if there is
     no LOOPING_CONST_OR_PURE they also have no side effects.  */
  if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
      || (flags & ECF_LOOPING_CONST_OR_PURE)
      || (cfun->can_throw_non_call_exceptions
	  && stmt_could_throw_p (cfun, call)))
    {
      set_side_effects ();
      if (!ignore_nondeterminism_p (current_function_decl, flags))
	set_nondeterministic ();
    }

  /* For const functions we are done.  */
  if (flags & (ECF_CONST | ECF_NOVOPS))
    return;

  attr_fnspec fnspec = gimple_call_fnspec (call);
  /* If there is no fnspec we know nothing about loads & stores.  */
  if (!fnspec.known_p ())
    {
      if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
	fprintf (dump_file, " Builtin with no fnspec: %s\n",
		 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call))));
      if (!ignore_stores_p (current_function_decl, flags))
	{
	  /* If the call cannot reach memory pointed to by non-escaping
	     parameters, only global memory is affected; otherwise give
	     up completely.  */
	  if (!may_access_nonescaping_parm_p (call, flags, false))
	    record_global_memory_store ();
	  else
	    record_unknown_store ();
	  if (!may_access_nonescaping_parm_p (call, flags, true))
	    record_global_memory_load ();
	  else
	    record_unknown_load ();
	}
      else
	{
	  if (!may_access_nonescaping_parm_p (call, flags, true))
	    record_global_memory_load ();
	  else
	    record_unknown_load ();
	}
      return;
    }
  /* Process fnspec.  */
  if (fnspec.global_memory_read_p ())
    {
      if (may_access_nonescaping_parm_p (call, flags, true))
	record_unknown_load ();
      else
	record_global_memory_load ();
    }
  else
    {
      /* Record loads through each pointer argument the fnspec allows
	 to be read.  */
      for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
	if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
	  ;
	else if (!fnspec.arg_specified_p (i)
		 || fnspec.arg_maybe_read_p (i))
	  {
	    modref_parm_map map = parm_map_for_ptr
					(gimple_call_arg (call, i));

	    if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (map.parm_index == MODREF_UNKNOWN_PARM)
	      {
		record_unknown_load ();
		break;
	      }
	    modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
	    if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (m_summary)
	      m_summary->loads->insert (current_function_decl, 0, 0, a, false);
	    if (m_summary_lto)
	      m_summary_lto->loads->insert (current_function_decl, 0, 0, a,
					    false);
	  }
    }
  if (ignore_stores_p (current_function_decl, flags))
    return;
  if (fnspec.global_memory_written_p ())
    {
      if (may_access_nonescaping_parm_p (call, flags, false))
	record_unknown_store ();
      else
	record_global_memory_store ();
    }
  else
    {
      /* Record stores through each pointer argument the fnspec allows
	 to be written.  */
      for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
	if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
	  ;
	else if (!fnspec.arg_specified_p (i)
		 || fnspec.arg_maybe_written_p (i))
	  {
	    modref_parm_map map = parm_map_for_ptr
					 (gimple_call_arg (call, i));

	    if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (map.parm_index == MODREF_UNKNOWN_PARM)
	      {
		record_unknown_store ();
		break;
	      }
	    modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
	    if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
	      continue;
	    if (m_summary)
	      m_summary->stores->insert (current_function_decl, 0, 0, a, false);
	    if (m_summary_lto)
	      m_summary_lto->stores->insert (current_function_decl,
					     0, 0, a, false);
	  }
      /* Stores to errno only matter when -fmath-errno is in effect.  */
      if (fnspec.errno_maybe_written_p () && flag_errno_math)
	{
	  if (m_summary)
	    m_summary->writes_errno = true;
	  if (m_summary_lto)
	    m_summary_lto->writes_errno = true;
	}
    }
}
1588
1589 /* Analyze function call STMT in function F.
1590 Remember recursive calls in RECURSIVE_CALLS. */
1591
1592 void
1593 modref_access_analysis::analyze_call (gcall *stmt)
1594 {
1595 /* Check flags on the function call. In certain cases, analysis can be
1596 simplified. */
1597 int flags = gimple_call_flags (stmt);
1598
1599 if (dump_file)
1600 {
1601 fprintf (dump_file, " - Analyzing call:");
1602 print_gimple_stmt (dump_file, stmt, 0);
1603 }
1604
1605 if ((flags & (ECF_CONST | ECF_NOVOPS))
1606 && !(flags & ECF_LOOPING_CONST_OR_PURE))
1607 {
1608 if (dump_file)
1609 fprintf (dump_file,
1610 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
1611 "except for args.\n");
1612 return;
1613 }
1614
1615 /* Next, we try to get the callee's function declaration. The goal is to
1616 merge their summary with ours. */
1617 tree callee = gimple_call_fndecl (stmt);
1618
1619 /* Check if this is an indirect call. */
1620 if (!callee)
1621 {
1622 if (dump_file)
1623 fprintf (dump_file, gimple_call_internal_p (stmt)
1624 ? " - Internal call" : " - Indirect call.\n");
1625 process_fnspec (stmt);
1626 return;
1627 }
1628 /* We only need to handle internal calls in IPA mode. */
1629 gcc_checking_assert (!m_summary_lto && !m_ipa);
1630
1631 struct cgraph_node *callee_node = cgraph_node::get_create (callee);
1632
1633 /* If this is a recursive call, the target summary is the same as ours, so
1634 there's nothing to do. */
1635 if (recursive_call_p (current_function_decl, callee))
1636 {
1637 m_recursive_calls.safe_push (stmt);
1638 set_side_effects ();
1639 if (dump_file)
1640 fprintf (dump_file, " - Skipping recursive call.\n");
1641 return;
1642 }
1643
1644 gcc_assert (callee_node != NULL);
1645
1646 /* Get the function symbol and its availability. */
1647 enum availability avail;
1648 callee_node = callee_node->function_symbol (&avail);
1649 bool looping;
1650 if (builtin_safe_for_const_function_p (&looping, callee))
1651 {
1652 if (looping)
1653 set_side_effects ();
1654 if (dump_file)
1655 fprintf (dump_file, " - Builtin is safe for const.\n");
1656 return;
1657 }
1658 if (avail <= AVAIL_INTERPOSABLE)
1659 {
1660 if (dump_file)
1661 fprintf (dump_file,
1662 " - Function availability <= AVAIL_INTERPOSABLE.\n");
1663 process_fnspec (stmt);
1664 return;
1665 }
1666
1667 /* Get callee's modref summary. As above, if there's no summary, we either
1668 have to give up or, if stores are ignored, we can just purge loads. */
1669 modref_summary *callee_summary = optimization_summaries->get (callee_node);
1670 if (!callee_summary)
1671 {
1672 if (dump_file)
1673 fprintf (dump_file, " - No modref summary available for callee.\n");
1674 process_fnspec (stmt);
1675 return;
1676 }
1677
1678 merge_call_side_effects (stmt, callee_summary, callee_node, false);
1679
1680 return;
1681 }
1682
1683 /* Helper for analyze_stmt. */
1684
1685 bool
1686 modref_access_analysis::analyze_load (gimple *, tree, tree op, void *data)
1687 {
1688 modref_access_analysis *t = (modref_access_analysis *)data;
1689
1690 if (dump_file)
1691 {
1692 fprintf (dump_file, " - Analyzing load: ");
1693 print_generic_expr (dump_file, op);
1694 fprintf (dump_file, "\n");
1695 }
1696
1697 if (!t->record_access_p (op))
1698 return false;
1699
1700 ao_ref r;
1701 ao_ref_init (&r, op);
1702 modref_access_node a = get_access (&r);
1703 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1704 return false;
1705
1706 if (t->m_summary)
1707 t->record_access (t->m_summary->loads, &r, a);
1708 if (t->m_summary_lto)
1709 t->record_access_lto (t->m_summary_lto->loads, &r, a);
1710 return false;
1711 }
1712
/* Helper for analyze_stmt: record store of OP in statement STMT into the
   summaries of the modref_access_analysis instance passed via DATA, and
   record a kill when the store is known to always execute.  Always
   returns false so walk_stmt_load_store_ops keeps iterating.  */

bool
modref_access_analysis::analyze_store (gimple *stmt, tree, tree op, void *data)
{
  modref_access_analysis *t = (modref_access_analysis *)data;

  if (dump_file)
    {
      fprintf (dump_file, " - Analyzing store: ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, "\n");
    }

  if (!t->record_access_p (op))
    return false;

  ao_ref r;
  ao_ref_init (&r, op);
  modref_access_node a = get_access (&r);
  /* Stores to function-local memory are invisible to callers.  */
  if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
    return false;

  if (t->m_summary)
    t->record_access (t->m_summary->stores, &r, a);
  if (t->m_summary_lto)
    t->record_access_lto (t->m_summary_lto->stores, &r, a);
  /* A store that always executes (and can not be bypassed by an
     exception) kills the previous content of the memory.  */
  if (t->m_always_executed
      && a.useful_for_kill_p ()
      && (!cfun->can_throw_non_call_exceptions
	  || !stmt_could_throw_p (cfun, stmt)))
    {
      if (dump_file)
	fprintf (dump_file, " - Recording kill\n");
      if (t->m_summary)
	modref_access_node::insert_kill (t->m_summary->kills, a, false);
      if (t->m_summary_lto)
	modref_access_node::insert_kill (t->m_summary_lto->kills, a, false);
    }
  return false;
}
1754
/* Analyze statement STMT.
   ALWAYS_EXECUTED is true when STMT executes on every invocation of the
   function; only such statements may record kills.  */

void
modref_access_analysis::analyze_stmt (gimple *stmt, bool always_executed)
{
  m_always_executed = always_executed;
  /* In general we can not ignore clobbers because they are barriers for code
     motion, however after inlining it is safe to do because local optimization
     passes do not consider clobbers from other functions.
     Similar logic is in ipa-pure-const.cc.  */
  if ((m_ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    {
      /* An always executed clobber kills the previous content.  */
      if (always_executed && record_access_p (gimple_assign_lhs (stmt)))
	{
	  ao_ref r;
	  ao_ref_init (&r, gimple_assign_lhs (stmt));
	  modref_access_node a = get_access (&r);
	  if (a.useful_for_kill_p ())
	    {
	      if (dump_file)
		fprintf (dump_file, " - Recording kill\n");
	      if (m_summary)
		modref_access_node::insert_kill (m_summary->kills, a, false);
	      if (m_summary_lto)
		modref_access_node::insert_kill (m_summary_lto->kills,
						 a, false);
	    }
	}
      return;
    }

  /* Analyze all loads and stores in STMT.  */
  walk_stmt_load_store_ops (stmt, this,
			    analyze_load, analyze_store);

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASM:
      /* Volatile asms are nondeterministic.  */
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	set_nondeterministic ();
      if (cfun->can_throw_non_call_exceptions
	  && stmt_could_throw_p (cfun, stmt))
	set_side_effects ();
      /* If the ASM statement does not read nor write memory, there's nothing
	 to do.  Otherwise just give up.  */
      if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	return;
      if (dump_file)
	fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
		 "which clobbers memory.\n");
      record_unknown_load ();
      record_unknown_store ();
      return;
    case GIMPLE_CALL:
      /* In local mode (and for internal calls) merge the callee summary
	 directly; in IPA mode only remember the fnspec on the edge so the
	 IPA propagation can use it later.  */
      if (!m_ipa || gimple_call_internal_p (stmt))
	analyze_call (as_a <gcall *> (stmt));
      else
	{
	  attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));

	  if (fnspec.known_p ()
	      && (!fnspec.global_memory_read_p ()
		  || !fnspec.global_memory_written_p ()))
	    {
	      cgraph_edge *e = cgraph_node::get
				   (current_function_decl)->get_edge (stmt);
	      if (e->callee)
		{
		  fnspec_summaries->get_create (e)->fnspec
			  = xstrdup (fnspec.get_str ());
		  if (dump_file)
		    fprintf (dump_file, " Recorded fnspec %s\n",
			     fnspec.get_str ());
		}
	    }
	}
      return;
    default:
      /* With -fnon-call-exceptions a throwing statement is a side
	 effect.  */
      if (cfun->can_throw_non_call_exceptions
	  && stmt_could_throw_p (cfun, stmt))
	set_side_effects ();
      return;
    }
}
1840
1841 /* Propagate load/stores across recursive calls. */
1842
1843 void
1844 modref_access_analysis::propagate ()
1845 {
1846 if (m_ipa && m_summary)
1847 return;
1848
1849 bool changed = true;
1850 bool first = true;
1851 cgraph_node *fnode = cgraph_node::get (current_function_decl);
1852
1853 m_always_executed = false;
1854 while (changed && m_summary->useful_p (m_ecf_flags, false))
1855 {
1856 changed = false;
1857 for (unsigned i = 0; i < m_recursive_calls.length (); i++)
1858 {
1859 changed |= merge_call_side_effects (m_recursive_calls[i], m_summary,
1860 fnode, !first);
1861 }
1862 first = false;
1863 }
1864 }
1865
/* Analyze the current function: walk every statement recording loads,
   stores, kills and side effects into the summaries, then handle
   recursive calls and possibly infinite execution.  */

void
modref_access_analysis::analyze ()
{
  m_ecf_flags = flags_from_decl_or_type (current_function_decl);
  bool summary_useful = true;

  /* Analyze each statement in each basic block of the function.  If the
     statement cannot be analyzed (for any reason), the entire function cannot
     be analyzed by modref.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator si;
      /* Only the block immediately following the entry block is known to
	 execute on every invocation.  */
      bool always_executed
	      = bb == single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;

      for (si = gsi_start_nondebug_after_labels_bb (bb);
	   !gsi_end_p (si); gsi_next_nondebug (&si))
	{
	  /* NULL memory accesses terminates BB.  These accesses are known
	     to trip undefined behavior.  gimple-ssa-isolate-paths turns them
	     to volatile accesses and adds builtin_trap call which would
	     confuse us otherwise.  */
	  if (infer_nonnull_range_by_dereference (gsi_stmt (si),
						  null_pointer_node))
	    {
	      if (dump_file)
		fprintf (dump_file, " - NULL memory access; terminating BB\n");
	      if (flag_non_call_exceptions)
		set_side_effects ();
	      break;
	    }
	  analyze_stmt (gsi_stmt (si), always_executed);

	  /* Avoid doing useless work.  */
	  if ((!m_summary || !m_summary->useful_p (m_ecf_flags, false))
	      && (!m_summary_lto
		  || !m_summary_lto->useful_p (m_ecf_flags, false)))
	    {
	      summary_useful = false;
	      break;
	    }
	  /* A statement that may throw externally can end the function's
	     execution; later statements are not always executed.  */
	  if (always_executed
	      && stmt_can_throw_external (cfun, gsi_stmt (si)))
	    always_executed = false;
	}
      if (!summary_useful)
	break;
    }
  /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
     This needs to be done after all other side effects are computed.  */
  if (summary_useful)
    {
      if (!m_ipa)
	propagate ();
      /* A function that may not terminate has side effects.  */
      if (m_summary && !m_summary->side_effects && !finite_function_p ())
	m_summary->side_effects = true;
      if (m_summary_lto && !m_summary_lto->side_effects
	  && !finite_function_p ())
	m_summary_lto->side_effects = true;
    }
}
1930
1931 /* Return true if OP accesses memory pointed to by SSA_NAME. */
1932
1933 bool
1934 memory_access_to (tree op, tree ssa_name)
1935 {
1936 tree base = get_base_address (op);
1937 if (!base)
1938 return false;
1939 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
1940 return false;
1941 return TREE_OPERAND (base, 0) == ssa_name;
1942 }
1943
1944 /* Consider statement val = *arg.
1945 return EAF flags of ARG that can be determined from EAF flags of VAL
1946 (which are known to be FLAGS). If IGNORE_STORES is true we can ignore
1947 all stores to VAL, i.e. when handling noreturn function. */
1948
1949 static int
1950 deref_flags (int flags, bool ignore_stores)
1951 {
1952 /* Dereference is also a direct read but dereferenced value does not
1953 yield any other direct use. */
1954 int ret = EAF_NO_DIRECT_CLOBBER | EAF_NO_DIRECT_ESCAPE
1955 | EAF_NOT_RETURNED_DIRECTLY;
1956 /* If argument is unused just account for
1957 the read involved in dereference. */
1958 if (flags & EAF_UNUSED)
1959 ret |= EAF_NO_INDIRECT_READ | EAF_NO_INDIRECT_CLOBBER
1960 | EAF_NO_INDIRECT_ESCAPE;
1961 else
1962 {
1963 /* Direct or indirect accesses leads to indirect accesses. */
1964 if (((flags & EAF_NO_DIRECT_CLOBBER)
1965 && (flags & EAF_NO_INDIRECT_CLOBBER))
1966 || ignore_stores)
1967 ret |= EAF_NO_INDIRECT_CLOBBER;
1968 if (((flags & EAF_NO_DIRECT_ESCAPE)
1969 && (flags & EAF_NO_INDIRECT_ESCAPE))
1970 || ignore_stores)
1971 ret |= EAF_NO_INDIRECT_ESCAPE;
1972 if ((flags & EAF_NO_DIRECT_READ)
1973 && (flags & EAF_NO_INDIRECT_READ))
1974 ret |= EAF_NO_INDIRECT_READ;
1975 if ((flags & EAF_NOT_RETURNED_DIRECTLY)
1976 && (flags & EAF_NOT_RETURNED_INDIRECTLY))
1977 ret |= EAF_NOT_RETURNED_INDIRECTLY;
1978 }
1979 return ret;
1980 }
1981
1982
/* Description of an escape point: a call which affects flags of a given
   SSA name.  */

struct escape_point
{
  /* Value escapes to this call.  */
  gcall *call;
  /* Index of the call argument it escapes to.  */
  int arg;
  /* Flags already known about the argument (this can save us from recording
     escape points if local analysis did good job already).  */
  eaf_flags_t min_flags;
  /* Does value escape directly or indirectly?  */
  bool direct;
};
1998
1999 /* Lattice used during the eaf flags analysis dataflow. For a given SSA name
2000 we aim to compute its flags and escape points. We also use the lattice
2001 to dynamically build dataflow graph to propagate on. */
2002
2003 class modref_lattice
2004 {
2005 public:
2006 /* EAF flags of the SSA name. */
2007 eaf_flags_t flags;
2008 /* Used during DFS walk to mark names where final value was determined
2009 without need for dataflow. */
2010 bool known;
2011 /* Used during DFS walk to mark open vertices (for cycle detection). */
2012 bool open;
2013 /* Set during DFS walk for names that needs dataflow propagation. */
2014 bool do_dataflow;
2015 /* Used during the iterative dataflow. */
2016 bool changed;
2017
2018 /* When doing IPA analysis we can not merge in callee escape points;
2019 Only remember them and do the merging at IPA propagation time. */
2020 vec <escape_point, va_heap, vl_ptr> escape_points;
2021
2022 /* Representation of a graph for dataflow. This graph is built on-demand
2023 using modref_eaf_analysis::analyze_ssa and later solved by
2024 modref_eaf_analysis::propagate.
2025 Each edge represents the fact that flags of current lattice should be
2026 propagated to lattice of SSA_NAME. */
2027 struct propagate_edge
2028 {
2029 int ssa_name;
2030 bool deref;
2031 };
2032 vec <propagate_edge, va_heap, vl_ptr> propagate_to;
2033
2034 void init ();
2035 void release ();
2036 bool merge (const modref_lattice &with);
2037 bool merge (int flags);
2038 bool merge_deref (const modref_lattice &with, bool ignore_stores);
2039 bool merge_direct_load ();
2040 bool merge_direct_store ();
2041 bool add_escape_point (gcall *call, int arg, int min_flags, bool diret);
2042 void dump (FILE *out, int indent = 0) const;
2043 };
2044
/* Lattices are saved to vectors, so keep them PODs.  */

/* Initialize the lattice to the optimistic state: every tracked EAF flag
   set.  Dataflow subsequently only removes flags.  */
void
modref_lattice::init ()
{
  /* All flags we track.  */
  int f = EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER
	  | EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE
	  | EAF_NO_DIRECT_READ | EAF_NO_INDIRECT_READ
	  | EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY
	  | EAF_UNUSED;
  flags = f;
  /* Check that eaf_flags_t is wide enough to hold all flags.  */
  gcc_checking_assert (f == flags);
  open = true;
  known = false;
  /* NOTE(review): do_dataflow and changed are not initialized here;
     presumably the DFS walk sets them before use -- verify.  */
}
2061
2062 /* Release memory. */
2063 void
2064 modref_lattice::release ()
2065 {
2066 escape_points.release ();
2067 propagate_to.release ();
2068 }
2069
2070 /* Dump lattice to OUT; indent with INDENT spaces. */
2071
2072 void
2073 modref_lattice::dump (FILE *out, int indent) const
2074 {
2075 dump_eaf_flags (out, flags);
2076 if (escape_points.length ())
2077 {
2078 fprintf (out, "%*sEscapes:\n", indent, "");
2079 for (unsigned int i = 0; i < escape_points.length (); i++)
2080 {
2081 fprintf (out, "%*s Arg %i (%s) min flags", indent, "",
2082 escape_points[i].arg,
2083 escape_points[i].direct ? "direct" : "indirect");
2084 dump_eaf_flags (out, escape_points[i].min_flags, false);
2085 fprintf (out, " in call ");
2086 print_gimple_stmt (out, escape_points[i].call, 0);
2087 }
2088 }
2089 }
2090
/* Add escape point CALL, ARG, MIN_FLAGS, DIRECT.  Return true if anything
   changed; return false if an equivalent escape point already exists (or
   recording is unnecessary).  */

bool
modref_lattice::add_escape_point (gcall *call, int arg, int min_flags,
				  bool direct)
{
  escape_point *ep;
  unsigned int i;

  /* If we already determined flags to be bad enough,
     we do not need to record.  */
  if ((flags & min_flags) == flags || (min_flags & EAF_UNUSED))
    return false;

  /* If the same escape point is already recorded, only weaken its
     min_flags.  */
  FOR_EACH_VEC_ELT (escape_points, i, ep)
    if (ep->call == call && ep->arg == arg && ep->direct == direct)
      {
	if ((ep->min_flags & min_flags) == min_flags)
	  return false;
	ep->min_flags &= min_flags;
	return true;
      }
  /* Give up if max escape points is met.  */
  if ((int)escape_points.length () > param_modref_max_escape_points)
    {
      if (dump_file)
	fprintf (dump_file, "--param modref-max-escape-points limit reached\n");
      /* Clearing all flags makes further escape points unnecessary.  */
      merge (0);
      return true;
    }
  escape_point new_ep = {call, arg, min_flags, direct};
  escape_points.safe_push (new_ep);
  return true;
}
2126
2127 /* Merge in flags from F. */
2128 bool
2129 modref_lattice::merge (int f)
2130 {
2131 if (f & EAF_UNUSED)
2132 return false;
2133 /* Check that flags seems sane: if function does not read the parameter
2134 it can not access it indirectly. */
2135 gcc_checking_assert (!(f & EAF_NO_DIRECT_READ)
2136 || ((f & EAF_NO_INDIRECT_READ)
2137 && (f & EAF_NO_INDIRECT_CLOBBER)
2138 && (f & EAF_NO_INDIRECT_ESCAPE)
2139 && (f & EAF_NOT_RETURNED_INDIRECTLY)));
2140 if ((flags & f) != flags)
2141 {
2142 flags &= f;
2143 /* Prune obviously useless flags;
2144 We do not have ECF_FLAGS handy which is not big problem since
2145 we will do final flags cleanup before producing summary.
2146 Merging should be fast so it can work well with dataflow. */
2147 flags = remove_useless_eaf_flags (flags, 0, false);
2148 if (!flags)
2149 escape_points.release ();
2150 return true;
2151 }
2152 return false;
2153 }
2154
2155 /* Merge in WITH. Return true if anything changed. */
2156
2157 bool
2158 modref_lattice::merge (const modref_lattice &with)
2159 {
2160 if (!with.known)
2161 do_dataflow = true;
2162
2163 bool changed = merge (with.flags);
2164
2165 if (!flags)
2166 return changed;
2167 for (unsigned int i = 0; i < with.escape_points.length (); i++)
2168 changed |= add_escape_point (with.escape_points[i].call,
2169 with.escape_points[i].arg,
2170 with.escape_points[i].min_flags,
2171 with.escape_points[i].direct);
2172 return changed;
2173 }
2174
2175 /* Merge in deref of WITH. If IGNORE_STORES is true do not consider
2176 stores. Return true if anything changed. */
2177
2178 bool
2179 modref_lattice::merge_deref (const modref_lattice &with, bool ignore_stores)
2180 {
2181 if (!with.known)
2182 do_dataflow = true;
2183
2184 bool changed = merge (deref_flags (with.flags, ignore_stores));
2185
2186 if (!flags)
2187 return changed;
2188 for (unsigned int i = 0; i < with.escape_points.length (); i++)
2189 {
2190 int min_flags = with.escape_points[i].min_flags;
2191
2192 if (with.escape_points[i].direct)
2193 min_flags = deref_flags (min_flags, ignore_stores);
2194 else if (ignore_stores)
2195 min_flags |= ignore_stores_eaf_flags;
2196 changed |= add_escape_point (with.escape_points[i].call,
2197 with.escape_points[i].arg,
2198 min_flags,
2199 false);
2200 }
2201 return changed;
2202 }
2203
2204 /* Merge in flags for direct load. */
2205
2206 bool
2207 modref_lattice::merge_direct_load ()
2208 {
2209 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_READ));
2210 }
2211
2212 /* Merge in flags for direct store. */
2213
2214 bool
2215 modref_lattice::merge_direct_store ()
2216 {
2217 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_CLOBBER));
2218 }
2219
2220 /* Analyzer of EAF flags.
2221 This is generally dataflow problem over the SSA graph, however we only
2222 care about flags of few selected ssa names (arguments, return slot and
2223 static chain). So we first call analyze_ssa_name on all relevant names
2224 and perform a DFS walk to discover SSA names where flags needs to be
2225 determined. For acyclic graphs we try to determine final flags during
2226 this walk. Once cycles or recursion depth is met we enlist SSA names
2227 for dataflow which is done by propagate call.
2228
2229 After propagation the flags can be obtained using get_ssa_name_flags. */
2230
2231 class modref_eaf_analysis
2232 {
2233 public:
2234 /* Mark NAME as relevant for analysis. */
2235 void analyze_ssa_name (tree name, bool deferred = false);
2236 /* Dataflow solver. */
2237 void propagate ();
2238 /* Return flags computed earlier for NAME. */
2239 int get_ssa_name_flags (tree name)
2240 {
2241 int version = SSA_NAME_VERSION (name);
2242 gcc_checking_assert (m_lattice[version].known);
2243 return m_lattice[version].flags;
2244 }
2245 /* In IPA mode this will record all escape points
2246 determined for NAME to PARM_IDNEX. Flags are minimal
2247 flags known. */
2248 void record_escape_points (tree name, int parm_index, int flags);
2249 modref_eaf_analysis (bool ipa)
2250 {
2251 m_ipa = ipa;
2252 m_depth = 0;
2253 m_lattice.safe_grow_cleared (num_ssa_names, true);
2254 }
2255 ~modref_eaf_analysis ()
2256 {
2257 gcc_checking_assert (!m_depth);
2258 if (m_ipa || m_names_to_propagate.length ())
2259 for (unsigned int i = 0; i < num_ssa_names; i++)
2260 m_lattice[i].release ();
2261 }
2262 private:
2263 /* If true, we produce analysis for IPA mode. In this case escape points are
2264 collected. */
2265 bool m_ipa;
2266 /* Depth of recursion of analyze_ssa_name. */
2267 int m_depth;
2268 /* Propagation lattice for individual ssa names. */
2269 auto_vec<modref_lattice> m_lattice;
2270 auto_vec<tree> m_deferred_names;
2271 auto_vec<int> m_names_to_propagate;
2272
2273 void merge_with_ssa_name (tree dest, tree src, bool deref);
2274 void merge_call_lhs_flags (gcall *call, int arg, tree name, bool direct,
2275 bool deref);
2276 };
2277
2278
2279 /* Call statements may return their parameters. Consider argument number
2280 ARG of USE_STMT and determine flags that can needs to be cleared
2281 in case pointer possibly indirectly references from ARG I is returned.
2282 If DIRECT is true consider direct returns and if INDIRECT consider
2283 indirect returns.
2284 LATTICE, DEPTH and ipa are same as in analyze_ssa_name.
2285 ARG is set to -1 for static chain. */
2286
2287 void
2288 modref_eaf_analysis::merge_call_lhs_flags (gcall *call, int arg,
2289 tree name, bool direct,
2290 bool indirect)
2291 {
2292 int index = SSA_NAME_VERSION (name);
2293 bool returned_directly = false;
2294
2295 /* If there is no return value, no flags are affected. */
2296 if (!gimple_call_lhs (call))
2297 return;
2298
2299 /* If we know that function returns given argument and it is not ARG
2300 we can still be happy. */
2301 if (arg >= 0)
2302 {
2303 int flags = gimple_call_return_flags (call);
2304 if (flags & ERF_RETURNS_ARG)
2305 {
2306 if ((flags & ERF_RETURN_ARG_MASK) == arg)
2307 returned_directly = true;
2308 else
2309 return;
2310 }
2311 }
2312 /* Make ERF_RETURNS_ARG overwrite EAF_UNUSED. */
2313 if (returned_directly)
2314 {
2315 direct = true;
2316 indirect = false;
2317 }
2318 /* If value is not returned at all, do nothing. */
2319 else if (!direct && !indirect)
2320 return;
2321
2322 /* If return value is SSA name determine its flags. */
2323 if (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME)
2324 {
2325 tree lhs = gimple_call_lhs (call);
2326 if (direct)
2327 merge_with_ssa_name (name, lhs, false);
2328 if (indirect)
2329 merge_with_ssa_name (name, lhs, true);
2330 }
2331 /* In the case of memory store we can do nothing. */
2332 else if (!direct)
2333 m_lattice[index].merge (deref_flags (0, false));
2334 else
2335 m_lattice[index].merge (0);
2336 }
2337
2338 /* CALL_FLAGS are EAF_FLAGS of the argument. Turn them
2339 into flags for caller, update LATTICE of corresponding
2340 argument if needed. */
2341
2342 static int
2343 callee_to_caller_flags (int call_flags, bool ignore_stores,
2344 modref_lattice &lattice)
2345 {
2346 /* call_flags is about callee returning a value
2347 that is not the same as caller returning it. */
2348 call_flags |= EAF_NOT_RETURNED_DIRECTLY
2349 | EAF_NOT_RETURNED_INDIRECTLY;
2350 if (!ignore_stores && !(call_flags & EAF_UNUSED))
2351 {
2352 /* If value escapes we are no longer able to track what happens
2353 with it because we can read it from the escaped location
2354 anytime. */
2355 if (!(call_flags & EAF_NO_DIRECT_ESCAPE))
2356 lattice.merge (0);
2357 else if (!(call_flags & EAF_NO_INDIRECT_ESCAPE))
2358 lattice.merge (~(EAF_NOT_RETURNED_INDIRECTLY
2359 | EAF_NO_DIRECT_READ
2360 | EAF_NO_INDIRECT_READ
2361 | EAF_NO_INDIRECT_CLOBBER
2362 | EAF_UNUSED));
2363 }
2364 else
2365 call_flags |= ignore_stores_eaf_flags;
2366 return call_flags;
2367 }
2368
2369 /* Analyze EAF flags for SSA name NAME and store result to LATTICE.
2370 LATTICE is an array of modref_lattices.
2371 DEPTH is a recursion depth used to make debug output prettier.
2372 If IPA is true we analyze for IPA propagation (and thus call escape points
2373 are processed later) */
2374
2375 void
2376 modref_eaf_analysis::analyze_ssa_name (tree name, bool deferred)
2377 {
2378 imm_use_iterator ui;
2379 gimple *use_stmt;
2380 int index = SSA_NAME_VERSION (name);
2381
2382 if (!deferred)
2383 {
2384 /* See if value is already computed. */
2385 if (m_lattice[index].known || m_lattice[index].do_dataflow)
2386 return;
2387 if (m_lattice[index].open)
2388 {
2389 if (dump_file)
2390 fprintf (dump_file,
2391 "%*sCycle in SSA graph\n",
2392 m_depth * 4, "");
2393 return;
2394 }
2395 /* Recursion guard. */
2396 m_lattice[index].init ();
2397 if (m_depth == param_modref_max_depth)
2398 {
2399 if (dump_file)
2400 fprintf (dump_file,
2401 "%*sMax recursion depth reached; postponing\n",
2402 m_depth * 4, "");
2403 m_deferred_names.safe_push (name);
2404 return;
2405 }
2406 }
2407
2408 if (dump_file)
2409 {
2410 fprintf (dump_file,
2411 "%*sAnalyzing flags of ssa name: ", m_depth * 4, "");
2412 print_generic_expr (dump_file, name);
2413 fprintf (dump_file, "\n");
2414 }
2415
2416 FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
2417 {
2418 if (m_lattice[index].flags == 0)
2419 break;
2420 if (is_gimple_debug (use_stmt))
2421 continue;
2422 if (dump_file)
2423 {
2424 fprintf (dump_file, "%*s Analyzing stmt: ", m_depth * 4, "");
2425 print_gimple_stmt (dump_file, use_stmt, 0);
2426 }
2427 /* If we see a direct non-debug use, clear unused bit.
2428 All dereferences should be accounted below using deref_flags. */
2429 m_lattice[index].merge (~EAF_UNUSED);
2430
2431 /* Gimple return may load the return value.
2432 Returning name counts as an use by tree-ssa-structalias.cc */
2433 if (greturn *ret = dyn_cast <greturn *> (use_stmt))
2434 {
2435 /* Returning through return slot is seen as memory write earlier. */
2436 if (DECL_RESULT (current_function_decl)
2437 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
2438 ;
2439 else if (gimple_return_retval (ret) == name)
2440 m_lattice[index].merge (~(EAF_UNUSED | EAF_NOT_RETURNED_DIRECTLY
2441 | EAF_NOT_RETURNED_DIRECTLY));
2442 else if (memory_access_to (gimple_return_retval (ret), name))
2443 {
2444 m_lattice[index].merge_direct_load ();
2445 m_lattice[index].merge (~(EAF_UNUSED
2446 | EAF_NOT_RETURNED_INDIRECTLY));
2447 }
2448 }
2449 /* Account for LHS store, arg loads and flags from callee function. */
2450 else if (gcall *call = dyn_cast <gcall *> (use_stmt))
2451 {
2452 tree callee = gimple_call_fndecl (call);
2453
2454 /* IPA PTA internally it treats calling a function as "writing" to
2455 the argument space of all functions the function pointer points to
2456 (PR101949). We can not drop EAF_NOCLOBBER only when ipa-pta
2457 is on since that would allow propagation of this from -fno-ipa-pta
2458 to -fipa-pta functions. */
2459 if (gimple_call_fn (use_stmt) == name)
2460 m_lattice[index].merge (~(EAF_NO_DIRECT_CLOBBER | EAF_UNUSED));
2461
2462 /* Recursion would require bit of propagation; give up for now. */
2463 if (callee && !m_ipa && recursive_call_p (current_function_decl,
2464 callee))
2465 m_lattice[index].merge (0);
2466 else
2467 {
2468 int ecf_flags = gimple_call_flags (call);
2469 bool ignore_stores = ignore_stores_p (current_function_decl,
2470 ecf_flags);
2471 bool ignore_retval = ignore_retval_p (current_function_decl,
2472 ecf_flags);
2473
2474 /* Handle *name = func (...). */
2475 if (gimple_call_lhs (call)
2476 && memory_access_to (gimple_call_lhs (call), name))
2477 {
2478 m_lattice[index].merge_direct_store ();
2479 /* Return slot optimization passes address of
2480 LHS to callee via hidden parameter and this
2481 may make LHS to escape. See PR 98499. */
2482 if (gimple_call_return_slot_opt_p (call)
2483 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call))))
2484 {
2485 int call_flags = gimple_call_retslot_flags (call);
2486 bool isretslot = false;
2487
2488 if (DECL_RESULT (current_function_decl)
2489 && DECL_BY_REFERENCE
2490 (DECL_RESULT (current_function_decl)))
2491 isretslot = ssa_default_def
2492 (cfun,
2493 DECL_RESULT (current_function_decl))
2494 == name;
2495
2496 /* Passing returnslot to return slot is special because
2497 not_returned and escape has same meaning.
2498 However passing arg to return slot is different. If
2499 the callee's return slot is returned it means that
2500 arg is written to itself which is an escape.
2501 Since we do not track the memory it is written to we
2502 need to give up on analyzing it. */
2503 if (!isretslot)
2504 {
2505 if (!(call_flags & (EAF_NOT_RETURNED_DIRECTLY
2506 | EAF_UNUSED)))
2507 m_lattice[index].merge (0);
2508 else gcc_checking_assert
2509 (call_flags & (EAF_NOT_RETURNED_INDIRECTLY
2510 | EAF_UNUSED));
2511 call_flags = callee_to_caller_flags
2512 (call_flags, false,
2513 m_lattice[index]);
2514 }
2515 m_lattice[index].merge (call_flags);
2516 }
2517 }
2518
2519 if (gimple_call_chain (call)
2520 && (gimple_call_chain (call) == name))
2521 {
2522 int call_flags = gimple_call_static_chain_flags (call);
2523 if (!ignore_retval && !(call_flags & EAF_UNUSED))
2524 merge_call_lhs_flags
2525 (call, -1, name,
2526 !(call_flags & EAF_NOT_RETURNED_DIRECTLY),
2527 !(call_flags & EAF_NOT_RETURNED_INDIRECTLY));
2528 call_flags = callee_to_caller_flags
2529 (call_flags, ignore_stores,
2530 m_lattice[index]);
2531 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
2532 m_lattice[index].merge (call_flags);
2533 }
2534
2535 /* Process internal functions and right away. */
2536 bool record_ipa = m_ipa && !gimple_call_internal_p (call);
2537
2538 /* Handle all function parameters. */
2539 for (unsigned i = 0;
2540 i < gimple_call_num_args (call)
2541 && m_lattice[index].flags; i++)
2542 /* Name is directly passed to the callee. */
2543 if (gimple_call_arg (call, i) == name)
2544 {
2545 int call_flags = gimple_call_arg_flags (call, i);
2546 if (!ignore_retval)
2547 merge_call_lhs_flags
2548 (call, i, name,
2549 !(call_flags & (EAF_NOT_RETURNED_DIRECTLY
2550 | EAF_UNUSED)),
2551 !(call_flags & (EAF_NOT_RETURNED_INDIRECTLY
2552 | EAF_UNUSED)));
2553 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
2554 {
2555 call_flags = callee_to_caller_flags
2556 (call_flags, ignore_stores,
2557 m_lattice[index]);
2558 if (!record_ipa)
2559 m_lattice[index].merge (call_flags);
2560 else
2561 m_lattice[index].add_escape_point (call, i,
2562 call_flags, true);
2563 }
2564 }
2565 /* Name is dereferenced and passed to a callee. */
2566 else if (memory_access_to (gimple_call_arg (call, i), name))
2567 {
2568 int call_flags = deref_flags
2569 (gimple_call_arg_flags (call, i), ignore_stores);
2570 if (!ignore_retval && !(call_flags & EAF_UNUSED)
2571 && !(call_flags & EAF_NOT_RETURNED_DIRECTLY)
2572 && !(call_flags & EAF_NOT_RETURNED_INDIRECTLY))
2573 merge_call_lhs_flags (call, i, name, false, true);
2574 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
2575 m_lattice[index].merge_direct_load ();
2576 else
2577 {
2578 call_flags = callee_to_caller_flags
2579 (call_flags, ignore_stores,
2580 m_lattice[index]);
2581 if (!record_ipa)
2582 m_lattice[index].merge (call_flags);
2583 else
2584 m_lattice[index].add_escape_point (call, i,
2585 call_flags, false);
2586 }
2587 }
2588 }
2589 }
2590 else if (gimple_assign_load_p (use_stmt))
2591 {
2592 gassign *assign = as_a <gassign *> (use_stmt);
2593 /* Memory to memory copy. */
2594 if (gimple_store_p (assign))
2595 {
2596 /* Handle *lhs = *name.
2597
2598 We do not track memory locations, so assume that value
2599 is used arbitrarily. */
2600 if (memory_access_to (gimple_assign_rhs1 (assign), name))
2601 m_lattice[index].merge (deref_flags (0, false));
2602 /* Handle *name = *exp. */
2603 else if (memory_access_to (gimple_assign_lhs (assign), name))
2604 m_lattice[index].merge_direct_store ();
2605 }
2606 /* Handle lhs = *name. */
2607 else if (memory_access_to (gimple_assign_rhs1 (assign), name))
2608 {
2609 tree lhs = gimple_assign_lhs (assign);
2610 merge_with_ssa_name (name, lhs, true);
2611 }
2612 }
2613 else if (gimple_store_p (use_stmt))
2614 {
2615 gassign *assign = dyn_cast <gassign *> (use_stmt);
2616
2617 /* Handle *lhs = name. */
2618 if (assign && gimple_assign_rhs1 (assign) == name)
2619 {
2620 if (dump_file)
2621 fprintf (dump_file, "%*s ssa name saved to memory\n",
2622 m_depth * 4, "");
2623 m_lattice[index].merge (0);
2624 }
2625 /* Handle *name = exp. */
2626 else if (assign
2627 && memory_access_to (gimple_assign_lhs (assign), name))
2628 {
2629 /* In general we can not ignore clobbers because they are
2630 barriers for code motion, however after inlining it is safe to
2631 do because local optimization passes do not consider clobbers
2632 from other functions.
2633 Similar logic is in ipa-pure-const.cc. */
2634 if (!cfun->after_inlining || !gimple_clobber_p (assign))
2635 m_lattice[index].merge_direct_store ();
2636 }
2637 /* ASM statements etc. */
2638 else if (!assign)
2639 {
2640 if (dump_file)
2641 fprintf (dump_file, "%*s Unhandled store\n", m_depth * 4, "");
2642 m_lattice[index].merge (0);
2643 }
2644 }
2645 else if (gassign *assign = dyn_cast <gassign *> (use_stmt))
2646 {
2647 enum tree_code code = gimple_assign_rhs_code (assign);
2648
2649 /* See if operation is a merge as considered by
2650 tree-ssa-structalias.cc:find_func_aliases. */
2651 if (!truth_value_p (code)
2652 && code != POINTER_DIFF_EXPR
2653 && (code != POINTER_PLUS_EXPR
2654 || gimple_assign_rhs1 (assign) == name))
2655 {
2656 tree lhs = gimple_assign_lhs (assign);
2657 merge_with_ssa_name (name, lhs, false);
2658 }
2659 }
2660 else if (gphi *phi = dyn_cast <gphi *> (use_stmt))
2661 {
2662 tree result = gimple_phi_result (phi);
2663 merge_with_ssa_name (name, result, false);
2664 }
2665 /* Conditions are not considered escape points
2666 by tree-ssa-structalias. */
2667 else if (gimple_code (use_stmt) == GIMPLE_COND)
2668 ;
2669 else
2670 {
2671 if (dump_file)
2672 fprintf (dump_file, "%*s Unhandled stmt\n", m_depth * 4, "");
2673 m_lattice[index].merge (0);
2674 }
2675
2676 if (dump_file)
2677 {
2678 fprintf (dump_file, "%*s current flags of ", m_depth * 4, "");
2679 print_generic_expr (dump_file, name);
2680 m_lattice[index].dump (dump_file, m_depth * 4 + 4);
2681 }
2682 }
2683 if (dump_file)
2684 {
2685 fprintf (dump_file, "%*sflags of ssa name ", m_depth * 4, "");
2686 print_generic_expr (dump_file, name);
2687 m_lattice[index].dump (dump_file, m_depth * 4 + 2);
2688 }
2689 m_lattice[index].open = false;
2690 if (!m_lattice[index].do_dataflow)
2691 m_lattice[index].known = true;
2692 }
2693
2694 /* Propagate info from SRC to DEST. If DEREF it true, assume that SRC
2695 is dereferenced. */
2696
2697 void
2698 modref_eaf_analysis::merge_with_ssa_name (tree dest, tree src, bool deref)
2699 {
2700 int index = SSA_NAME_VERSION (dest);
2701 int src_index = SSA_NAME_VERSION (src);
2702
2703 /* Merging lattice with itself is a no-op. */
2704 if (!deref && src == dest)
2705 return;
2706
2707 m_depth++;
2708 analyze_ssa_name (src);
2709 m_depth--;
2710 if (deref)
2711 m_lattice[index].merge_deref (m_lattice[src_index], false);
2712 else
2713 m_lattice[index].merge (m_lattice[src_index]);
2714
2715 /* If we failed to produce final solution add an edge to the dataflow
2716 graph. */
2717 if (!m_lattice[src_index].known)
2718 {
2719 modref_lattice::propagate_edge e = {index, deref};
2720
2721 if (!m_lattice[src_index].propagate_to.length ())
2722 m_names_to_propagate.safe_push (src_index);
2723 m_lattice[src_index].propagate_to.safe_push (e);
2724 m_lattice[src_index].changed = true;
2725 m_lattice[src_index].do_dataflow = true;
2726 if (dump_file)
2727 fprintf (dump_file,
2728 "%*sWill propgate from ssa_name %i to %i%s\n",
2729 m_depth * 4 + 4,
2730 "", src_index, index, deref ? " (deref)" : "");
2731 }
2732 }
2733
2734 /* In the case we deferred some SSA names, reprocess them. In the case some
2735 dataflow edges were introduced, do the actual iterative dataflow. */
2736
2737 void
2738 modref_eaf_analysis::propagate ()
2739 {
2740 int iterations = 0;
2741 size_t i;
2742 int index;
2743 bool changed = true;
2744
2745 while (m_deferred_names.length ())
2746 {
2747 tree name = m_deferred_names.pop ();
2748 if (dump_file)
2749 fprintf (dump_file, "Analyzing deferred SSA name\n");
2750 analyze_ssa_name (name, true);
2751 }
2752
2753 if (!m_names_to_propagate.length ())
2754 return;
2755 if (dump_file)
2756 fprintf (dump_file, "Propagating EAF flags\n");
2757
2758 /* Compute reverse postorder. */
2759 auto_vec <int> rpo;
2760 struct stack_entry
2761 {
2762 int name;
2763 unsigned pos;
2764 };
2765 auto_vec <struct stack_entry> stack;
2766 int pos = m_names_to_propagate.length () - 1;
2767
2768 rpo.safe_grow (m_names_to_propagate.length (), true);
2769 stack.reserve_exact (m_names_to_propagate.length ());
2770
2771 /* We reuse known flag for RPO DFS walk bookkeeping. */
2772 if (flag_checking)
2773 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
2774 gcc_assert (!m_lattice[index].known && m_lattice[index].changed);
2775
2776 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
2777 {
2778 if (!m_lattice[index].known)
2779 {
2780 stack_entry e = {index, 0};
2781
2782 stack.quick_push (e);
2783 m_lattice[index].known = true;
2784 }
2785 while (stack.length ())
2786 {
2787 bool found = false;
2788 int index1 = stack.last ().name;
2789
2790 while (stack.last ().pos < m_lattice[index1].propagate_to.length ())
2791 {
2792 int index2 = m_lattice[index1]
2793 .propagate_to[stack.last ().pos].ssa_name;
2794
2795 stack.last ().pos++;
2796 if (!m_lattice[index2].known
2797 && m_lattice[index2].propagate_to.length ())
2798 {
2799 stack_entry e = {index2, 0};
2800
2801 stack.quick_push (e);
2802 m_lattice[index2].known = true;
2803 found = true;
2804 break;
2805 }
2806 }
2807 if (!found
2808 && stack.last ().pos == m_lattice[index1].propagate_to.length ())
2809 {
2810 rpo[pos--] = index1;
2811 stack.pop ();
2812 }
2813 }
2814 }
2815
2816 /* Perform iterative dataflow. */
2817 while (changed)
2818 {
2819 changed = false;
2820 iterations++;
2821 if (dump_file)
2822 fprintf (dump_file, " iteration %i\n", iterations);
2823 FOR_EACH_VEC_ELT (rpo, i, index)
2824 {
2825 if (m_lattice[index].changed)
2826 {
2827 size_t j;
2828
2829 m_lattice[index].changed = false;
2830 if (dump_file)
2831 fprintf (dump_file, " Visiting ssa name %i\n", index);
2832 for (j = 0; j < m_lattice[index].propagate_to.length (); j++)
2833 {
2834 bool ch;
2835 int target = m_lattice[index].propagate_to[j].ssa_name;
2836 bool deref = m_lattice[index].propagate_to[j].deref;
2837
2838 if (dump_file)
2839 fprintf (dump_file, " Propagating flags of ssa name"
2840 " %i to %i%s\n",
2841 index, target, deref ? " (deref)" : "");
2842 m_lattice[target].known = true;
2843 if (!m_lattice[index].propagate_to[j].deref)
2844 ch = m_lattice[target].merge (m_lattice[index]);
2845 else
2846 ch = m_lattice[target].merge_deref (m_lattice[index],
2847 false);
2848 if (!ch)
2849 continue;
2850 if (dump_file)
2851 {
2852 fprintf (dump_file, " New lattice: ");
2853 m_lattice[target].dump (dump_file);
2854 }
2855 changed = true;
2856 m_lattice[target].changed = true;
2857 }
2858 }
2859 }
2860 }
2861 if (dump_file)
2862 fprintf (dump_file, "EAF flags propagated in %i iterations\n", iterations);
2863 }
2864
2865 /* Record escape points of PARM_INDEX according to LATTICE. */
2866
2867 void
2868 modref_eaf_analysis::record_escape_points (tree name, int parm_index, int flags)
2869 {
2870 modref_lattice &lattice = m_lattice[SSA_NAME_VERSION (name)];
2871
2872 if (lattice.escape_points.length ())
2873 {
2874 escape_point *ep;
2875 unsigned int ip;
2876 cgraph_node *node = cgraph_node::get (current_function_decl);
2877
2878 gcc_assert (m_ipa);
2879 FOR_EACH_VEC_ELT (lattice.escape_points, ip, ep)
2880 if ((ep->min_flags & flags) != flags)
2881 {
2882 cgraph_edge *e = node->get_edge (ep->call);
2883 struct escape_entry ee = {parm_index, ep->arg,
2884 ep->min_flags, ep->direct};
2885
2886 escape_summaries->get_create (e)->esc.safe_push (ee);
2887 }
2888 }
2889 }
2890
2891 /* Determine EAF flags for function parameters
2892 and fill in SUMMARY/SUMMARY_LTO. If IPA is true work in IPA mode
2893 where we also collect escape points.
2894 PAST_FLAGS, PAST_RETSLOT_FLAGS, PAST_STATIC_CHAIN_FLAGS can be
2895 used to preserve flags from previous (IPA) run for cases where
2896 late optimizations changed code in a way we can no longer analyze
2897 it easily. */
2898
2899 static void
2900 analyze_parms (modref_summary *summary, modref_summary_lto *summary_lto,
2901 bool ipa, vec<eaf_flags_t> &past_flags,
2902 int past_retslot_flags, int past_static_chain_flags)
2903 {
2904 unsigned int parm_index = 0;
2905 unsigned int count = 0;
2906 int ecf_flags = flags_from_decl_or_type (current_function_decl);
2907 tree retslot = NULL;
2908 tree static_chain = NULL;
2909
2910 /* If there is return slot, look up its SSA name. */
2911 if (DECL_RESULT (current_function_decl)
2912 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
2913 retslot = ssa_default_def (cfun, DECL_RESULT (current_function_decl));
2914 if (cfun->static_chain_decl)
2915 static_chain = ssa_default_def (cfun, cfun->static_chain_decl);
2916
2917 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2918 parm = TREE_CHAIN (parm))
2919 count++;
2920
2921 if (!count && !retslot && !static_chain)
2922 return;
2923
2924 modref_eaf_analysis eaf_analysis (ipa);
2925
2926 /* Determine all SSA names we need to know flags for. */
2927 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2928 parm = TREE_CHAIN (parm))
2929 {
2930 tree name = ssa_default_def (cfun, parm);
2931 if (name)
2932 eaf_analysis.analyze_ssa_name (name);
2933 }
2934 if (retslot)
2935 eaf_analysis.analyze_ssa_name (retslot);
2936 if (static_chain)
2937 eaf_analysis.analyze_ssa_name (static_chain);
2938
2939 /* Do the dataflow. */
2940 eaf_analysis.propagate ();
2941
2942 tree attr = lookup_attribute ("fn spec",
2943 TYPE_ATTRIBUTES
2944 (TREE_TYPE (current_function_decl)));
2945 attr_fnspec fnspec (attr
2946 ? TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))
2947 : "");
2948
2949
2950 /* Store results to summaries. */
2951 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm; parm_index++,
2952 parm = TREE_CHAIN (parm))
2953 {
2954 tree name = ssa_default_def (cfun, parm);
2955 if (!name || has_zero_uses (name))
2956 {
2957 /* We do not track non-SSA parameters,
2958 but we want to track unused gimple_regs. */
2959 if (!is_gimple_reg (parm))
2960 continue;
2961 if (summary)
2962 {
2963 if (parm_index >= summary->arg_flags.length ())
2964 summary->arg_flags.safe_grow_cleared (count, true);
2965 summary->arg_flags[parm_index] = EAF_UNUSED;
2966 }
2967 else if (summary_lto)
2968 {
2969 if (parm_index >= summary_lto->arg_flags.length ())
2970 summary_lto->arg_flags.safe_grow_cleared (count, true);
2971 summary_lto->arg_flags[parm_index] = EAF_UNUSED;
2972 }
2973 continue;
2974 }
2975 int flags = eaf_analysis.get_ssa_name_flags (name);
2976 int attr_flags = fnspec.arg_eaf_flags (parm_index);
2977
2978 if (dump_file && (flags | attr_flags) != flags && !(flags & EAF_UNUSED))
2979 {
2980 fprintf (dump_file,
2981 " Flags for param %i combined with fnspec flags:",
2982 (int)parm_index);
2983 dump_eaf_flags (dump_file, attr_flags, false);
2984 fprintf (dump_file, " determined: ");
2985 dump_eaf_flags (dump_file, flags, true);
2986 }
2987 flags |= attr_flags;
2988
2989 /* Eliminate useless flags so we do not end up storing unnecessary
2990 summaries. */
2991
2992 flags = remove_useless_eaf_flags
2993 (flags, ecf_flags,
2994 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
2995 if (past_flags.length () > parm_index)
2996 {
2997 int past = past_flags[parm_index];
2998 past = remove_useless_eaf_flags
2999 (past, ecf_flags,
3000 VOID_TYPE_P (TREE_TYPE
3001 (TREE_TYPE (current_function_decl))));
3002 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3003 {
3004 fprintf (dump_file,
3005 " Flags for param %i combined with IPA pass:",
3006 (int)parm_index);
3007 dump_eaf_flags (dump_file, past, false);
3008 fprintf (dump_file, " determined: ");
3009 dump_eaf_flags (dump_file, flags, true);
3010 }
3011 if (!(flags & EAF_UNUSED))
3012 flags |= past;
3013 }
3014
3015 if (flags)
3016 {
3017 if (summary)
3018 {
3019 if (parm_index >= summary->arg_flags.length ())
3020 summary->arg_flags.safe_grow_cleared (count, true);
3021 summary->arg_flags[parm_index] = flags;
3022 }
3023 else if (summary_lto)
3024 {
3025 if (parm_index >= summary_lto->arg_flags.length ())
3026 summary_lto->arg_flags.safe_grow_cleared (count, true);
3027 summary_lto->arg_flags[parm_index] = flags;
3028 }
3029 eaf_analysis.record_escape_points (name, parm_index, flags);
3030 }
3031 }
3032 if (retslot)
3033 {
3034 int flags = eaf_analysis.get_ssa_name_flags (retslot);
3035 int past = past_retslot_flags;
3036
3037 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
3038 past = remove_useless_eaf_flags
3039 (past, ecf_flags,
3040 VOID_TYPE_P (TREE_TYPE
3041 (TREE_TYPE (current_function_decl))));
3042 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3043 {
3044 fprintf (dump_file,
3045 " Retslot flags combined with IPA pass:");
3046 dump_eaf_flags (dump_file, past, false);
3047 fprintf (dump_file, " determined: ");
3048 dump_eaf_flags (dump_file, flags, true);
3049 }
3050 if (!(flags & EAF_UNUSED))
3051 flags |= past;
3052 if (flags)
3053 {
3054 if (summary)
3055 summary->retslot_flags = flags;
3056 if (summary_lto)
3057 summary_lto->retslot_flags = flags;
3058 eaf_analysis.record_escape_points (retslot,
3059 MODREF_RETSLOT_PARM, flags);
3060 }
3061 }
3062 if (static_chain)
3063 {
3064 int flags = eaf_analysis.get_ssa_name_flags (static_chain);
3065 int past = past_static_chain_flags;
3066
3067 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
3068 past = remove_useless_eaf_flags
3069 (past, ecf_flags,
3070 VOID_TYPE_P (TREE_TYPE
3071 (TREE_TYPE (current_function_decl))));
3072 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
3073 {
3074 fprintf (dump_file,
3075 " Static chain flags combined with IPA pass:");
3076 dump_eaf_flags (dump_file, past, false);
3077 fprintf (dump_file, " determined: ");
3078 dump_eaf_flags (dump_file, flags, true);
3079 }
3080 if (!(flags & EAF_UNUSED))
3081 flags |= past;
3082 if (flags)
3083 {
3084 if (summary)
3085 summary->static_chain_flags = flags;
3086 if (summary_lto)
3087 summary_lto->static_chain_flags = flags;
3088 eaf_analysis.record_escape_points (static_chain,
3089 MODREF_STATIC_CHAIN_PARM,
3090 flags);
3091 }
3092 }
3093 }
3094
/* Analyze function.  IPA indicates whether we're running in local mode
   (false) or the IPA mode (true).
   Return true if fixup cfg is needed after the pass.  */

static bool
analyze_function (bool ipa)
{
  bool fixup_cfg = false;
  if (dump_file)
    fprintf (dump_file, "\n\nmodref analyzing '%s' (ipa=%i)%s%s\n",
	     cgraph_node::get (current_function_decl)->dump_name (), ipa,
	     TREE_READONLY (current_function_decl) ? " (const)" : "",
	     DECL_PURE_P (current_function_decl) ? " (pure)" : "");

  /* Skip functions where modref is disabled or that must not be touched
     by IPA analyses.  NOTE(review): a previous comment here claimed this
     tests -fno-strict-aliasing, but the condition actually checks
     flag_ipa_modref and the "noipa" attribute.  */
  if (!flag_ipa_modref
      || lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl)))
    return false;

  /* Compute no-LTO summaries when local optimization is going to happen.  */
  bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
		|| (in_lto_p && !flag_wpa
		    && flag_incremental_link != INCREMENTAL_LINK_LTO));
  /* Compute LTO when LTO streaming is going to happen.  */
  bool lto = ipa && ((flag_lto && !in_lto_p)
		     || flag_wpa
		     || flag_incremental_link == INCREMENTAL_LINK_LTO);
  cgraph_node *fnode = cgraph_node::get (current_function_decl);

  modref_summary *summary = NULL;
  modref_summary_lto *summary_lto = NULL;

  /* Flags recorded by a previous (IPA) run of the pass; kept so the dump
     code below can show how the local solution compares to it.  */
  bool past_flags_known = false;
  auto_vec <eaf_flags_t> past_flags;
  int past_retslot_flags = 0;
  int past_static_chain_flags = 0;

  /* Initialize the summary.
     If we run in local mode there is possibly pre-existing summary from
     IPA pass.  Dump it so it is easy to compare if mod-ref info has
     improved.  */
  if (!ipa)
    {
      if (!optimization_summaries)
	optimization_summaries = modref_summaries::create_ggc (symtab);
      else /* Remove existing summary if we are re-running the pass.  */
	{
	  summary = optimization_summaries->get (fnode);
	  if (summary != NULL
	      && summary->loads)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Past summary:\n");
		  optimization_summaries->get (fnode)->dump (dump_file);
		}
	      past_flags.reserve_exact (summary->arg_flags.length ());
	      past_flags.splice (summary->arg_flags);
	      past_retslot_flags = summary->retslot_flags;
	      past_static_chain_flags = summary->static_chain_flags;
	      past_flags_known = true;
	    }
	  optimization_summaries->remove (fnode);
	}
      summary = optimization_summaries->get_create (fnode);
      gcc_checking_assert (nolto && !lto);
    }
  /* In IPA mode we analyze every function precisely once.  Assert that.  */
  else
    {
      if (nolto)
	{
	  if (!summaries)
	    summaries = modref_summaries::create_ggc (symtab);
	  else
	    summaries->remove (fnode);
	  summary = summaries->get_create (fnode);
	}
      if (lto)
	{
	  if (!summaries_lto)
	    summaries_lto = modref_summaries_lto::create_ggc (symtab);
	  else
	    summaries_lto->remove (fnode);
	  summary_lto = summaries_lto->get_create (fnode);
	}
      if (!fnspec_summaries)
	fnspec_summaries = new fnspec_summaries_t (symtab);
      if (!escape_summaries)
	escape_summaries = new escape_summaries_t (symtab);
    }


  /* Create and initialize summary for F.
     Note that summaries may be already allocated from previous
     run of the pass.  */
  if (nolto)
    {
      gcc_assert (!summary->loads);
      summary->loads = modref_records::create_ggc ();
      gcc_assert (!summary->stores);
      summary->stores = modref_records::create_ggc ();
      summary->writes_errno = false;
      summary->side_effects = false;
      summary->nondeterministic = false;
      summary->calls_interposable = false;
    }
  if (lto)
    {
      gcc_assert (!summary_lto->loads);
      summary_lto->loads = modref_records_lto::create_ggc ();
      gcc_assert (!summary_lto->stores);
      summary_lto->stores = modref_records_lto::create_ggc ();
      summary_lto->writes_errno = false;
      summary_lto->side_effects = false;
      summary_lto->nondeterministic = false;
      summary_lto->calls_interposable = false;
    }

  /* Compute EAF flags of parameters, return slot and static chain.  */
  analyze_parms (summary, summary_lto, ipa,
		 past_flags, past_retslot_flags, past_static_chain_flags);

  /* Walk the body and record loads/stores into the summaries.  */
  {
    modref_access_analysis analyzer (ipa, summary, summary_lto);
    analyzer.analyze ();
  }

  /* In the local pass try to promote the function to const/pure when the
     collected summary shows no stores (and, for const, no loads).  */
  if (!ipa && flag_ipa_pure_const)
    {
      if (!summary->stores->every_base && !summary->stores->bases
	  && !summary->nondeterministic)
	{
	  if (!summary->loads->every_base && !summary->loads->bases
	      && !summary->calls_interposable)
	    fixup_cfg = ipa_make_function_const (fnode,
						 summary->side_effects, true);
	  else
	    fixup_cfg = ipa_make_function_pure (fnode,
						summary->side_effects, true);
	}
    }
  /* Drop summaries that add nothing beyond what ECF flags already say.  */
  int ecf_flags = flags_from_decl_or_type (current_function_decl);
  if (summary && !summary->useful_p (ecf_flags))
    {
      if (!ipa)
	optimization_summaries->remove (fnode);
      else
	summaries->remove (fnode);
      summary = NULL;
    }
  if (summary)
    summary->finalize (current_function_decl);
  if (summary_lto && !summary_lto->useful_p (ecf_flags))
    {
      summaries_lto->remove (fnode);
      summary_lto = NULL;
    }

  if (ipa && !summary && !summary_lto)
    remove_modref_edge_summaries (fnode);

  if (dump_file)
    {
      fprintf (dump_file, " - modref done with result: tracked.\n");
      if (summary)
	summary->dump (dump_file);
      if (summary_lto)
	summary_lto->dump (dump_file);
      dump_modref_edge_summaries (dump_file, fnode, 2);
      /* To simplify debugging, compare IPA and local solutions.  */
      if (past_flags_known && summary)
	{
	  size_t len = summary->arg_flags.length ();

	  if (past_flags.length () > len)
	    len = past_flags.length ();
	  for (size_t i = 0; i < len; i++)
	    {
	      int old_flags = i < past_flags.length () ? past_flags[i] : 0;
	      int new_flags = i < summary->arg_flags.length ()
			      ? summary->arg_flags[i] : 0;
	      old_flags = remove_useless_eaf_flags
		(old_flags, flags_from_decl_or_type (current_function_decl),
		 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	      if (old_flags != new_flags)
		{
		  /* The local pass may only add flags; losing a flag the
		     IPA pass proved would be a bug
		     (hence gcc_unreachable).  */
		  if ((old_flags & ~new_flags) == 0
		      || (new_flags & EAF_UNUSED))
		    fprintf (dump_file, "  Flags for param %i improved:",
			     (int)i);
		  else
		    gcc_unreachable ();
		  dump_eaf_flags (dump_file, old_flags, false);
		  fprintf (dump_file, " -> ");
		  dump_eaf_flags (dump_file, new_flags, true);
		}
	    }
	  past_retslot_flags = remove_useless_eaf_flags
	    (past_retslot_flags,
	     flags_from_decl_or_type (current_function_decl),
	     VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	  if (past_retslot_flags != summary->retslot_flags)
	    {
	      if ((past_retslot_flags & ~summary->retslot_flags) == 0
		  || (summary->retslot_flags & EAF_UNUSED))
		fprintf (dump_file, "  Flags for retslot improved:");
	      else
		gcc_unreachable ();
	      dump_eaf_flags (dump_file, past_retslot_flags, false);
	      fprintf (dump_file, " -> ");
	      dump_eaf_flags (dump_file, summary->retslot_flags, true);
	    }
	  past_static_chain_flags = remove_useless_eaf_flags
	    (past_static_chain_flags,
	     flags_from_decl_or_type (current_function_decl),
	     VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	  if (past_static_chain_flags != summary->static_chain_flags)
	    {
	      if ((past_static_chain_flags & ~summary->static_chain_flags) == 0
		  || (summary->static_chain_flags & EAF_UNUSED))
		fprintf (dump_file, "  Flags for static chain improved:");
	      else
		gcc_unreachable ();
	      dump_eaf_flags (dump_file, past_static_chain_flags, false);
	      fprintf (dump_file, " -> ");
	      dump_eaf_flags (dump_file, summary->static_chain_flags, true);
	    }
	}
      /* The summary was dropped as useless; show which flags were lost.  */
      else if (past_flags_known && !summary)
	{
	  for (size_t i = 0; i < past_flags.length (); i++)
	    {
	      int old_flags = past_flags[i];
	      old_flags = remove_useless_eaf_flags
		(old_flags, flags_from_decl_or_type (current_function_decl),
		 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	      if (old_flags)
		{
		  fprintf (dump_file, "  Flags for param %i worsened:",
			   (int)i);
		  dump_eaf_flags (dump_file, old_flags, false);
		  fprintf (dump_file, " -> \n");
		}
	    }
	  past_retslot_flags = remove_useless_eaf_flags
	    (past_retslot_flags,
	     flags_from_decl_or_type (current_function_decl),
	     VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	  if (past_retslot_flags)
	    {
	      fprintf (dump_file, "  Flags for retslot worsened:");
	      dump_eaf_flags (dump_file, past_retslot_flags, false);
	      fprintf (dump_file, " ->\n");
	    }
	  past_static_chain_flags = remove_useless_eaf_flags
	    (past_static_chain_flags,
	     flags_from_decl_or_type (current_function_decl),
	     VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
	  if (past_static_chain_flags)
	    {
	      fprintf (dump_file, "  Flags for static chain worsened:");
	      dump_eaf_flags (dump_file, past_static_chain_flags, false);
	      fprintf (dump_file, " ->\n");
	    }
	}
    }
  return fixup_cfg;
}
3363
3364 /* Callback for generate_summary. */
3365
3366 static void
3367 modref_generate (void)
3368 {
3369 struct cgraph_node *node;
3370 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3371 {
3372 function *f = DECL_STRUCT_FUNCTION (node->decl);
3373 if (!f)
3374 continue;
3375 push_cfun (f);
3376 analyze_function (true);
3377 pop_cfun ();
3378 }
3379 }
3380
3381 } /* ANON namespace. */
3382
/* Debugging helper.  Dump EAF flags FLAGS to stderr with a trailing
   newline; intended to be called by hand from the debugger.  */

void
debug_eaf_flags (int flags)
{
  dump_eaf_flags (stderr, flags, true);
}
3390
3391 /* Called when a new function is inserted to callgraph late. */
3392
3393 void
3394 modref_summaries::insert (struct cgraph_node *node, modref_summary *)
3395 {
3396 /* Local passes ought to be executed by the pass manager. */
3397 if (this == optimization_summaries)
3398 {
3399 optimization_summaries->remove (node);
3400 return;
3401 }
3402 if (!DECL_STRUCT_FUNCTION (node->decl)
3403 || !opt_for_fn (node->decl, flag_ipa_modref))
3404 {
3405 summaries->remove (node);
3406 return;
3407 }
3408 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
3409 analyze_function (true);
3410 pop_cfun ();
3411 }
3412
3413 /* Called when a new function is inserted to callgraph late. */
3414
3415 void
3416 modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
3417 {
3418 /* We do not support adding new function when IPA information is already
3419 propagated. This is done only by SIMD cloning that is not very
3420 critical. */
3421 if (!DECL_STRUCT_FUNCTION (node->decl)
3422 || !opt_for_fn (node->decl, flag_ipa_modref)
3423 || propagated)
3424 {
3425 summaries_lto->remove (node);
3426 return;
3427 }
3428 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
3429 analyze_function (true);
3430 pop_cfun ();
3431 }
3432
3433 /* Called when new clone is inserted to callgraph late. */
3434
3435 void
3436 modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
3437 modref_summary *src_data,
3438 modref_summary *dst_data)
3439 {
3440 /* Do not duplicate optimization summaries; we do not handle parameter
3441 transforms on them. */
3442 if (this == optimization_summaries)
3443 {
3444 optimization_summaries->remove (dst);
3445 return;
3446 }
3447 dst_data->stores = modref_records::create_ggc ();
3448 dst_data->stores->copy_from (src_data->stores);
3449 dst_data->loads = modref_records::create_ggc ();
3450 dst_data->loads->copy_from (src_data->loads);
3451 dst_data->kills.reserve_exact (src_data->kills.length ());
3452 dst_data->kills.splice (src_data->kills);
3453 dst_data->writes_errno = src_data->writes_errno;
3454 dst_data->side_effects = src_data->side_effects;
3455 dst_data->nondeterministic = src_data->nondeterministic;
3456 dst_data->calls_interposable = src_data->calls_interposable;
3457 if (src_data->arg_flags.length ())
3458 dst_data->arg_flags = src_data->arg_flags.copy ();
3459 dst_data->retslot_flags = src_data->retslot_flags;
3460 dst_data->static_chain_flags = src_data->static_chain_flags;
3461 }
3462
3463 /* Called when new clone is inserted to callgraph late. */
3464
3465 void
3466 modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
3467 modref_summary_lto *src_data,
3468 modref_summary_lto *dst_data)
3469 {
3470 /* Be sure that no further cloning happens after ipa-modref. If it does
3471 we will need to update signatures for possible param changes. */
3472 gcc_checking_assert (!((modref_summaries_lto *)summaries_lto)->propagated);
3473 dst_data->stores = modref_records_lto::create_ggc ();
3474 dst_data->stores->copy_from (src_data->stores);
3475 dst_data->loads = modref_records_lto::create_ggc ();
3476 dst_data->loads->copy_from (src_data->loads);
3477 dst_data->kills.reserve_exact (src_data->kills.length ());
3478 dst_data->kills.splice (src_data->kills);
3479 dst_data->writes_errno = src_data->writes_errno;
3480 dst_data->side_effects = src_data->side_effects;
3481 dst_data->nondeterministic = src_data->nondeterministic;
3482 dst_data->calls_interposable = src_data->calls_interposable;
3483 if (src_data->arg_flags.length ())
3484 dst_data->arg_flags = src_data->arg_flags.copy ();
3485 dst_data->retslot_flags = src_data->retslot_flags;
3486 dst_data->static_chain_flags = src_data->static_chain_flags;
3487 }
3488
3489 namespace
3490 {
/* Definition of the modref pass on GIMPLE.  */
const pass_data pass_data_modref = {
  GIMPLE_PASS,		 /* type */
  "modref",		 /* name */
  OPTGROUP_IPA,		 /* optinfo_flags */
  TV_TREE_MODREF,	 /* tv_id */
  (PROP_cfg | PROP_ssa), /* properties_required */
  0,			 /* properties_provided */
  0,			 /* properties_destroyed */
  0,			 /* todo_flags_start */
  0,			 /* todo_flags_finish */
};
3503
/* The local (GIMPLE) variant of the modref pass; recomputes the modref
   summary for a single function (see analyze_function with ipa=false).  */
class pass_modref : public gimple_opt_pass
{
public:
  pass_modref (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_modref, ctxt) {}

  /* opt_pass methods: */
  /* The pass is cloned so it can be scheduled at several points of the
     pass pipeline.  */
  opt_pass *clone () final override
  {
    return new pass_modref (m_ctxt);
  }
  bool gate (function *) final override
  {
    return flag_ipa_modref;
  }
  unsigned int execute (function *) final override;
};
3521
/* Encode TT to the output block OB using the summary streaming API.
   Stream layout (mirrored by read_modref_records): every_base flag,
   base count, then per base: base type, every_ref flag, ref count, and
   per ref: ref type, every_access flag, access count, accesses.  */

static void
write_modref_records (modref_records_lto *tt, struct output_block *ob)
{
  streamer_write_uhwi (ob, tt->every_base);
  streamer_write_uhwi (ob, vec_safe_length (tt->bases));
  for (auto base_node : tt->bases)
    {
      stream_write_tree (ob, base_node->base, true);

      streamer_write_uhwi (ob, base_node->every_ref);
      streamer_write_uhwi (ob, vec_safe_length (base_node->refs));

      for (auto ref_node : base_node->refs)
	{
	  stream_write_tree (ob, ref_node->ref, true);
	  streamer_write_uhwi (ob, ref_node->every_access);
	  streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));

	  for (auto access_node : ref_node->accesses)
	    access_node.stream_out (ob);
	}
    }
}
3547
/* Read a modref_tree from the input block IB using the data from DATA_IN.
   This assumes that the tree was encoded using write_modref_tree.
   Either nolto_ret or lto_ret is initialized by the tree depending whether
   LTO streaming is expected or not.  */

static void
read_modref_records (tree decl,
		     lto_input_block *ib, struct data_in *data_in,
		     modref_records **nolto_ret,
		     modref_records_lto **lto_ret)
{
  /* Size limits come from DECL so re-materialized trees obey the same
     caps as freshly computed ones.  */
  size_t max_bases = opt_for_fn (decl, param_modref_max_bases);
  size_t max_refs = opt_for_fn (decl, param_modref_max_refs);
  size_t max_accesses = opt_for_fn (decl, param_modref_max_accesses);

  if (lto_ret)
    *lto_ret = modref_records_lto::create_ggc ();
  if (nolto_ret)
    *nolto_ret = modref_records::create_ggc ();
  gcc_checking_assert (lto_ret || nolto_ret);

  size_t every_base = streamer_read_uhwi (ib);
  size_t nbase = streamer_read_uhwi (ib);

  /* A collapsed tree ("accesses everything") streams no bases.  */
  gcc_assert (!every_base || nbase == 0);
  if (every_base)
    {
      if (nolto_ret)
	(*nolto_ret)->collapse ();
      if (lto_ret)
	(*lto_ret)->collapse ();
    }
  for (size_t i = 0; i < nbase; i++)
    {
      tree base_tree = stream_read_tree (ib, data_in);
      modref_base_node <alias_set_type> *nolto_base_node = NULL;
      modref_base_node <tree> *lto_base_node = NULL;

      /* At stream in time we have LTO alias info.  Check if we streamed in
	 something obviously unnecessary.  Do not glob types by alias sets;
	 it is not 100% clear that ltrans types will get merged same way.
	 Types may get refined based on ODR type conflicts.  */
      if (base_tree && !get_alias_set (base_tree))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "Streamed in alias set 0 type ");
	      print_generic_expr (dump_file, base_tree);
	      fprintf (dump_file, "\n");
	    }
	  base_tree = NULL;
	}

      if (nolto_ret)
	nolto_base_node = (*nolto_ret)->insert_base (base_tree
						     ? get_alias_set (base_tree)
						     : 0, 0, INT_MAX);
      if (lto_ret)
	lto_base_node = (*lto_ret)->insert_base (base_tree, 0, max_bases);
      size_t every_ref = streamer_read_uhwi (ib);
      size_t nref = streamer_read_uhwi (ib);

      /* As for bases: a collapsed base streams no refs.  */
      gcc_assert (!every_ref || nref == 0);
      if (every_ref)
	{
	  if (nolto_base_node)
	    nolto_base_node->collapse ();
	  if (lto_base_node)
	    lto_base_node->collapse ();
	}
      for (size_t j = 0; j < nref; j++)
	{
	  tree ref_tree = stream_read_tree (ib, data_in);

	  if (ref_tree && !get_alias_set (ref_tree))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Streamed in alias set 0 type ");
		  print_generic_expr (dump_file, ref_tree);
		  fprintf (dump_file, "\n");
		}
	      ref_tree = NULL;
	    }

	  modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
	  modref_ref_node <tree> *lto_ref_node = NULL;

	  if (nolto_base_node)
	    nolto_ref_node
	      = nolto_base_node->insert_ref (ref_tree
					     ? get_alias_set (ref_tree) : 0,
					     max_refs);
	  if (lto_base_node)
	    lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);

	  size_t every_access = streamer_read_uhwi (ib);
	  size_t naccesses = streamer_read_uhwi (ib);

	  if (nolto_ref_node && every_access)
	    nolto_ref_node->collapse ();
	  if (lto_ref_node && every_access)
	    lto_ref_node->collapse ();

	  for (size_t k = 0; k < naccesses; k++)
	    {
	      modref_access_node a = modref_access_node::stream_in (ib);
	      if (nolto_ref_node)
		nolto_ref_node->insert_access (a, max_accesses, false);
	      if (lto_ref_node)
		lto_ref_node->insert_access (a, max_accesses, false);
	    }
	}
    }
  if (lto_ret)
    (*lto_ret)->cleanup ();
  if (nolto_ret)
    (*nolto_ret)->cleanup ();
}
3667
3668 /* Write ESUM to BP. */
3669
3670 static void
3671 modref_write_escape_summary (struct bitpack_d *bp, escape_summary *esum)
3672 {
3673 if (!esum)
3674 {
3675 bp_pack_var_len_unsigned (bp, 0);
3676 return;
3677 }
3678 bp_pack_var_len_unsigned (bp, esum->esc.length ());
3679 unsigned int i;
3680 escape_entry *ee;
3681 FOR_EACH_VEC_ELT (esum->esc, i, ee)
3682 {
3683 bp_pack_var_len_int (bp, ee->parm_index);
3684 bp_pack_var_len_unsigned (bp, ee->arg);
3685 bp_pack_var_len_unsigned (bp, ee->min_flags);
3686 bp_pack_value (bp, ee->direct, 1);
3687 }
3688 }
3689
3690 /* Read escape summary for E from BP. */
3691
3692 static void
3693 modref_read_escape_summary (struct bitpack_d *bp, cgraph_edge *e)
3694 {
3695 unsigned int n = bp_unpack_var_len_unsigned (bp);
3696 if (!n)
3697 return;
3698 escape_summary *esum = escape_summaries->get_create (e);
3699 esum->esc.reserve_exact (n);
3700 for (unsigned int i = 0; i < n; i++)
3701 {
3702 escape_entry ee;
3703 ee.parm_index = bp_unpack_var_len_int (bp);
3704 ee.arg = bp_unpack_var_len_unsigned (bp);
3705 ee.min_flags = bp_unpack_var_len_unsigned (bp);
3706 ee.direct = bp_unpack_value (bp, 1);
3707 esum->esc.quick_push (ee);
3708 }
3709 }
3710
/* Callback for write_summary.  Stream out all useful LTO summaries; the
   order of writes here must exactly mirror the reads in read_section.  */

static void
modref_write ()
{
  struct output_block *ob = create_output_block (LTO_section_ipa_modref);
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  unsigned int count = 0;
  int i;

  /* No LTO summaries at all: emit an empty section and bail out.  */
  if (!summaries_lto)
    {
      streamer_write_uhwi (ob, 0);
      streamer_write_char_stream (ob->main_stream, 0);
      produce_asm (ob, NULL);
      destroy_output_block (ob);
      return;
    }

  /* First pass: count functions whose summary is worth streaming.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
      modref_summary_lto *r;

      if (cnode && cnode->definition && !cnode->alias
	  && (r = summaries_lto->get (cnode))
	  && r->useful_p (flags_from_decl_or_type (cnode->decl)))
	count++;
    }
  streamer_write_uhwi (ob, count);

  /* Second pass: stream the summaries themselves.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);

      if (cnode && cnode->definition && !cnode->alias)
	{
	  modref_summary_lto *r = summaries_lto->get (cnode);

	  /* Same filter as the counting loop above.  */
	  if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
	    continue;

	  streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));

	  /* EAF flags of arguments, return slot and static chain.  */
	  streamer_write_uhwi (ob, r->arg_flags.length ());
	  for (unsigned int i = 0; i < r->arg_flags.length (); i++)
	    streamer_write_uhwi (ob, r->arg_flags[i]);
	  streamer_write_uhwi (ob, r->retslot_flags);
	  streamer_write_uhwi (ob, r->static_chain_flags);

	  /* Load/store trees and the kill vector.  */
	  write_modref_records (r->loads, ob);
	  write_modref_records (r->stores, ob);
	  streamer_write_uhwi (ob, r->kills.length ());
	  for (auto kill : r->kills)
	    kill.stream_out (ob);

	  /* Boolean flags are bit-packed; read_section unpacks them in
	     the same order.  */
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, r->writes_errno, 1);
	  bp_pack_value (&bp, r->side_effects, 1);
	  bp_pack_value (&bp, r->nondeterministic, 1);
	  bp_pack_value (&bp, r->calls_interposable, 1);
	  /* Per-edge fnspec and escape summaries are not streamed at WPA
	     stage.  */
	  if (!flag_wpa)
	    {
	      for (cgraph_edge *e = cnode->indirect_calls;
		   e; e = e->next_callee)
		{
		  class fnspec_summary *sum = fnspec_summaries->get (e);
		  bp_pack_value (&bp, sum != NULL, 1);
		  if (sum)
		    bp_pack_string (ob, &bp, sum->fnspec, true);
		  class escape_summary *esum = escape_summaries->get (e);
		  modref_write_escape_summary (&bp,esum);
		}
	      for (cgraph_edge *e = cnode->callees; e; e = e->next_callee)
		{
		  class fnspec_summary *sum = fnspec_summaries->get (e);
		  bp_pack_value (&bp, sum != NULL, 1);
		  if (sum)
		    bp_pack_string (ob, &bp, sum->fnspec, true);
		  class escape_summary *esum = escape_summaries->get (e);
		  modref_write_escape_summary (&bp,esum);
		}
	    }
	  streamer_write_bitpack (&bp);
	}
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
3803
/* Read modref summaries from one LTO section (DATA of length LEN from
   FILE_DATA) and populate the global summary tables; inverse of
   modref_write.  */

static void
read_section (struct lto_file_decl_data *file_data, const char *data,
	      size_t len)
{
  const struct lto_function_header *header
    = (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int f_count;

  lto_input_block ib ((const char *) data + main_offset, header->main_size,
		      file_data->mode_table);

  data_in
    = lto_data_in_create (file_data, (const char *) data + string_offset,
			  header->string_size, vNULL);
  f_count = streamer_read_uhwi (&ib);
  for (i = 0; i < f_count; i++)
    {
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      unsigned int index = streamer_read_uhwi (&ib);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));

      /* Depending on the compilation stage the streamed data goes into
	 the no-LTO and/or the LTO summary table.  */
      modref_summary *modref_sum = summaries
				   ? summaries->get_create (node) : NULL;
      modref_summary_lto *modref_sum_lto = summaries_lto
					   ? summaries_lto->get_create (node)
					   : NULL;
      if (optimization_summaries)
	modref_sum = optimization_summaries->get_create (node);

      if (modref_sum)
	{
	  modref_sum->writes_errno = false;
	  modref_sum->side_effects = false;
	  modref_sum->nondeterministic = false;
	  modref_sum->calls_interposable = false;
	}
      if (modref_sum_lto)
	{
	  modref_sum_lto->writes_errno = false;
	  modref_sum_lto->side_effects = false;
	  modref_sum_lto->nondeterministic = false;
	  modref_sum_lto->calls_interposable = false;
	}

      gcc_assert (!modref_sum || (!modref_sum->loads
				  && !modref_sum->stores));
      gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads
				      && !modref_sum_lto->stores));
      /* EAF flags of arguments, return slot and static chain; same order
	 as written by modref_write.  */
      unsigned int args = streamer_read_uhwi (&ib);
      if (args && modref_sum)
	modref_sum->arg_flags.reserve_exact (args);
      if (args && modref_sum_lto)
	modref_sum_lto->arg_flags.reserve_exact (args);
      for (unsigned int i = 0; i < args; i++)
	{
	  eaf_flags_t flags = streamer_read_uhwi (&ib);
	  if (modref_sum)
	    modref_sum->arg_flags.quick_push (flags);
	  if (modref_sum_lto)
	    modref_sum_lto->arg_flags.quick_push (flags);
	}
      eaf_flags_t flags = streamer_read_uhwi (&ib);
      if (modref_sum)
	modref_sum->retslot_flags = flags;
      if (modref_sum_lto)
	modref_sum_lto->retslot_flags = flags;

      flags = streamer_read_uhwi (&ib);
      if (modref_sum)
	modref_sum->static_chain_flags = flags;
      if (modref_sum_lto)
	modref_sum_lto->static_chain_flags = flags;

      /* Load/store trees and the kill vector.  */
      read_modref_records (node->decl, &ib, data_in,
			   modref_sum ? &modref_sum->loads : NULL,
			   modref_sum_lto ? &modref_sum_lto->loads : NULL);
      read_modref_records (node->decl, &ib, data_in,
			   modref_sum ? &modref_sum->stores : NULL,
			   modref_sum_lto ? &modref_sum_lto->stores : NULL);
      int j = streamer_read_uhwi (&ib);
      if (j && modref_sum)
	modref_sum->kills.reserve_exact (j);
      if (j && modref_sum_lto)
	modref_sum_lto->kills.reserve_exact (j);
      for (int k = 0; k < j; k++)
	{
	  modref_access_node a = modref_access_node::stream_in (&ib);

	  if (modref_sum)
	    modref_sum->kills.quick_push (a);
	  if (modref_sum_lto)
	    modref_sum_lto->kills.quick_push (a);
	}
      /* Bit-packed boolean flags; unpack order matches modref_write.  */
      struct bitpack_d bp = streamer_read_bitpack (&ib);
      if (bp_unpack_value (&bp, 1))
	{
	  if (modref_sum)
	    modref_sum->writes_errno = true;
	  if (modref_sum_lto)
	    modref_sum_lto->writes_errno = true;
	}
      if (bp_unpack_value (&bp, 1))
	{
	  if (modref_sum)
	    modref_sum->side_effects = true;
	  if (modref_sum_lto)
	    modref_sum_lto->side_effects = true;
	}
      if (bp_unpack_value (&bp, 1))
	{
	  if (modref_sum)
	    modref_sum->nondeterministic = true;
	  if (modref_sum_lto)
	    modref_sum_lto->nondeterministic = true;
	}
      if (bp_unpack_value (&bp, 1))
	{
	  if (modref_sum)
	    modref_sum->calls_interposable = true;
	  if (modref_sum_lto)
	    modref_sum_lto->calls_interposable = true;
	}
      /* Per-edge fnspec and escape summaries are present only when the
	 section was not produced at WPA stage (see modref_write).  */
      if (!flag_ltrans)
	{
	  for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
	    {
	      if (bp_unpack_value (&bp, 1))
		{
		  class fnspec_summary *sum = fnspec_summaries->get_create (e);
		  sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
		}
	      modref_read_escape_summary (&bp, e);
	    }
	  for (cgraph_edge *e = node->callees; e; e = e->next_callee)
	    {
	      if (bp_unpack_value (&bp, 1))
		{
		  class fnspec_summary *sum = fnspec_summaries->get_create (e);
		  sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
		}
	      modref_read_escape_summary (&bp, e);
	    }
	}
      /* NOTE(review): this dereference presumably relies on
	 optimization_summaries being active at ltrans so MODREF_SUM is
	 non-NULL here (see modref_read) — confirm.  */
      if (flag_ltrans)
	modref_sum->finalize (node->decl);
      if (dump_file)
	{
	  fprintf (dump_file, "Read modref for %s\n",
		   node->dump_name ());
	  if (modref_sum)
	    modref_sum->dump (dump_file);
	  if (modref_sum_lto)
	    modref_sum_lto->dump (dump_file);
	  dump_modref_edge_summaries (dump_file, node, 4);
	}
    }

  lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
3974
3975 /* Callback for read_summary. */
3976
3977 static void
3978 modref_read (void)
3979 {
3980 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3981 struct lto_file_decl_data *file_data;
3982 unsigned int j = 0;
3983
3984 gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto);
3985 if (flag_ltrans)
3986 optimization_summaries = modref_summaries::create_ggc (symtab);
3987 else
3988 {
3989 if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
3990 summaries_lto = modref_summaries_lto::create_ggc (symtab);
3991 if (!flag_wpa
3992 || (flag_incremental_link == INCREMENTAL_LINK_LTO
3993 && flag_fat_lto_objects))
3994 summaries = modref_summaries::create_ggc (symtab);
3995 if (!fnspec_summaries)
3996 fnspec_summaries = new fnspec_summaries_t (symtab);
3997 if (!escape_summaries)
3998 escape_summaries = new escape_summaries_t (symtab);
3999 }
4000
4001 while ((file_data = file_data_vec[j++]))
4002 {
4003 size_t len;
4004 const char *data = lto_get_summary_section_data (file_data,
4005 LTO_section_ipa_modref,
4006 &len);
4007 if (data)
4008 read_section (file_data, data, len);
4009 else
4010 /* Fatal error here. We do not want to support compiling ltrans units
4011 with different version of compiler or different flags than the WPA
4012 unit, so this should never happen. */
4013 fatal_error (input_location,
4014 "IPA modref summary is missing in input file");
4015 }
4016 }
4017
4018 /* Recompute arg_flags for param adjustments in INFO. */
4019
4020 static void
4021 remap_arg_flags (auto_vec <eaf_flags_t> &arg_flags, clone_info *info)
4022 {
4023 auto_vec<eaf_flags_t> old = arg_flags.copy ();
4024 int max = -1;
4025 size_t i;
4026 ipa_adjusted_param *p;
4027
4028 arg_flags.release ();
4029
4030 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
4031 {
4032 int o = info->param_adjustments->get_original_index (i);
4033 if (o >= 0 && (int)old.length () > o && old[o])
4034 max = i;
4035 }
4036 if (max >= 0)
4037 arg_flags.safe_grow_cleared (max + 1, true);
4038 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
4039 {
4040 int o = info->param_adjustments->get_original_index (i);
4041 if (o >= 0 && (int)old.length () > o && old[o])
4042 arg_flags[i] = old[o];
4043 }
4044 }
4045
4046 /* Update kills according to the parm map MAP. */
4047
4048 static void
4049 remap_kills (vec <modref_access_node> &kills, const vec <int> &map)
4050 {
4051 for (size_t i = 0; i < kills.length ();)
4052 if (kills[i].parm_index >= 0)
4053 {
4054 if (kills[i].parm_index < (int)map.length ()
4055 && map[kills[i].parm_index] != MODREF_UNKNOWN_PARM)
4056 {
4057 kills[i].parm_index = map[kills[i].parm_index];
4058 i++;
4059 }
4060 else
4061 kills.unordered_remove (i);
4062 }
4063 else
4064 i++;
4065 }
4066
/* If signature changed, update the summary of NODE to match the new
   parameter positions recorded in its clone_info.  */

static void
update_signature (struct cgraph_node *node)
{
  clone_info *info = clone_info::get (node);
  if (!info || !info->param_adjustments)
    return;

  modref_summary *r = optimization_summaries
		      ? optimization_summaries->get (node) : NULL;
  modref_summary_lto *r_lto = summaries_lto
			      ? summaries_lto->get (node) : NULL;
  if (!r && !r_lto)
    return;
  if (dump_file)
    {
      fprintf (dump_file, "Updating summary for %s from:\n",
	       node->dump_name ());
      if (r)
	r->dump (dump_file);
      if (r_lto)
	r_lto->dump (dump_file);
    }

  size_t i, max = 0;
  ipa_adjusted_param *p;

  /* Find the highest original parameter index referenced, to size MAP.  */
  FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
    {
      int idx = info->param_adjustments->get_original_index (i);
      if (idx > (int)max)
	max = idx;
    }

  /* Build MAP: original index -> new index, MODREF_UNKNOWN_PARM for
     parameters that were dropped.  */
  auto_vec <int, 32> map;

  map.reserve (max + 1);
  for (i = 0; i <= max; i++)
    map.quick_push (MODREF_UNKNOWN_PARM);
  FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
    {
      int idx = info->param_adjustments->get_original_index (i);
      if (idx >= 0)
	map[idx] = i;
    }
  if (r)
    {
      r->loads->remap_params (&map);
      r->stores->remap_params (&map);
      remap_kills (r->kills, map);
      if (r->arg_flags.length ())
	remap_arg_flags (r->arg_flags, info);
    }
  if (r_lto)
    {
      r_lto->loads->remap_params (&map);
      r_lto->stores->remap_params (&map);
      remap_kills (r_lto->kills, map);
      if (r_lto->arg_flags.length ())
	remap_arg_flags (r_lto->arg_flags, info);
    }
  if (dump_file)
    {
      fprintf (dump_file, "to:\n");
      if (r)
	r->dump (dump_file);
      if (r_lto)
	r_lto->dump (dump_file);
    }
  /* NOTE(review): only the non-LTO summary is finalized here; nothing in
     this file calls a finalize step for R_LTO.  */
  if (r)
    r->finalize (node->decl);
  return;
}
4141
/* Definition of the modref IPA pass: identity, timing variable and
   TODO flags consumed by the pass manager.  */
const pass_data pass_data_ipa_modref =
{
  IPA_PASS, /* type */
  "modref", /* name */
  OPTGROUP_IPA, /* optinfo_flags */
  TV_IPA_MODREF, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};
4155
/* IPA modref pass.  Summary generation, streaming in and streaming out
   are all done through the callbacks passed to ipa_opt_pass_d below;
   there is no per-function transform phase.  */
class pass_ipa_modref : public ipa_opt_pass_d
{
public:
  pass_ipa_modref (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
		      modref_generate, /* generate_summary */
		      modref_write, /* write_summary */
		      modref_read, /* read_summary */
		      modref_write, /* write_optimization_summary */
		      modref_read, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  opt_pass *clone () final override { return new pass_ipa_modref (m_ctxt); }
  /* The pass is unconditionally enabled.  */
  bool gate (function *) final override
  {
    return true;
  }
  unsigned int execute (function *) final override;

};
4181
4182 }
4183
4184 unsigned int pass_modref::execute (function *)
4185 {
4186 if (analyze_function (false))
4187 return execute_fixup_cfg ();
4188 return 0;
4189 }
4190
/* Factory returning a freshly allocated local (GIMPLE) modref pass
   for context CTXT; the caller owns the returned object.  */
gimple_opt_pass *
make_pass_modref (gcc::context *ctxt)
{
  return new pass_modref (ctxt);
}
4196
/* Factory returning a freshly allocated IPA modref pass for context
   CTXT; the caller owns the returned object.  */
ipa_opt_pass_d *
make_pass_ipa_modref (gcc::context *ctxt)
{
  return new pass_ipa_modref (ctxt);
}
4202
4203 namespace {
4204
4205 /* Skip edges from and to nodes without ipa_pure_const enabled.
4206 Ignore not available symbols. */
4207
4208 static bool
4209 ignore_edge (struct cgraph_edge *e)
4210 {
4211 /* We merge summaries of inline clones into summaries of functions they
4212 are inlined to. For that reason the complete function bodies must
4213 act as unit. */
4214 if (!e->inline_failed)
4215 return false;
4216 enum availability avail;
4217 cgraph_node *callee = e->callee->ultimate_alias_target
4218 (&avail, e->caller);
4219
4220 return (avail <= AVAIL_INTERPOSABLE
4221 || ((!optimization_summaries || !optimization_summaries->get (callee))
4222 && (!summaries_lto || !summaries_lto->get (callee))));
4223 }
4224
/* Compute parm_map for CALLEE_EDGE.  For each actual argument record which
   formal parameter of the caller it is based on (via IPA jump functions),
   together with a byte offset when known.  Return true on success; false
   when no IPA information is available for the edge.  */

static bool
compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
{
  class ipa_edge_args *args;
  if (ipa_node_params_sum
      && !callee_edge->call_stmt_cannot_inline_p
      && (args = ipa_edge_args_sum->get (callee_edge)) != NULL)
    {
      int i, count = ipa_get_cs_argument_count (args);
      class ipa_node_params *caller_parms_info, *callee_pi;
      class ipa_call_summary *es
	     = ipa_call_summaries->get (callee_edge);
      cgraph_node *callee
	 = callee_edge->callee->ultimate_alias_target
	     (NULL, callee_edge->caller);

      /* Jump functions are evaluated relative to the function the caller
	 was inlined to, if any.  */
      caller_parms_info
	= ipa_node_params_sum->get (callee_edge->caller->inlined_to
				    ? callee_edge->caller->inlined_to
				    : callee_edge->caller);
      callee_pi = ipa_node_params_sum->get (callee);

      (*parm_map).safe_grow_cleared (count, true);

      for (i = 0; i < count; i++)
	{
	  /* Arguments known to point to local or readonly memory can be
	     ignored by modref.  */
	  if (es && es->param[i].points_to_local_or_readonly_memory)
	    {
	      (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
	      continue;
	    }

	  struct ipa_jump_func *jf
		 = ipa_get_ith_jump_func (args, i);
	  /* The jump function may also evaluate to a constant pointing
	     to local/readonly memory.  */
	  if (jf && callee_pi)
	    {
	      tree cst = ipa_value_from_jfunc (caller_parms_info,
					       jf,
					       ipa_get_type
						 (callee_pi, i));
	      if (cst && points_to_local_or_readonly_memory_p (cst))
		{
		  (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
		  continue;
		}
	    }
	  /* Pass-through: the argument is the caller's formal parameter,
	     possibly with a known pointer adjustment.  */
	  if (jf && jf->type == IPA_JF_PASS_THROUGH)
	    {
	      (*parm_map)[i].parm_index
		= ipa_get_jf_pass_through_formal_id (jf);
	      if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
		{
		  (*parm_map)[i].parm_offset_known = true;
		  (*parm_map)[i].parm_offset = 0;
		}
	      else if (ipa_get_jf_pass_through_operation (jf)
		       == POINTER_PLUS_EXPR
		       && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
					  &(*parm_map)[i].parm_offset))
		(*parm_map)[i].parm_offset_known = true;
	      else
		(*parm_map)[i].parm_offset_known = false;
	      continue;
	    }
	  /* Ancestor: the argument points into an object at a constant
	     offset (in bits, converted here to bytes) from the caller's
	     formal parameter.  */
	  if (jf && jf->type == IPA_JF_ANCESTOR)
	    {
	      (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
	      (*parm_map)[i].parm_offset_known = true;
	      gcc_checking_assert
		(!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)));
	      (*parm_map)[i].parm_offset
		 = ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT;
	    }
	  else
	    /* Nothing useful known about this argument.  */
	    (*parm_map)[i].parm_index = -1;
	}
      if (dump_file)
	{
	  fprintf (dump_file, " Parm map: ");
	  for (i = 0; i < count; i++)
	    fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
	  fprintf (dump_file, "\n");
	}
      return true;
    }
  return false;
}
4314
/* Map used to translate escape infos from one function to another.  */

struct escape_map
{
  /* Parameter index in the function the escape translates to.  */
  int parm_index;
  /* True when the value is passed directly; when false the translated
     escape needs its flags weakened via deref_flags.  */
  bool direct;
};
4322
/* Update escape summary of edge E: translate every escape entry through
   MAP (indexed by old parameter index) and drop entries that no longer
   correspond to any parameter.  IGNORE_STORES is forwarded to deref_flags
   when a direct escape becomes indirect.  */

static void
update_escape_summary_1 (cgraph_edge *e,
			 vec <vec <escape_map>> &map,
			 bool ignore_stores)
{
  escape_summary *sum = escape_summaries->get (e);
  if (!sum)
    return;
  /* Rebuild the escape vector from scratch; OLD keeps previous entries
     alive while SUM->esc is repopulated.  */
  auto_vec <escape_entry> old = sum->esc.copy ();
  sum->esc.release ();

  unsigned int i;
  escape_entry *ee;
  FOR_EACH_VEC_ELT (old, i, ee)
    {
      unsigned int j;
      struct escape_map *em;
      /* TODO: We do not have jump functions for return slots, so we
	 never propagate them to outer function. */
      if (ee->parm_index >= (int)map.length ()
	  || ee->parm_index < 0)
	continue;
      /* One old entry may translate to several new entries.  */
      FOR_EACH_VEC_ELT (map[ee->parm_index], j, em)
	{
	  int min_flags = ee->min_flags;
	  /* Direct escape becoming indirect must have its flags
	     weakened.  */
	  if (ee->direct && !em->direct)
	    min_flags = deref_flags (min_flags, ignore_stores);
	  struct escape_entry entry = {em->parm_index, ee->arg,
				       min_flags,
				       ee->direct & em->direct};
	  sum->esc.safe_push (entry);
	}
    }
  /* Drop the summary entirely when nothing escapes anymore.  */
  if (!sum->esc.length ())
    escape_summaries->remove (e);
}
4361
4362 /* Update escape map for NODE. */
4363
4364 static void
4365 update_escape_summary (cgraph_node *node,
4366 vec <vec <escape_map>> &map,
4367 bool ignore_stores)
4368 {
4369 if (!escape_summaries)
4370 return;
4371 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
4372 update_escape_summary_1 (e, map, ignore_stores);
4373 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
4374 {
4375 if (!e->inline_failed)
4376 update_escape_summary (e->callee, map, ignore_stores);
4377 else
4378 update_escape_summary_1 (e, map, ignore_stores);
4379 }
4380 }
4381
4382 /* Get parameter type from DECL. This is only safe for special cases
4383 like builtins we create fnspec for because the type match is checked
4384 at fnspec creation time. */
4385
4386 static tree
4387 get_parm_type (tree decl, unsigned int i)
4388 {
4389 tree t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4390
4391 for (unsigned int p = 0; p < i; p++)
4392 t = TREE_CHAIN (t);
4393 return TREE_VALUE (t);
4394 }
4395
/* Return access mode for argument I of call E with FNSPEC.  MAP describes
   how the argument translates to caller's parameters.  When the fnspec
   supplies a maximum access size (via another argument or the pointed-to
   type), record it in the resulting access node.  */

static modref_access_node
get_access_for_fnspec (cgraph_edge *e, attr_fnspec &fnspec,
		       unsigned int i, modref_parm_map &map)
{
  tree size = NULL_TREE;
  unsigned int size_arg;

  if (!fnspec.arg_specified_p (i))
    ;
  else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
    {
      /* The size is given by another argument; try to evaluate its jump
	 function to a constant.  */
      cgraph_node *node = e->caller->inlined_to
			  ? e->caller->inlined_to : e->caller;
      ipa_node_params *caller_parms_info = ipa_node_params_sum->get (node);
      ipa_edge_args *args = ipa_edge_args_sum->get (e);
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, size_arg);

      if (jf)
	size = ipa_value_from_jfunc (caller_parms_info, jf,
				     get_parm_type (e->callee->decl, size_arg));
    }
  else if (fnspec.arg_access_size_given_by_type_p (i))
    size = TYPE_SIZE_UNIT (get_parm_type (e->callee->decl, i));
  /* Start with an access of unknown size/max_size at the mapped
     parameter and offset.  */
  modref_access_node a = {0, -1, -1,
			  map.parm_offset, map.parm_index,
			  map.parm_offset_known, 0};
  poly_int64 size_hwi;
  /* Use the size only when it is a (poly) constant whose conversion to
     bits cannot overflow.  */
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0,
			    HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      a.size = -1;
      a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
    }
  return a;
}
4435
4436 /* Collapse loads and return true if something changed. */
4437 static bool
4438 collapse_loads (modref_summary *cur_summary,
4439 modref_summary_lto *cur_summary_lto)
4440 {
4441 bool changed = false;
4442
4443 if (cur_summary && !cur_summary->loads->every_base)
4444 {
4445 cur_summary->loads->collapse ();
4446 changed = true;
4447 }
4448 if (cur_summary_lto
4449 && !cur_summary_lto->loads->every_base)
4450 {
4451 cur_summary_lto->loads->collapse ();
4452 changed = true;
4453 }
4454 return changed;
4455 }
4456
/* Collapse stores and return true if something changed.  */

static bool
collapse_stores (modref_summary *cur_summary,
		 modref_summary_lto *cur_summary_lto)
{
  bool changed = false;

  if (cur_summary && !cur_summary->stores->every_base)
    {
      cur_summary->stores->collapse ();
      changed = true;
    }
  if (cur_summary_lto
      && !cur_summary_lto->stores->every_base)
    {
      cur_summary_lto->stores->collapse ();
      changed = true;
    }
  return changed;
}
4478
/* Call E in NODE with ECF_FLAGS has no summary; update CUR_SUMMARY and
   CUR_SUMMARY_LTO accordingly.  Return true if something changed.
   NONTRIVIAL_SCC is true when the edge is within a strongly connected
   component of more than one node, forcing conservative side-effect
   assumptions.  */

static bool
propagate_unknown_call (cgraph_node *node,
			cgraph_edge *e, int ecf_flags,
			modref_summary *cur_summary,
			modref_summary_lto *cur_summary_lto,
			bool nontrivial_scc)
{
  bool changed = false;
  class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
  auto_vec <modref_parm_map, 32> parm_map;
  bool looping;

  /* Builtins safe for const functions only contribute a possible
     side effect when they may loop.  */
  if (e->callee
      && builtin_safe_for_const_function_p (&looping, e->callee->decl))
    {
      if (looping && cur_summary && !cur_summary->side_effects)
	{
	  cur_summary->side_effects = true;
	  changed = true;
	}
      if (looping && cur_summary_lto && !cur_summary_lto->side_effects)
	{
	  cur_summary_lto->side_effects = true;
	  changed = true;
	}
      return changed;
    }

  /* Unless the callee is non-looping const/pure/novops and the SCC is
     trivial, record side effects and (possibly) nondeterminism.  */
  if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
      || (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
      || nontrivial_scc)
    {
      if (cur_summary && !cur_summary->side_effects)
	{
	  cur_summary->side_effects = true;
	  changed = true;
	}
      if (cur_summary_lto && !cur_summary_lto->side_effects)
	{
	  cur_summary_lto->side_effects = true;
	  changed = true;
	}
      if (cur_summary && !cur_summary->nondeterministic
	  && !ignore_nondeterminism_p (node->decl, ecf_flags))
	{
	  cur_summary->nondeterministic = true;
	  changed = true;
	}
      if (cur_summary_lto && !cur_summary_lto->nondeterministic
	  && !ignore_nondeterminism_p (node->decl, ecf_flags))
	{
	  cur_summary_lto->nondeterministic = true;
	  changed = true;
	}
    }
  /* Const/novops calls touch no memory visible to the caller.  */
  if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
    return changed;

  /* With a fnspec and a usable parm map we can model accesses
     precisely per argument.  */
  if (fnspec_sum
      && compute_parm_map (e, &parm_map))
    {
      attr_fnspec fnspec (fnspec_sum->fnspec);

      gcc_checking_assert (fnspec.known_p ());
      if (fnspec.global_memory_read_p ())
	collapse_loads (cur_summary, cur_summary_lto);
      else
	{
	  /* Walk pointer arguments; every argument the fnspec allows to
	     be read becomes a load at the mapped caller parameter.  */
	  tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
	  for (unsigned i = 0; i < parm_map.length () && t;
	       i++, t = TREE_CHAIN (t))
	    if (!POINTER_TYPE_P (TREE_VALUE (t)))
	      ;
	    else if (!fnspec.arg_specified_p (i)
		     || fnspec.arg_maybe_read_p (i))
	      {
		modref_parm_map map = parm_map[i];
		if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
		  continue;
		if (map.parm_index == MODREF_UNKNOWN_PARM)
		  {
		    collapse_loads (cur_summary, cur_summary_lto);
		    break;
		  }
		if (cur_summary)
		  changed |= cur_summary->loads->insert
		    (node->decl, 0, 0,
		     get_access_for_fnspec (e, fnspec, i, map), false);
		if (cur_summary_lto)
		  changed |= cur_summary_lto->loads->insert
		    (node->decl, 0, 0,
		     get_access_for_fnspec (e, fnspec, i, map), false);
	      }
	}
      if (ignore_stores_p (node->decl, ecf_flags))
	;
      else if (fnspec.global_memory_written_p ())
	collapse_stores (cur_summary, cur_summary_lto);
      else
	{
	  /* Same walk for stores.  */
	  tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
	  for (unsigned i = 0; i < parm_map.length () && t;
	       i++, t = TREE_CHAIN (t))
	    if (!POINTER_TYPE_P (TREE_VALUE (t)))
	      ;
	    else if (!fnspec.arg_specified_p (i)
		     || fnspec.arg_maybe_written_p (i))
	      {
		modref_parm_map map = parm_map[i];
		if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
		  continue;
		if (map.parm_index == MODREF_UNKNOWN_PARM)
		  {
		    collapse_stores (cur_summary, cur_summary_lto);
		    break;
		  }
		if (cur_summary)
		  changed |= cur_summary->stores->insert
		    (node->decl, 0, 0,
		     get_access_for_fnspec (e, fnspec, i, map), false);
		if (cur_summary_lto)
		  changed |= cur_summary_lto->stores->insert
		    (node->decl, 0, 0,
		     get_access_for_fnspec (e, fnspec, i, map), false);
	      }
	}
      /* Math routines may store to errno when -fmath-errno.  */
      if (fnspec.errno_maybe_written_p () && flag_errno_math)
	{
	  if (cur_summary && !cur_summary->writes_errno)
	    {
	      cur_summary->writes_errno = true;
	      changed = true;
	    }
	  if (cur_summary_lto && !cur_summary_lto->writes_errno)
	    {
	      cur_summary_lto->writes_errno = true;
	      changed = true;
	    }
	}
      return changed;
    }
  /* Without fnspec info, conservatively collapse loads and, unless
     stores can be ignored, stores as well.  */
  if (dump_file)
    fprintf (dump_file, " collapsing loads\n");
  changed |= collapse_loads (cur_summary, cur_summary_lto);
  if (!ignore_stores_p (node->decl, ecf_flags))
    {
      if (dump_file)
	fprintf (dump_file, " collapsing stores\n");
      changed |= collapse_stores (cur_summary, cur_summary_lto);
    }
  return changed;
}
4634
4635 /* Maybe remove summaries of NODE pointed to by CUR_SUMMARY_PTR
4636 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
4637
4638 static void
4639 remove_useless_summaries (cgraph_node *node,
4640 modref_summary **cur_summary_ptr,
4641 modref_summary_lto **cur_summary_lto_ptr,
4642 int ecf_flags)
4643 {
4644 if (*cur_summary_ptr && !(*cur_summary_ptr)->useful_p (ecf_flags, false))
4645 {
4646 optimization_summaries->remove (node);
4647 *cur_summary_ptr = NULL;
4648 }
4649 if (*cur_summary_lto_ptr
4650 && !(*cur_summary_lto_ptr)->useful_p (ecf_flags, false))
4651 {
4652 summaries_lto->remove (node);
4653 *cur_summary_lto_ptr = NULL;
4654 }
4655 }
4656
/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
   and propagate loads/stores.  Iterate until fixpoint; afterwards try to
   declare functions in the component const or pure.  Return true when
   some function was made const or pure.  */

static bool
modref_propagate_in_scc (cgraph_node *component_node)
{
  bool changed = true;
  bool first = true;
  int iteration = 0;

  while (changed)
    {
      /* The SCC is nontrivial when the cycle list has more than the
	 starting node.  */
      bool nontrivial_scc
	 = ((struct ipa_dfs_info *) component_node->aux)->next_cycle;
      changed = false;
      for (struct cgraph_node *cur = component_node; cur;
	   cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
	{
	  /* Summaries of inline clones are merged into the node they
	     were inlined to.  */
	  cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
	  modref_summary *cur_summary = optimization_summaries
					? optimization_summaries->get (node)
					: NULL;
	  modref_summary_lto *cur_summary_lto = summaries_lto
						? summaries_lto->get (node)
						: NULL;

	  if (!cur_summary && !cur_summary_lto)
	    continue;

	  int cur_ecf_flags = flags_from_decl_or_type (node->decl);

	  if (dump_file)
	    fprintf (dump_file, " Processing %s%s%s\n",
		     cur->dump_name (),
		     TREE_READONLY (cur->decl) ? " (const)" : "",
		     DECL_PURE_P (cur->decl) ? " (pure)" : "");

	  /* Indirect calls have unknown targets; treat them as calls
	     with no summary.  */
	  for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
	    {
	      if (dump_file)
		fprintf (dump_file, " Indirect call\n");
	      if (propagate_unknown_call
			   (node, e, e->indirect_info->ecf_flags,
			    cur_summary, cur_summary_lto,
			    nontrivial_scc))
		{
		  changed = true;
		  remove_useless_summaries (node, &cur_summary,
					    &cur_summary_lto,
					    cur_ecf_flags);
		  /* Both summaries may have just been removed.  */
		  if (!cur_summary && !cur_summary_lto)
		    break;
		}
	    }

	  if (!cur_summary && !cur_summary_lto)
	    continue;

	  for (cgraph_edge *callee_edge = cur->callees; callee_edge;
	       callee_edge = callee_edge->next_callee)
	    {
	      int flags = flags_from_decl_or_type (callee_edge->callee->decl);
	      modref_summary *callee_summary = NULL;
	      modref_summary_lto *callee_summary_lto = NULL;
	      struct cgraph_node *callee;

	      /* Inlined edges are handled via their caller; non-looping
		 const/novops callees contribute nothing.  */
	      if (!callee_edge->inline_failed
		 || ((flags & (ECF_CONST | ECF_NOVOPS))
		     && !(flags & ECF_LOOPING_CONST_OR_PURE)))
		continue;

	      /* Get the callee and its summary. */
	      enum availability avail;
	      callee = callee_edge->callee->ultimate_alias_target
			 (&avail, cur);

	      /* It is not necessary to re-process calls outside of the
		 SCC component. */
	      if (iteration > 0
		  && (!callee->aux
		      || ((struct ipa_dfs_info *)cur->aux)->scc_no
			  != ((struct ipa_dfs_info *)callee->aux)->scc_no))
		continue;

	      if (dump_file)
		fprintf (dump_file, " Call to %s\n",
			 callee_edge->callee->dump_name ());

	      bool ignore_stores = ignore_stores_p (cur->decl, flags);

	      if (avail <= AVAIL_INTERPOSABLE)
		{
		  if (dump_file)
		    fprintf (dump_file, " Call target interposable"
			     " or not available\n");
		  changed |= propagate_unknown_call
			       (node, callee_edge, flags,
				cur_summary, cur_summary_lto,
				nontrivial_scc);
		  if (!cur_summary && !cur_summary_lto)
		    break;
		  continue;
		}

	      /* We don't know anything about CALLEE, hence we cannot tell
		 anything about the entire component. */

	      if (cur_summary
		  && !(callee_summary = optimization_summaries->get (callee)))
		{
		  if (dump_file)
		    fprintf (dump_file, " No call target summary\n");
		  changed |= propagate_unknown_call
			       (node, callee_edge, flags,
				cur_summary, NULL,
				nontrivial_scc);
		}
	      if (cur_summary_lto
		  && !(callee_summary_lto = summaries_lto->get (callee)))
		{
		  if (dump_file)
		    fprintf (dump_file, " No call target summary\n");
		  changed |= propagate_unknown_call
			       (node, callee_edge, flags,
				NULL, cur_summary_lto,
				nontrivial_scc);
		}

	      /* Side effects and nondeterminism propagate from callee to
		 caller; recursive calls always imply side effects.  */
	      if (callee_summary && !cur_summary->side_effects
		  && (callee_summary->side_effects
		      || callee_edge->recursive_p ()))
		{
		  cur_summary->side_effects = true;
		  changed = true;
		}
	      if (callee_summary_lto && !cur_summary_lto->side_effects
		  && (callee_summary_lto->side_effects
		      || callee_edge->recursive_p ()))
		{
		  cur_summary_lto->side_effects = true;
		  changed = true;
		}
	      if (callee_summary && !cur_summary->nondeterministic
		  && callee_summary->nondeterministic
		  && !ignore_nondeterminism_p (cur->decl, flags))
		{
		  cur_summary->nondeterministic = true;
		  changed = true;
		}
	      if (callee_summary_lto && !cur_summary_lto->nondeterministic
		  && callee_summary_lto->nondeterministic
		  && !ignore_nondeterminism_p (cur->decl, flags))
		{
		  cur_summary_lto->nondeterministic = true;
		  changed = true;
		}
	      if (flags & (ECF_CONST | ECF_NOVOPS))
		continue;

	      /* We can not safely optimize based on summary of callee if it
		 does not always bind to current def: it is possible that
		 memory load was optimized out earlier which may not happen in
		 the interposed variant. */
	      if (!callee_edge->binds_to_current_def_p ())
		{
		  if (cur_summary && !cur_summary->calls_interposable)
		    {
		      cur_summary->calls_interposable = true;
		      changed = true;
		    }
		  if (cur_summary_lto && !cur_summary_lto->calls_interposable)
		    {
		      cur_summary_lto->calls_interposable = true;
		      changed = true;
		    }
		  if (dump_file)
		    fprintf (dump_file, " May not bind local;"
			     " collapsing loads\n");
		}


	      auto_vec <modref_parm_map, 32> parm_map;
	      modref_parm_map chain_map;
	      /* TODO: Once we get jump functions for static chains we could
		 compute this. */
	      chain_map.parm_index = MODREF_UNKNOWN_PARM;

	      compute_parm_map (callee_edge, &parm_map);

	      /* Merge in callee's information. */
	      if (callee_summary)
		{
		  changed |= cur_summary->loads->merge
			       (node->decl, callee_summary->loads,
				&parm_map, &chain_map, !first);
		  if (!ignore_stores)
		    {
		      changed |= cur_summary->stores->merge
				   (node->decl, callee_summary->stores,
				    &parm_map, &chain_map, !first);
		      if (!cur_summary->writes_errno
			  && callee_summary->writes_errno)
			{
			  cur_summary->writes_errno = true;
			  changed = true;
			}
		    }
		}
	      if (callee_summary_lto)
		{
		  changed |= cur_summary_lto->loads->merge
			       (node->decl, callee_summary_lto->loads,
				&parm_map, &chain_map, !first);
		  if (!ignore_stores)
		    {
		      changed |= cur_summary_lto->stores->merge
				   (node->decl, callee_summary_lto->stores,
				    &parm_map, &chain_map, !first);
		      if (!cur_summary_lto->writes_errno
			  && callee_summary_lto->writes_errno)
			{
			  cur_summary_lto->writes_errno = true;
			  changed = true;
			}
		    }
		}
	      if (changed)
		remove_useless_summaries (node, &cur_summary,
					  &cur_summary_lto,
					  cur_ecf_flags);
	      if (!cur_summary && !cur_summary_lto)
		break;
	      if (dump_file && changed)
		{
		  if (cur_summary)
		    cur_summary->dump (dump_file);
		  if (cur_summary_lto)
		    cur_summary_lto->dump (dump_file);
		  dump_modref_edge_summaries (dump_file, node, 4);
		}
	    }
	}
      iteration++;
      first = false;
    }
  if (dump_file)
    fprintf (dump_file,
	     "Propagation finished in %i iterations\n", iteration);
  /* After fixpoint, see whether functions can be declared const or
     pure: no stores and no nondeterminism is required; additionally no
     loads and no interposable calls for const.  */
  bool pureconst = false;
  for (struct cgraph_node *cur = component_node; cur;
       cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
    if (!cur->inlined_to && opt_for_fn (cur->decl, flag_ipa_pure_const))
      {
	modref_summary *summary = optimization_summaries
				  ? optimization_summaries->get (cur)
				  : NULL;
	modref_summary_lto *summary_lto = summaries_lto
					  ? summaries_lto->get (cur)
					  : NULL;
	if (summary && !summary->stores->every_base && !summary->stores->bases
	    && !summary->nondeterministic)
	  {
	    if (!summary->loads->every_base && !summary->loads->bases
		&& !summary->calls_interposable)
	      pureconst |= ipa_make_function_const
		     (cur, summary->side_effects, false);
	    else
	      pureconst |= ipa_make_function_pure
		     (cur, summary->side_effects, false);
	  }
	if (summary_lto && !summary_lto->stores->every_base
	    && !summary_lto->stores->bases && !summary_lto->nondeterministic)
	  {
	    if (!summary_lto->loads->every_base && !summary_lto->loads->bases
		&& !summary_lto->calls_interposable)
	      pureconst |= ipa_make_function_const
		     (cur, summary_lto->side_effects, false);
	    else
	      pureconst |= ipa_make_function_pure
		     (cur, summary_lto->side_effects, false);
	  }
      }
  return pureconst;
}
4941
4942 /* Dump results of propagation in SCC rooted in COMPONENT_NODE. */
4943
4944 static void
4945 modref_propagate_dump_scc (cgraph_node *component_node)
4946 {
4947 for (struct cgraph_node *cur = component_node; cur;
4948 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4949 if (!cur->inlined_to)
4950 {
4951 modref_summary *cur_summary = optimization_summaries
4952 ? optimization_summaries->get (cur)
4953 : NULL;
4954 modref_summary_lto *cur_summary_lto = summaries_lto
4955 ? summaries_lto->get (cur)
4956 : NULL;
4957
4958 fprintf (dump_file, "Propagated modref for %s%s%s\n",
4959 cur->dump_name (),
4960 TREE_READONLY (cur->decl) ? " (const)" : "",
4961 DECL_PURE_P (cur->decl) ? " (pure)" : "");
4962 if (optimization_summaries)
4963 {
4964 if (cur_summary)
4965 cur_summary->dump (dump_file);
4966 else
4967 fprintf (dump_file, " Not tracked\n");
4968 }
4969 if (summaries_lto)
4970 {
4971 if (cur_summary_lto)
4972 cur_summary_lto->dump (dump_file);
4973 else
4974 fprintf (dump_file, " Not tracked (lto)\n");
4975 }
4976 }
4977 }
4978
4979 /* Determine EAF flags know for call E with CALLEE_ECF_FLAGS and ARG. */
4980
4981 int
4982 implicit_eaf_flags_for_edge_and_arg (cgraph_edge *e, int callee_ecf_flags,
4983 bool ignore_stores, int arg)
4984 {
4985 /* Returning the value is already accounted to at local propagation. */
4986 int implicit_flags = EAF_NOT_RETURNED_DIRECTLY
4987 | EAF_NOT_RETURNED_INDIRECTLY;
4988 if (ignore_stores)
4989 implicit_flags |= ignore_stores_eaf_flags;
4990 if (callee_ecf_flags & ECF_PURE)
4991 implicit_flags |= implicit_pure_eaf_flags;
4992 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
4993 implicit_flags |= implicit_const_eaf_flags;
4994 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4995 if (fnspec_sum)
4996 {
4997 attr_fnspec fnspec (fnspec_sum->fnspec);
4998 implicit_flags |= fnspec.arg_eaf_flags (arg);
4999 }
5000 return implicit_flags;
5001 }
5002
5003 /* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
5004 and SUMMARY_LTO to CUR_SUMMARY_LTO.
5005 Return true if something changed. */
5006
5007 static bool
5008 modref_merge_call_site_flags (escape_summary *sum,
5009 modref_summary *cur_summary,
5010 modref_summary_lto *cur_summary_lto,
5011 modref_summary *summary,
5012 modref_summary_lto *summary_lto,
5013 tree caller,
5014 cgraph_edge *e,
5015 int caller_ecf_flags,
5016 int callee_ecf_flags,
5017 bool binds_to_current_def)
5018 {
5019 escape_entry *ee;
5020 unsigned int i;
5021 bool changed = false;
5022 bool ignore_stores = ignore_stores_p (caller, callee_ecf_flags);
5023
5024 /* Return early if we have no useful info to propagate. */
5025 if ((!cur_summary
5026 || (!cur_summary->arg_flags.length ()
5027 && !cur_summary->static_chain_flags
5028 && !cur_summary->retslot_flags))
5029 && (!cur_summary_lto
5030 || (!cur_summary_lto->arg_flags.length ()
5031 && !cur_summary_lto->static_chain_flags
5032 && !cur_summary_lto->retslot_flags)))
5033 return false;
5034
5035 FOR_EACH_VEC_ELT (sum->esc, i, ee)
5036 {
5037 int flags = 0;
5038 int flags_lto = 0;
5039 int implicit_flags = implicit_eaf_flags_for_edge_and_arg
5040 (e, callee_ecf_flags, ignore_stores, ee->arg);
5041
5042 if (summary && ee->arg < summary->arg_flags.length ())
5043 flags = summary->arg_flags[ee->arg];
5044 if (summary_lto
5045 && ee->arg < summary_lto->arg_flags.length ())
5046 flags_lto = summary_lto->arg_flags[ee->arg];
5047 if (!ee->direct)
5048 {
5049 flags = deref_flags (flags, ignore_stores);
5050 flags_lto = deref_flags (flags_lto, ignore_stores);
5051 }
5052 if (ignore_stores)
5053 implicit_flags |= ignore_stores_eaf_flags;
5054 if (callee_ecf_flags & ECF_PURE)
5055 implicit_flags |= implicit_pure_eaf_flags;
5056 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
5057 implicit_flags |= implicit_const_eaf_flags;
5058 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
5059 if (fnspec_sum)
5060 {
5061 attr_fnspec fnspec (fnspec_sum->fnspec);
5062 implicit_flags |= fnspec.arg_eaf_flags (ee->arg);
5063 }
5064 if (!ee->direct)
5065 implicit_flags = deref_flags (implicit_flags, ignore_stores);
5066 flags |= implicit_flags;
5067 flags_lto |= implicit_flags;
5068 if (!binds_to_current_def && (flags || flags_lto))
5069 {
5070 flags = interposable_eaf_flags (flags, implicit_flags);
5071 flags_lto = interposable_eaf_flags (flags_lto, implicit_flags);
5072 }
5073 if (!(flags & EAF_UNUSED)
5074 && cur_summary && ee->parm_index < (int)cur_summary->arg_flags.length ())
5075 {
5076 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
5077 ? cur_summary->retslot_flags
5078 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
5079 ? cur_summary->static_chain_flags
5080 : cur_summary->arg_flags[ee->parm_index];
5081 if ((f & flags) != f)
5082 {
5083 f = remove_useless_eaf_flags
5084 (f & flags, caller_ecf_flags,
5085 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
5086 changed = true;
5087 }
5088 }
5089 if (!(flags_lto & EAF_UNUSED)
5090 && cur_summary_lto
5091 && ee->parm_index < (int)cur_summary_lto->arg_flags.length ())
5092 {
5093 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
5094 ? cur_summary_lto->retslot_flags
5095 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
5096 ? cur_summary_lto->static_chain_flags
5097 : cur_summary_lto->arg_flags[ee->parm_index];
5098 if ((f & flags_lto) != f)
5099 {
5100 f = remove_useless_eaf_flags
5101 (f & flags_lto, caller_ecf_flags,
5102 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
5103 changed = true;
5104 }
5105 }
5106 }
5107 return changed;
5108 }
5109
/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
   and propagate arg flags.  Iterates until the merged EAF flags of all
   functions in the component reach a fixpoint.  */

static void
modref_propagate_flags_in_scc (cgraph_node *component_node)
{
  bool changed = true;
  int iteration = 0;

  while (changed)
    {
      changed = false;
      for (struct cgraph_node *cur = component_node; cur;
	   cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
	{
	  /* Summaries of inline clones live in the node they were
	     inlined to.  */
	  cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
	  modref_summary *cur_summary = optimization_summaries
					? optimization_summaries->get (node)
					: NULL;
	  modref_summary_lto *cur_summary_lto = summaries_lto
						? summaries_lto->get (node)
						: NULL;

	  if (!cur_summary && !cur_summary_lto)
	    continue;
	  int caller_ecf_flags = flags_from_decl_or_type (cur->decl);

	  if (dump_file)
	    fprintf (dump_file, " Processing %s%s%s\n",
		     cur->dump_name (),
		     TREE_READONLY (cur->decl) ? " (const)" : "",
		     DECL_PURE_P (cur->decl) ? " (pure)" : "");

	  /* Indirect calls: no callee summary is available, merge only
	     escape info and implicit flags.  */
	  for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
	    {
	      escape_summary *sum = escape_summaries->get (e);

	      if (!sum || (e->indirect_info->ecf_flags
			   & (ECF_CONST | ECF_NOVOPS)))
		continue;

	      changed |= modref_merge_call_site_flags
			      (sum, cur_summary, cur_summary_lto,
			       NULL, NULL,
			       node->decl,
			       e,
			       caller_ecf_flags,
			       e->indirect_info->ecf_flags,
			       false);
	    }

	  if (!cur_summary && !cur_summary_lto)
	    continue;

	  for (cgraph_edge *callee_edge = cur->callees; callee_edge;
	       callee_edge = callee_edge->next_callee)
	    {
	      int ecf_flags = flags_from_decl_or_type
				 (callee_edge->callee->decl);
	      modref_summary *callee_summary = NULL;
	      modref_summary_lto *callee_summary_lto = NULL;
	      struct cgraph_node *callee;

	      /* Const/novops callees and inlined edges have nothing to
		 merge.  */
	      if (ecf_flags & (ECF_CONST | ECF_NOVOPS)
		  || !callee_edge->inline_failed)
		continue;

	      /* Get the callee and its summary. */
	      enum availability avail;
	      callee = callee_edge->callee->ultimate_alias_target
			 (&avail, cur);

	      /* It is not necessary to re-process calls outside of the
		 SCC component. */
	      if (iteration > 0
		  && (!callee->aux
		      || ((struct ipa_dfs_info *)cur->aux)->scc_no
			  != ((struct ipa_dfs_info *)callee->aux)->scc_no))
		continue;

	      escape_summary *sum = escape_summaries->get (callee_edge);
	      if (!sum)
		continue;

	      if (dump_file)
		fprintf (dump_file, " Call to %s\n",
			 callee_edge->callee->dump_name ());

	      /* Callee summaries are only usable when the target is
		 known and the call could be inlined.  */
	      if (avail <= AVAIL_INTERPOSABLE
		  || callee_edge->call_stmt_cannot_inline_p)
		;
	      else
		{
		  if (cur_summary)
		    callee_summary = optimization_summaries->get (callee);
		  if (cur_summary_lto)
		    callee_summary_lto = summaries_lto->get (callee);
		}
	      changed |= modref_merge_call_site_flags
			      (sum, cur_summary, cur_summary_lto,
			       callee_summary, callee_summary_lto,
			       node->decl,
			       callee_edge,
			       caller_ecf_flags,
			       ecf_flags,
			       callee->binds_to_current_def_p ());
	      if (dump_file && changed)
		{
		  if (cur_summary)
		    cur_summary->dump (dump_file);
		  if (cur_summary_lto)
		    cur_summary_lto->dump (dump_file);
		}
	    }
	}
      iteration++;
    }
  if (dump_file)
    fprintf (dump_file,
	     "Propagation of flags finished in %i iterations\n", iteration);
}
5231
5232 } /* ANON namespace. */
5233
/* Call EDGE was inlined; merge summary from callee to the caller.

   This folds the callee's load/store trees, side effect and
   non-determinism info, and escape/EAF flags into the summary of the
   function the callee was inlined into, remapping callee parameters to
   caller parameters, and finally removes the callee's summaries.  */

void
ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
{
  if (!summaries && !summaries_lto)
    return;

  /* Summaries of inline clones live on the inlined_to node.  */
  struct cgraph_node *to = (edge->caller->inlined_to
			    ? edge->caller->inlined_to : edge->caller);
  class modref_summary *to_info = summaries ? summaries->get (to) : NULL;
  class modref_summary_lto *to_info_lto = summaries_lto
					  ? summaries_lto->get (to) : NULL;

  /* If the caller has no summary there is nothing to merge into; just
     drop all info attached to the callee.  */
  if (!to_info && !to_info_lto)
    {
      if (summaries)
	summaries->remove (edge->callee);
      if (summaries_lto)
	summaries_lto->remove (edge->callee);
      remove_modref_edge_summaries (edge->callee);
      return;
    }

  class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
				      : NULL;
  class modref_summary_lto *callee_info_lto
		 = summaries_lto ? summaries_lto->get (edge->callee) : NULL;
  int flags = flags_from_decl_or_type (edge->callee->decl);
  /* Combine in outer flags.  Walk up the chain of functions EDGE->caller
     was inlined into; the loop exits with N being the outermost
     (non-inlined) function, whose flags are added last.  */
  cgraph_node *n;
  for (n = edge->caller; n->inlined_to; n = n->callers->caller)
    flags |= flags_from_decl_or_type (n->decl);
  flags |= flags_from_decl_or_type (n->decl);
  bool ignore_stores = ignore_stores_p (edge->caller->decl, flags);

  /* With no callee summary we must be conservative: collapse the caller
     trees to "accesses everything" (unless ECF flags prove otherwise).  */
  if (!callee_info && to_info)
    {
      if (!(flags & (ECF_CONST | ECF_NOVOPS)))
	to_info->loads->collapse ();
      if (!ignore_stores)
	to_info->stores->collapse ();
    }
  if (!callee_info_lto && to_info_lto)
    {
      if (!(flags & (ECF_CONST | ECF_NOVOPS)))
	to_info_lto->loads->collapse ();
      if (!ignore_stores)
	to_info_lto->stores->collapse ();
    }
  /* Merge side effects and non-determinism.
     PURE/CONST flags makes functions deterministic and if there is
     no LOOPING_CONST_OR_PURE they also have no side effects. */
  if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
      || (flags & ECF_LOOPING_CONST_OR_PURE))
    {
      if (to_info)
	{
	  if (!callee_info || callee_info->side_effects)
	    to_info->side_effects = true;
	  if ((!callee_info || callee_info->nondeterministic)
	      && !ignore_nondeterminism_p (edge->caller->decl, flags))
	    to_info->nondeterministic = true;
	}
      if (to_info_lto)
	{
	  if (!callee_info_lto || callee_info_lto->side_effects)
	    to_info_lto->side_effects = true;
	  if ((!callee_info_lto || callee_info_lto->nondeterministic)
	      && !ignore_nondeterminism_p (edge->caller->decl, flags))
	    to_info_lto->nondeterministic = true;
	}
    }
  /* Merge the callee's load/store access trees into the caller's,
     translating callee parameter indices via the parm map.  */
  if (callee_info || callee_info_lto)
    {
      auto_vec <modref_parm_map, 32> parm_map;
      modref_parm_map chain_map;
      /* TODO: Once we get jump functions for static chains we could
	 compute parm_index. */

      compute_parm_map (edge, &parm_map);

      if (!ignore_stores)
	{
	  if (to_info && callee_info)
	    to_info->stores->merge (to->decl, callee_info->stores, &parm_map,
				    &chain_map, false);
	  if (to_info_lto && callee_info_lto)
	    to_info_lto->stores->merge (to->decl, callee_info_lto->stores,
					&parm_map, &chain_map, false);
	}
      if (!(flags & (ECF_CONST | ECF_NOVOPS)))
	{
	  if (to_info && callee_info)
	    to_info->loads->merge (to->decl, callee_info->loads, &parm_map,
				   &chain_map, false);
	  if (to_info_lto && callee_info_lto)
	    to_info_lto->loads->merge (to->decl, callee_info_lto->loads,
				       &parm_map, &chain_map, false);
	}
    }

  /* Now merge escape summaries.
     For every escape to the callee we need to merge callee flags
     and remap callee's escapes. */
  class escape_summary *sum = escape_summaries->get (edge);
  int max_escape = -1;
  escape_entry *ee;
  unsigned int i;

  /* First find the highest callee argument index mentioned, so EMAP can
     be sized; indexed by callee argument, EMAP collects the caller
     escape entries the argument maps to.  */
  if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
    FOR_EACH_VEC_ELT (sum->esc, i, ee)
      if ((int)ee->arg > max_escape)
	max_escape = ee->arg;

  auto_vec <vec <struct escape_map>, 32> emap (max_escape + 1);
  emap.safe_grow (max_escape + 1, true);
  for (i = 0; (int)i < max_escape + 1; i++)
    emap[i] = vNULL;

  if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
    FOR_EACH_VEC_ELT (sum->esc, i, ee)
      {
	bool needed = false;
	int implicit_flags = implicit_eaf_flags_for_edge_and_arg
				(edge, flags, ignore_stores,
				 ee->arg);
	if (!ee->direct)
	  implicit_flags = deref_flags (implicit_flags, ignore_stores);
	if (to_info && (int)to_info->arg_flags.length () > ee->parm_index)
	  {
	    int flags = callee_info
			&& callee_info->arg_flags.length () > ee->arg
			? callee_info->arg_flags[ee->arg] : 0;
	    if (!ee->direct)
	      flags = deref_flags (flags, ignore_stores);
	    flags |= ee->min_flags | implicit_flags;
	    /* Pick the caller flag slot corresponding to the escape
	       destination: return slot, static chain, or a parameter.  */
	    eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
			     ? to_info->retslot_flags
			     : ee->parm_index == MODREF_STATIC_CHAIN_PARM
			     ? to_info->static_chain_flags
			     : to_info->arg_flags[ee->parm_index];
	    f &= flags;
	    if (f)
	      needed = true;
	  }
	if (to_info_lto
	    && (int)to_info_lto->arg_flags.length () > ee->parm_index)
	  {
	    int flags = callee_info_lto
			&& callee_info_lto->arg_flags.length () > ee->arg
			? callee_info_lto->arg_flags[ee->arg] : 0;
	    if (!ee->direct)
	      flags = deref_flags (flags, ignore_stores);
	    flags |= ee->min_flags | implicit_flags;
	    eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
			     ? to_info_lto->retslot_flags
			     : ee->parm_index == MODREF_STATIC_CHAIN_PARM
			     ? to_info_lto->static_chain_flags
			     : to_info_lto->arg_flags[ee->parm_index];
	    f &= flags;
	    if (f)
	      needed = true;
	  }
	struct escape_map entry = {ee->parm_index, ee->direct};
	if (needed)
	  emap[ee->arg].safe_push (entry);
      }
  update_escape_summary (edge->callee, emap, ignore_stores);
  for (i = 0; (int)i < max_escape + 1; i++)
    emap[i].release ();
  if (sum)
    escape_summaries->remove (edge);

  /* Drop caller summaries that became useless; otherwise dump the
     update.  Either way the callee's summary is now dead.  */
  if (summaries)
    {
      if (to_info && !to_info->useful_p (flags))
	{
	  if (dump_file)
	    fprintf (dump_file, "Removed mod-ref summary for %s\n",
		     to->dump_name ());
	  summaries->remove (to);
	  to_info = NULL;
	}
      else if (to_info && dump_file)
	{
	  if (dump_file)
	    fprintf (dump_file, "Updated mod-ref summary for %s\n",
		     to->dump_name ());
	  to_info->dump (dump_file);
	}
      if (callee_info)
	summaries->remove (edge->callee);
    }
  if (summaries_lto)
    {
      if (to_info_lto && !to_info_lto->useful_p (flags))
	{
	  if (dump_file)
	    fprintf (dump_file, "Removed mod-ref summary for %s\n",
		     to->dump_name ());
	  summaries_lto->remove (to);
	  to_info_lto = NULL;
	}
      else if (to_info_lto && dump_file)
	{
	  if (dump_file)
	    fprintf (dump_file, "Updated mod-ref summary for %s\n",
		     to->dump_name ());
	  to_info_lto->dump (dump_file);
	}
      if (callee_info_lto)
	summaries_lto->remove (edge->callee);
    }
  if (!to_info && !to_info_lto)
    remove_modref_edge_summaries (to);
  return;
}
5452
/* Run the IPA pass. This will take a function's summaries and calls and
   construct new summaries which represent a transitive closure. So that
   summary of an analyzed function contains information about the loads and
   stores that the function or any function that it calls does. */

unsigned int
pass_ipa_modref::execute (function *)
{
  if (!summaries && !summaries_lto)
    return 0;
  bool pureconst = false;

  /* Promote the analysis-time summaries to optimization summaries that
     the alias oracle will query from now on.  */
  if (optimization_summaries)
    ggc_delete (optimization_summaries);
  optimization_summaries = summaries;
  summaries = NULL;

  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
					 symtab->cgraph_count);
  int order_pos;
  order_pos = ipa_reduced_postorder (order, true, ignore_edge);
  int i;

  /* Iterate over all strongly connected components in post-order. */
  for (i = 0; i < order_pos; i++)
    {
      /* Get the component's representative. That's just any node in the
	 component from which we can traverse the entire component. */
      struct cgraph_node *component_node = order[i];

      if (dump_file)
	fprintf (dump_file, "\n\nStart of SCC component\n");

      /* Propagate loads/stores first, then EAF flags, then finalize the
	 summaries of all members of the component.  */
      pureconst |= modref_propagate_in_scc (component_node);
      modref_propagate_flags_in_scc (component_node);
      if (optimization_summaries)
	for (struct cgraph_node *cur = component_node; cur;
	     cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
	  if (modref_summary *sum = optimization_summaries->get (cur))
	    sum->finalize (cur->decl);
      if (dump_file)
	modref_propagate_dump_scc (component_node);
    }
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    update_signature (node);
  if (summaries_lto)
    ((modref_summaries_lto *)summaries_lto)->propagated = true;
  ipa_free_postorder_info ();
  free (order);
  /* The per-edge helper summaries are only needed during propagation.  */
  delete fnspec_summaries;
  fnspec_summaries = NULL;
  delete escape_summaries;
  escape_summaries = NULL;

  /* If we possibly made constructors const/pure we may need to remove
     them. */
  return pureconst ? TODO_remove_functions : 0;
}
5512
5513 /* Summaries must stay alive until end of compilation. */
5514
5515 void
5516 ipa_modref_cc_finalize ()
5517 {
5518 if (optimization_summaries)
5519 ggc_delete (optimization_summaries);
5520 optimization_summaries = NULL;
5521 if (summaries_lto)
5522 ggc_delete (summaries_lto);
5523 summaries_lto = NULL;
5524 if (fnspec_summaries)
5525 delete fnspec_summaries;
5526 fnspec_summaries = NULL;
5527 if (escape_summaries)
5528 delete escape_summaries;
5529 escape_summaries = NULL;
5530 }
5531
5532 #include "gt-ipa-modref.h"