1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
24
25    This must be run after inlining decisions have been made, since
26    otherwise the local sets will not contain information that is
27    consistent with the post-inlined state.  The global sets are not prone
28    to this problem since they are by definition transitive.  */
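/* For illustration (hypothetical examples, not code in this file):
   a function like

     int sq (int x) { return x * x; }

   depends only on its arguments and can be const; one like

     int get_counter (void) { return counter; }

   merely reads global memory and is at best pure; a function that
   writes global memory or performs I/O is neither.  */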
29
30 /* The code in this module is called by the ipa pass manager.  It
31    should be one of the later passes since its information is used by
32    the rest of the compilation.  */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
43 #include "cgraph.h"
44 #include "diagnostic.h"
45 #include "calls.h"
46 #include "cfganal.h"
47 #include "tree-eh.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57 #include "intl.h"
58 #include "opts.h"
59 #include "ssa.h"
60 #include "alloc-pool.h"
61 #include "symbol-summary.h"
62 #include "ipa-prop.h"
63 #include "ipa-fnsummary.h"
64 #include "symtab-thunks.h"
65 #include "dbgcnt.h"
66
67 /* Lattice values for const and pure functions. Everything starts out
68 being const, then may drop to pure and then neither depending on
69 what is found. */
70 enum pure_const_state_e
71 {
72 IPA_CONST,
73 IPA_PURE,
74 IPA_NEITHER
75 };
76
77 static const char *pure_const_names[3] = {"const", "pure", "neither"};
78
79 enum malloc_state_e
80 {
81 STATE_MALLOC_TOP,
82 STATE_MALLOC,
83 STATE_MALLOC_BOTTOM
84 };
85
86 static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
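/* As with pure_const_state_e above, these form a small lattice:
   STATE_MALLOC_TOP is the optimistic starting value, STATE_MALLOC marks
   a function whose result behaves like a fresh allocation, and
   STATE_MALLOC_BOTTOM is the pessimistic fallback once the
   malloc-candidate check fails.  */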
87
88 /* Holder for the const_state. There is one of these per function
89 decl. */
90 class funct_state_d
91 {
92 public:
93 funct_state_d (): pure_const_state (IPA_NEITHER),
94 state_previously_known (IPA_NEITHER), looping_previously_known (true),
95 looping (true), can_throw (true), can_free (true),
96 malloc_state (STATE_MALLOC_BOTTOM) {}
97
98 funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
99 state_previously_known (s.state_previously_known),
100 looping_previously_known (s.looping_previously_known),
101 looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
102 malloc_state (s.malloc_state) {}
103
104 /* See above. */
105 enum pure_const_state_e pure_const_state;
106   /* What the user set here; we can always be sure about this.  */
107 enum pure_const_state_e state_previously_known;
108 bool looping_previously_known;
109
110   /* True if the function could possibly loop forever.  There are a
111      lot of ways that this could be determined.  We are pretty
112      conservative here.  While it is possible to CSE pure and const
113      calls, it is not legal to have DCE get rid of the call if there
114      is a possibility that the call could loop forever, since this is
115      a behavioral change.  */
116 bool looping;
117
118 bool can_throw;
119
120   /* If the function can call free, munmap or otherwise make previously
121      non-trapping memory accesses trapping.  */
122 bool can_free;
123
124 enum malloc_state_e malloc_state;
125 };
126
127 typedef class funct_state_d * funct_state;
128
129 /* The storage of the funct_state is abstracted because there is the
130 possibility that it may be desirable to move this to the cgraph
131 local info. */
132
133 class funct_state_summary_t:
134 public fast_function_summary <funct_state_d *, va_heap>
135 {
136 public:
137 funct_state_summary_t (symbol_table *symtab):
138 fast_function_summary <funct_state_d *, va_heap> (symtab) {}
139
140 void insert (cgraph_node *, funct_state_d *state) final override;
141 void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
142 funct_state_d *src_data,
143 funct_state_d *dst_data) final override;
144 };
145
146 static funct_state_summary_t *funct_state_summaries = NULL;
147
148 static bool gate_pure_const (void);
149
150 namespace {
151
152 const pass_data pass_data_ipa_pure_const =
153 {
154 IPA_PASS, /* type */
155 "pure-const", /* name */
156 OPTGROUP_NONE, /* optinfo_flags */
157 TV_IPA_PURE_CONST, /* tv_id */
158 0, /* properties_required */
159 0, /* properties_provided */
160 0, /* properties_destroyed */
161 0, /* todo_flags_start */
162 0, /* todo_flags_finish */
163 };
164
165 class pass_ipa_pure_const : public ipa_opt_pass_d
166 {
167 public:
168 pass_ipa_pure_const(gcc::context *ctxt);
169
170 /* opt_pass methods: */
171 bool gate (function *) final override { return gate_pure_const (); }
172 unsigned int execute (function *fun) final override;
173
174 void register_hooks (void);
175
176 private:
177 bool init_p;
178 }; // class pass_ipa_pure_const
179
180 } // anon namespace
181
182 /* Try to guess if the function body will always be visible to the
183    compiler when compiling the call and whether the compiler will be
184    able to propagate the information by itself.  */
185
186 static bool
187 function_always_visible_to_compiler_p (tree decl)
188 {
189 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
190 || DECL_COMDAT (decl));
191 }
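/* For example, a file-scope "static" helper, a function declared
   inline, or a COMDAT function whose body is emitted in every
   translation unit that uses it is normally visible at each call
   site, matching the predicate above.  */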
192
193 /* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
194    is true if the function is known to be finite.  The diagnostic is
195    controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
196    OPTION; this function may initialize it, and it is always returned
197    by the function.  */
198
199 static hash_set<tree> *
200 suggest_attribute (int option, tree decl, bool known_finite,
201 hash_set<tree> *warned_about,
202 const char * attrib_name)
203 {
204 if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
205 return warned_about;
206 if (TREE_THIS_VOLATILE (decl)
207 || (known_finite && function_always_visible_to_compiler_p (decl)))
208 return warned_about;
209
210 if (!warned_about)
211 warned_about = new hash_set<tree>;
212 if (warned_about->contains (decl))
213 return warned_about;
214 warned_about->add (decl);
215 warning_at (DECL_SOURCE_LOCATION (decl),
216 option,
217 known_finite
218 ? G_("function might be candidate for attribute %qs")
219 : G_("function might be candidate for attribute %qs"
220 " if it is known to return normally"), attrib_name);
221 return warned_about;
222 }
223
224 /* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
225 is true if the function is known to be finite. */
226
227 static void
228 warn_function_pure (tree decl, bool known_finite)
229 {
230 /* Declaring a void function pure makes no sense and is diagnosed
231 by -Wattributes because calling it would have no effect. */
232 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
233 return;
234
235 static hash_set<tree> *warned_about;
236 warned_about
237 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
238 known_finite, warned_about, "pure");
239 }
240
241 /* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
242 is true if the function is known to be finite. */
243
244 static void
245 warn_function_const (tree decl, bool known_finite)
246 {
247   /* Declaring a void function const makes no sense and is diagnosed
248 by -Wattributes because calling it would have no effect. */
249 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
250 return;
251
252 static hash_set<tree> *warned_about;
253 warned_about
254 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
255 known_finite, warned_about, "const");
256 }
257
258 /* Emit suggestion about __attribute__((malloc)) for DECL. */
259
260 static void
261 warn_function_malloc (tree decl)
262 {
263 static hash_set<tree> *warned_about;
264 warned_about
265 = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
266 true, warned_about, "malloc");
267 }
268
269 /* Emit suggestion about __attribute__((noreturn)) for DECL. */
270
271 static void
272 warn_function_noreturn (tree decl)
273 {
274 tree original_decl = decl;
275
276 static hash_set<tree> *warned_about;
277 if (!lang_hooks.missing_noreturn_ok_p (decl)
278 && targetm.warn_func_return (decl))
279 warned_about
280 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
281 true, warned_about, "noreturn");
282 }
283
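/* Emit suggestion about __attribute__((cold)) for DECL.  */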
284 void
285 warn_function_cold (tree decl)
286 {
287 tree original_decl = decl;
288
289 static hash_set<tree> *warned_about;
290 warned_about
291 = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
292 true, warned_about, "cold");
293 }
294
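/* Emit suggestion about __attribute__((returns_nonnull)) for DECL.  */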
295 void
296 warn_function_returns_nonnull (tree decl)
297 {
298 static hash_set<tree> *warned_about;
299 warned_about
300 = suggest_attribute (OPT_Wsuggest_attribute_returns_nonnull, decl,
301 true, warned_about, "returns_nonnull");
302 }
303
304 /* Check to see if the use (or definition when CHECKING_WRITE is true)
305    of variable T is legal in a function that is either pure or const.  */
306
307 static inline void
308 check_decl (funct_state local,
309 tree t, bool checking_write, bool ipa)
310 {
311   /* We do not want to do anything with volatile except mark any
312      function that uses one as not const or pure.  */
313 if (TREE_THIS_VOLATILE (t))
314 {
315 local->pure_const_state = IPA_NEITHER;
316 if (dump_file)
317 fprintf (dump_file, " Volatile operand is not const/pure\n");
318 return;
319 }
320
321 /* Do not care about a local automatic that is not static. */
322 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
323 return;
324
325   /* If the variable has the "used" attribute, treat it as if it had
326      been touched by the devil.  */
327 if (DECL_PRESERVE_P (t))
328 {
329 local->pure_const_state = IPA_NEITHER;
330 if (dump_file)
331 fprintf (dump_file, " Used static/global variable is not const/pure\n");
332 return;
333 }
334
335 /* In IPA mode we are not interested in checking actual loads and stores;
336 they will be processed at propagation time using ipa_ref. */
337 if (ipa)
338 return;
339
340 /* Since we have dealt with the locals and params cases above, if we
341 are CHECKING_WRITE, this cannot be a pure or constant
342 function. */
343 if (checking_write)
344 {
345 local->pure_const_state = IPA_NEITHER;
346 if (dump_file)
347 fprintf (dump_file, " static/global memory write is not const/pure\n");
348 return;
349 }
350
351 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
352 {
353 /* Readonly reads are safe. */
354 if (TREE_READONLY (t))
355 return; /* Read of a constant, do not change the function state. */
356 else
357 {
358 if (dump_file)
359 fprintf (dump_file, " global memory read is not const\n");
360 /* Just a regular read. */
361 if (local->pure_const_state == IPA_CONST)
362 local->pure_const_state = IPA_PURE;
363 }
364 }
365 else
366 {
367 /* Compilation level statics can be read if they are readonly
368 variables. */
369 if (TREE_READONLY (t))
370 return;
371
372 if (dump_file)
373 fprintf (dump_file, " static memory read is not const\n");
374 /* Just a regular read. */
375 if (local->pure_const_state == IPA_CONST)
376 local->pure_const_state = IPA_PURE;
377 }
378 }
379
380
381 /* Check to see if the use (or definition when CHECKING_WRITE is true)
382    of operand T is legal in a function that is either pure or const.  */
383
384 static inline void
385 check_op (funct_state local, tree t, bool checking_write)
386 {
387 t = get_base_address (t);
388 if (t && TREE_THIS_VOLATILE (t))
389 {
390 local->pure_const_state = IPA_NEITHER;
391 if (dump_file)
392 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
393 return;
394 }
395 else if (refs_local_or_readonly_memory_p (t))
396 {
397 if (dump_file)
398 fprintf (dump_file, " Indirect ref to local or readonly "
399 "memory is OK\n");
400 return;
401 }
402 else if (checking_write)
403 {
404 local->pure_const_state = IPA_NEITHER;
405 if (dump_file)
406 fprintf (dump_file, " Indirect ref write is not const/pure\n");
407 return;
408 }
409 else
410 {
411 if (dump_file)
412 fprintf (dump_file, " Indirect ref read is not const\n");
413 if (local->pure_const_state == IPA_CONST)
414 local->pure_const_state = IPA_PURE;
415 }
416 }
417
418 /* Compute the state based on ECF FLAGS and store it to STATE and LOOPING.  */
419
420 static void
421 state_from_flags (enum pure_const_state_e *state, bool *looping,
422 int flags, bool cannot_lead_to_return)
423 {
424 *looping = false;
425 if (flags & ECF_LOOPING_CONST_OR_PURE)
426 {
427 *looping = true;
428 if (dump_file && (dump_flags & TDF_DETAILS))
429 fprintf (dump_file, " looping\n");
430 }
431 if (flags & ECF_CONST)
432 {
433 *state = IPA_CONST;
434 if (dump_file && (dump_flags & TDF_DETAILS))
435 fprintf (dump_file, " const\n");
436 }
437 else if (flags & ECF_PURE)
438 {
439 *state = IPA_PURE;
440 if (dump_file && (dump_flags & TDF_DETAILS))
441 fprintf (dump_file, " pure\n");
442 }
443 else if (cannot_lead_to_return)
444 {
445 *state = IPA_PURE;
446 *looping = true;
447 if (dump_file && (dump_flags & TDF_DETAILS))
448 fprintf (dump_file, " ignoring side effects->pure looping\n");
449 }
450 else
451 {
452 if (dump_file && (dump_flags & TDF_DETAILS))
453 fprintf (dump_file, " neither\n");
454 *state = IPA_NEITHER;
455 *looping = true;
456 }
457 }
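/* A worked example of the mapping above (a sketch): flags of
   ECF_CONST | ECF_LOOPING_CONST_OR_PURE yield IPA_CONST with LOOPING
   set; plain ECF_PURE yields IPA_PURE without LOOPING; and a call that
   cannot lead to return, e.g. ECF_NORETURN | ECF_NOTHROW, is treated
   as pure-looping because its side effects can never be observed on
   any path that returns.  */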
458
459 /* Merge STATE2 and LOOPING2 into STATE and LOOPING, keeping the
460    better of the two variants.
461    Be sure to merge looping correctly.  IPA_NEITHER functions
462    have looping 0 even if they don't have to return.  */
463
464 static inline void
465 better_state (enum pure_const_state_e *state, bool *looping,
466 enum pure_const_state_e state2, bool looping2)
467 {
468 if (state2 < *state)
469 {
470 if (*state == IPA_NEITHER)
471 *looping = looping2;
472 else
473 *looping = MIN (*looping, looping2);
474 *state = state2;
475 }
476 else if (state2 != IPA_NEITHER)
477 *looping = MIN (*looping, looping2);
478 }
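/* For instance, merging state IPA_NEITHER/looping=1 with
   state2 IPA_PURE/looping2=0 leaves IPA_PURE/looping=0: because the
   previous state was IPA_NEITHER, its looping flag is discarded
   rather than minimized, as the comment above requires.  */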
479
480 /* Merge STATE2 and LOOPING2 into STATE and LOOPING, keeping the
481    worse of the two variants.  FROM and TO are the caller and callee
482    of the call being considered.  */
483
484 static inline void
485 worse_state (enum pure_const_state_e *state, bool *looping,
486 enum pure_const_state_e state2, bool looping2,
487 struct symtab_node *from,
488 struct symtab_node *to)
489 {
490 /* Consider function:
491
492 bool a(int *p)
493 {
494 return *p==*p;
495 }
496
497 During early optimization we will turn this into:
498
499 bool a(int *p)
500 {
501 return true;
502 }
503
504    Now this function will be detected as CONST; however, when interposed it
505    may end up being just pure.  We must always assume the worst scenario here.
506  */
507 if (*state == IPA_CONST && state2 == IPA_CONST
508 && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
509 {
510 if (dump_file && (dump_flags & TDF_DETAILS))
511 fprintf (dump_file, "Dropping state to PURE because call to %s may not "
512 "bind to current def.\n", to->dump_name ());
513 state2 = IPA_PURE;
514 }
515 *state = MAX (*state, state2);
516 *looping = MAX (*looping, looping2);
517 }
518
519 /* Recognize special cases of builtins that are by themselves not const
520    but a function using them can be.  */
521 bool
522 builtin_safe_for_const_function_p (bool *looping, tree callee)
523 {
524 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
525 switch (DECL_FUNCTION_CODE (callee))
526 {
527 case BUILT_IN_RETURN:
528 case BUILT_IN_UNREACHABLE:
529 CASE_BUILT_IN_ALLOCA:
530 case BUILT_IN_STACK_SAVE:
531 case BUILT_IN_STACK_RESTORE:
532 case BUILT_IN_EH_POINTER:
533 case BUILT_IN_EH_FILTER:
534 case BUILT_IN_UNWIND_RESUME:
535 case BUILT_IN_CXA_END_CLEANUP:
536 case BUILT_IN_EH_COPY_VALUES:
537 case BUILT_IN_FRAME_ADDRESS:
538 case BUILT_IN_APPLY_ARGS:
539 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
540 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
541 case BUILT_IN_DWARF_CFA:
542 case BUILT_IN_RETURN_ADDRESS:
543 *looping = false;
544 return true;
545 case BUILT_IN_PREFETCH:
546 *looping = true;
547 return true;
548 default:
549 break;
550 }
551 return false;
552 }
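/* For example, a function that obtains scratch space with
   __builtin_alloca (released implicitly on return) can still be
   const, and one using __builtin_prefetch can be as well, though only
   as the looping variant, which keeps the call from being optimized
   away.  */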
553
554 /* Check the parameters of the function call CALL to see if
555    there are any references in the parameters that are not allowed for
556    pure or const functions.  Also check to see if this is either an
557    indirect call, a call outside the compilation unit, or a call that has
558    special attributes that may also affect the purity.  CALL is the
559    gcall node for the entire call expression.  */
560
561 static void
562 check_call (funct_state local, gcall *call, bool ipa)
563 {
564 int flags = gimple_call_flags (call);
565 tree callee_t = gimple_call_fndecl (call);
566 bool possibly_throws = stmt_could_throw_p (cfun, call);
567 bool possibly_throws_externally = (possibly_throws
568 && stmt_can_throw_external (cfun, call));
569
570 if (possibly_throws)
571 {
572 unsigned int i;
573 for (i = 0; i < gimple_num_ops (call); i++)
574 if (gimple_op (call, i)
575 && tree_could_throw_p (gimple_op (call, i)))
576 {
577 if (possibly_throws && cfun->can_throw_non_call_exceptions)
578 {
579 if (dump_file)
580 fprintf (dump_file, " operand can throw; looping\n");
581 local->looping = true;
582 }
583 if (possibly_throws_externally)
584 {
585 if (dump_file)
586 fprintf (dump_file, " operand can throw externally\n");
587 local->can_throw = true;
588 }
589 }
590 }
591
592 /* The const and pure flags are set by a variety of places in the
593 compiler (including here). If someone has already set the flags
594 for the callee, (such as for some of the builtins) we will use
595 them, otherwise we will compute our own information.
596
597 Const and pure functions have less clobber effects than other
598 functions so we process these first. Otherwise if it is a call
599 outside the compilation unit or an indirect call we punt. This
600 leaves local calls which will be processed by following the call
601 graph. */
602 if (callee_t)
603 {
604 bool call_looping;
605
606 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
607 && !nonfreeing_call_p (call))
608 local->can_free = true;
609
610 if (builtin_safe_for_const_function_p (&call_looping, callee_t))
611 {
612 worse_state (&local->pure_const_state, &local->looping,
613 IPA_CONST, call_looping,
614 NULL, NULL);
615 return;
616 }
617 /* When bad things happen to bad functions, they cannot be const
618 or pure. */
619 if (setjmp_call_p (callee_t))
620 {
621 if (dump_file)
622 fprintf (dump_file, " setjmp is not const/pure\n");
623 local->looping = true;
624 local->pure_const_state = IPA_NEITHER;
625 }
626
627 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
628 switch (DECL_FUNCTION_CODE (callee_t))
629 {
630 case BUILT_IN_LONGJMP:
631 case BUILT_IN_NONLOCAL_GOTO:
632 if (dump_file)
633 fprintf (dump_file,
634 " longjmp and nonlocal goto is not const/pure\n");
635 local->pure_const_state = IPA_NEITHER;
636 local->looping = true;
637 break;
638 default:
639 break;
640 }
641 }
642 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
643 local->can_free = true;
644
645 /* When not in IPA mode, we can still handle self recursion. */
646 if (!ipa && callee_t
647 && recursive_call_p (current_function_decl, callee_t))
648 {
649 if (dump_file)
650 fprintf (dump_file, " Recursive call can loop.\n");
651 local->looping = true;
652 }
653   /* Either the callee is unknown or we are doing local analysis.
654      Look to see if there are any bits available for the callee (such as by
655      declaration or because it is a builtin) and process solely on the basis
656      of those bits.  Handle internal calls always; those calls don't have
657      corresponding cgraph edges and thus aren't processed during
658      the propagation.  */
659 else if (!ipa || gimple_call_internal_p (call))
660 {
661 enum pure_const_state_e call_state;
662 bool call_looping;
663 if (possibly_throws && cfun->can_throw_non_call_exceptions)
664 {
665 if (dump_file)
666 fprintf (dump_file, " can throw; looping\n");
667 local->looping = true;
668 }
669 if (possibly_throws_externally)
670 {
671 if (dump_file)
672 {
673 fprintf (dump_file, " can throw externally to lp %i\n",
674 lookup_stmt_eh_lp (call));
675 if (callee_t)
676 fprintf (dump_file, " callee:%s\n",
677 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
678 }
679 local->can_throw = true;
680 }
681 if (dump_file && (dump_flags & TDF_DETAILS))
682 fprintf (dump_file, " checking flags for call:");
683 state_from_flags (&call_state, &call_looping, flags,
684 ((flags & (ECF_NORETURN | ECF_NOTHROW))
685 == (ECF_NORETURN | ECF_NOTHROW))
686 || (!flag_exceptions && (flags & ECF_NORETURN)));
687 worse_state (&local->pure_const_state, &local->looping,
688 call_state, call_looping, NULL, NULL);
689 }
690   /* Direct function calls are handled by IPA propagation.  */
691 }
692
693 /* Wrapper around check_decl for loads in local mode.  */
694
695 static bool
696 check_load (gimple *, tree op, tree, void *data)
697 {
698 if (DECL_P (op))
699 check_decl ((funct_state)data, op, false, false);
700 else
701 check_op ((funct_state)data, op, false);
702 return false;
703 }
704
705 /* Wrapper around check_decl for stores in local mode.  */
706
707 static bool
708 check_store (gimple *, tree op, tree, void *data)
709 {
710 if (DECL_P (op))
711 check_decl ((funct_state)data, op, true, false);
712 else
713 check_op ((funct_state)data, op, true);
714 return false;
715 }
716
717 /* Wrapper around check_decl for loads in ipa mode. */
718
719 static bool
720 check_ipa_load (gimple *, tree op, tree, void *data)
721 {
722 if (DECL_P (op))
723 check_decl ((funct_state)data, op, false, true);
724 else
725 check_op ((funct_state)data, op, false);
726 return false;
727 }
728
729 /* Wrapper around check_decl for stores in ipa mode. */
730
731 static bool
732 check_ipa_store (gimple *, tree op, tree, void *data)
733 {
734 if (DECL_P (op))
735 check_decl ((funct_state)data, op, true, true);
736 else
737 check_op ((funct_state)data, op, true);
738 return false;
739 }
740
741 /* Look at the statement pointed to by GSIP and figure out what
742    interesting side effects it has.  */
743 static void
744 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
745 {
746 gimple *stmt = gsi_stmt (*gsip);
747
748 if (is_gimple_debug (stmt))
749 return;
750
751   /* Do consider clobbers as side effects before IPA, so that we would rather
752      inline C++ destructors and keep clobber semantics than eliminate them.
753 
754      Similar logic is in ipa-modref.
755 
756      TODO: We may get smarter during early optimizations on these and let
757      functions containing only clobbers be optimized more.  This is a common
758      case of C++ destructors.  */
759
760 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
761 return;
762
763 if (dump_file)
764 {
765 fprintf (dump_file, " scanning: ");
766 print_gimple_stmt (dump_file, stmt, 0);
767 }
768
769 if (gimple_has_volatile_ops (stmt)
770 && !gimple_clobber_p (stmt))
771 {
772 local->pure_const_state = IPA_NEITHER;
773 if (dump_file)
774 fprintf (dump_file, " Volatile stmt is not const/pure\n");
775 }
776
777 /* Look for loads and stores. */
778 walk_stmt_load_store_ops (stmt, local,
779 ipa ? check_ipa_load : check_load,
780 ipa ? check_ipa_store : check_store);
781
782 if (gimple_code (stmt) != GIMPLE_CALL
783 && stmt_could_throw_p (cfun, stmt))
784 {
785 if (cfun->can_throw_non_call_exceptions)
786 {
787 if (dump_file)
788 fprintf (dump_file, " can throw; looping\n");
789 local->looping = true;
790 }
791 if (stmt_can_throw_external (cfun, stmt))
792 {
793 if (dump_file)
794 fprintf (dump_file, " can throw externally\n");
795 local->can_throw = true;
796 }
797 else
798 if (dump_file)
799 fprintf (dump_file, " can throw\n");
800 }
801 switch (gimple_code (stmt))
802 {
803 case GIMPLE_CALL:
804 check_call (local, as_a <gcall *> (stmt), ipa);
805 break;
806 case GIMPLE_LABEL:
807 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
808 /* Target of long jump. */
809 {
810 if (dump_file)
811 fprintf (dump_file, " nonlocal label is not const/pure\n");
812 local->pure_const_state = IPA_NEITHER;
813 }
814 break;
815 case GIMPLE_ASM:
816 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
817 {
818 if (dump_file)
819 fprintf (dump_file, " memory asm clobber is not const/pure\n");
820 /* Abandon all hope, ye who enter here. */
821 local->pure_const_state = IPA_NEITHER;
822 local->can_free = true;
823 }
824 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
825 {
826 if (dump_file)
827 fprintf (dump_file, " volatile is not const/pure\n");
828 /* Abandon all hope, ye who enter here. */
829 local->pure_const_state = IPA_NEITHER;
830 local->looping = true;
831 local->can_free = true;
832 }
833 return;
834 default:
835 break;
836 }
837 }
838
839 /* Check that RETVAL is used only in STMT and in comparisons against 0.
840    RETVAL is the return value of the function and STMT is the return stmt.  */
841
842 static bool
843 check_retval_uses (tree retval, gimple *stmt)
844 {
845 imm_use_iterator use_iter;
846 gimple *use_stmt;
847
848 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
849 if (gcond *cond = dyn_cast<gcond *> (use_stmt))
850 {
851 tree op2 = gimple_cond_rhs (cond);
852 if (!integer_zerop (op2))
853 return false;
854 }
855 else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
856 {
857 enum tree_code code = gimple_assign_rhs_code (ga);
858 if (TREE_CODE_CLASS (code) != tcc_comparison)
859 return false;
860 if (!integer_zerop (gimple_assign_rhs2 (ga)))
861 return false;
862 }
863 else if (is_gimple_debug (use_stmt))
864 ;
865 else if (use_stmt != stmt)
866 return false;
867
868 return true;
869 }
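/* A shape this accepts, sketched in C (names hypothetical):

     p = foo (n);
     if (p == 0)        ... comparison against 0: allowed
       ...
     return p;          ... the return statement itself: allowed

   Any other use of the value, e.g. storing it into a global, makes
   this return false.  */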
870
871 /* malloc_candidate_p() checks if FUN can possibly be annotated with the
872    malloc attribute.  Currently this function does a very conservative
873    analysis.  FUN is considered to be a candidate if
874    1) It returns a value of pointer type.
875    2) SSA_NAME_DEF_STMT (return_value) is either a function call or
876       a phi, and every element of the phi is either NULL or
877       SSA_NAME_DEF_STMT (element) is a function call.
878    3) The return value has immediate uses only within comparisons (gcond
879       or gassign) and the return stmt (and likewise a phi arg has immediate
880       uses only within a comparison or the phi stmt).  */
881
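/* For instance, a wrapper of this shape can qualify (a sketch; my_alloc
   stands for a callee that already has the malloc attribute, or that
   IPA propagation will discover to be malloc-like):

     char *wrap (unsigned n)
     {
       char *p = my_alloc (n);
       return p ? p : 0;
     }

   The value reaching the return is a phi of a call result and NULL,
   and its only other uses are comparisons against 0.  */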
882 #define DUMP_AND_RETURN(reason) \
883 { \
884 if (dump_file && (dump_flags & TDF_DETAILS)) \
885 fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
886 (node->dump_name ()), (reason)); \
887 return false; \
888 }
889
890 static bool
891 malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
892 bitmap visited)
893 {
894 cgraph_node *node = cgraph_node::get_create (fun->decl);
895 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
896 return true;
897
898 if (!check_retval_uses (retval, ret_stmt))
899 DUMP_AND_RETURN("Return value has uses outside return stmt"
900 " and comparisons against 0.")
901
902 gimple *def = SSA_NAME_DEF_STMT (retval);
903
904 if (gcall *call_stmt = dyn_cast<gcall *> (def))
905 {
906 tree callee_decl = gimple_call_fndecl (call_stmt);
907 if (!callee_decl)
908 return false;
909
910 if (!ipa && !DECL_IS_MALLOC (callee_decl))
911 DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
912 " non-ipa mode.")
913
914 cgraph_edge *cs = node->get_edge (call_stmt);
915 if (cs)
916 {
917 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
918 es->is_return_callee_uncaptured = true;
919 }
920 }
921
922 else if (gphi *phi = dyn_cast<gphi *> (def))
923 {
924 bool all_args_zero = true;
925 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
926 {
927 tree arg = gimple_phi_arg_def (phi, i);
928 if (integer_zerop (arg))
929 continue;
930
931 all_args_zero = false;
932 if (TREE_CODE (arg) != SSA_NAME)
933 DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
934 if (!check_retval_uses (arg, phi))
935 DUMP_AND_RETURN ("phi arg has uses outside phi"
936 " and comparisons against 0.")
937
938 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
939 if (is_a<gphi *> (arg_def))
940 {
941 if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
942 DUMP_AND_RETURN ("nested phi fail")
943 continue;
944 }
945
946 gcall *call_stmt = dyn_cast<gcall *> (arg_def);
947 if (!call_stmt)
948 	    DUMP_AND_RETURN ("phi arg is not a call_stmt.")
949
950 tree callee_decl = gimple_call_fndecl (call_stmt);
951 if (!callee_decl)
952 return false;
953 if (!ipa && !DECL_IS_MALLOC (callee_decl))
954 DUMP_AND_RETURN("callee_decl does not have malloc attribute"
955 " for non-ipa mode.")
956
957 cgraph_edge *cs = node->get_edge (call_stmt);
958 if (cs)
959 {
960 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
961 es->is_return_callee_uncaptured = true;
962 }
963 }
964
965 if (all_args_zero)
966 DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
967 }
968
969 else
970 DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
971
972 return true;
973 }
974
975 static bool
976 malloc_candidate_p (function *fun, bool ipa)
977 {
978 basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
979 edge e;
980 edge_iterator ei;
981 cgraph_node *node = cgraph_node::get_create (fun->decl);
982
983 if (EDGE_COUNT (exit_block->preds) == 0
984 || !flag_delete_null_pointer_checks)
985 return false;
986
987 auto_bitmap visited;
988 FOR_EACH_EDGE (e, ei, exit_block->preds)
989 {
990 gimple_stmt_iterator gsi = gsi_last_bb (e->src);
991 greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
992
993 if (!ret_stmt)
994 return false;
995
996 tree retval = gimple_return_retval (ret_stmt);
997 if (!retval)
998 DUMP_AND_RETURN("No return value.")
999
1000 if (TREE_CODE (retval) != SSA_NAME
1001 || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
1002 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
1003
1004 if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
1005 return false;
1006 }
1007
1008 if (dump_file && (dump_flags & TDF_DETAILS))
1009 fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
1010 IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1011 return true;
1012 }
1013
1014 #undef DUMP_AND_RETURN
1015
1016 /* Return true if the function is known to be finite.  */
1017
1018 bool
1019 finite_function_p ()
1020 {
1021   /* Const functions cannot have back edges (an
1022      indication of a possible infinite loop side
1023      effect).  */
1024 bool finite = true;
1025 if (mark_dfs_back_edges ())
1026 {
1027       /* Preheaders are needed for SCEV to work.
1028 	 Simple latches and recorded exits improve the chances that a loop
1029 	 will be proved to be finite in testcases such as loop-15.c
1030 	 and loop-24.c.  */
1031 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1032 | LOOPS_HAVE_SIMPLE_LATCHES
1033 | LOOPS_HAVE_RECORDED_EXITS);
1034 if (dump_file && (dump_flags & TDF_DETAILS))
1035 flow_loops_dump (dump_file, NULL, 0);
1036 if (mark_irreducible_loops ())
1037 {
1038 if (dump_file)
1039 fprintf (dump_file, " has irreducible loops\n");
1040 finite = false;
1041 }
1042 else
1043 {
1044 scev_initialize ();
1045 for (auto loop : loops_list (cfun, 0))
1046 if (!finite_loop_p (loop))
1047 {
1048 if (dump_file)
1049 fprintf (dump_file, " cannot prove finiteness of "
1050 "loop %i\n", loop->num);
1051 	      finite = false;
1052 break;
1053 }
1054 scev_finalize ();
1055 }
1056 loop_optimizer_finalize ();
1057 }
1058 return finite;
1059 }
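/* Roughly (a sketch of the split): a counted loop like
   "for (i = 0; i < n; i++)" has a trip count SCEV can compute and is
   proved finite, while a pointer chase like "while (p) p = p->next"
   generally cannot be, leaving the function marked looping.  */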
1060
1061 /* This is the main routine for finding the reference patterns for
1062 global variables within a function FN. */
1063
1064 static funct_state
1065 analyze_function (struct cgraph_node *fn, bool ipa)
1066 {
1067 tree decl = fn->decl;
1068 funct_state l;
1069 basic_block this_block;
1070
1071 l = XCNEW (class funct_state_d);
1072 l->pure_const_state = IPA_CONST;
1073 l->state_previously_known = IPA_NEITHER;
1074 l->looping_previously_known = true;
1075 l->looping = false;
1076 l->can_throw = false;
1077 l->can_free = false;
1078 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
1079 flags_from_decl_or_type (fn->decl),
1080 fn->cannot_return_p ());
1081
1082 if (fn->thunk || fn->alias)
1083 {
1084       /* Thunks get propagated through, so nothing interesting happens.  */
1085 gcc_assert (ipa);
1086 if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
1087 l->pure_const_state = IPA_NEITHER;
1088 return l;
1089 }
1090
1091 if (dump_file)
1092 {
1093 fprintf (dump_file, "\n\n local analysis of %s\n ",
1094 fn->dump_name ());
1095 }
1096
1097 push_cfun (DECL_STRUCT_FUNCTION (decl));
1098
1099 FOR_EACH_BB_FN (this_block, cfun)
1100 {
1101 gimple_stmt_iterator gsi;
1102 struct walk_stmt_info wi;
1103
1104 memset (&wi, 0, sizeof (wi));
1105 for (gsi = gsi_start_bb (this_block);
1106 !gsi_end_p (gsi);
1107 gsi_next (&gsi))
1108 {
1109 	  /* A NULL memory access terminates the BB.  These accesses are known
1110 	     to trip undefined behaviour.  gimple-ssa-isolate-paths turns them
1111 	     into volatile accesses and adds a builtin_trap call, which would
1112 	     confuse us otherwise.  */
1113 if (infer_nonnull_range_by_dereference (gsi_stmt (gsi),
1114 null_pointer_node))
1115 {
1116 if (dump_file)
1117 fprintf (dump_file, " NULL memory access; terminating BB%s\n",
1118 flag_non_call_exceptions ? "; looping" : "");
1119 if (flag_non_call_exceptions)
1120 {
1121 l->looping = true;
1122 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
1123 {
1124 if (dump_file)
1125 fprintf (dump_file, " can throw externally\n");
1126 l->can_throw = true;
1127 }
1128 }
1129 break;
1130 }
1131 check_stmt (&gsi, l, ipa);
1132 if (l->pure_const_state == IPA_NEITHER
1133 && l->looping
1134 && l->can_throw
1135 && l->can_free)
1136 goto end;
1137 }
1138 }
1139
1140 end:
1141 if (l->pure_const_state != IPA_NEITHER
1142 && !l->looping
1143 && !finite_function_p ())
1144 l->looping = true;
1145
1146 if (dump_file && (dump_flags & TDF_DETAILS))
1147 fprintf (dump_file, " checking previously known:");
1148
1149 better_state (&l->pure_const_state, &l->looping,
1150 l->state_previously_known,
1151 l->looping_previously_known);
1152 if (TREE_NOTHROW (decl))
1153 l->can_throw = false;
1154
1155 l->malloc_state = STATE_MALLOC_BOTTOM;
1156 if (DECL_IS_MALLOC (decl))
1157 l->malloc_state = STATE_MALLOC;
1158 else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1159 l->malloc_state = STATE_MALLOC_TOP;
1160 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1161 l->malloc_state = STATE_MALLOC;
1162
1163 pop_cfun ();
1164 if (dump_file)
1165 {
1166 if (l->looping)
1167 fprintf (dump_file, "Function is locally looping.\n");
1168 if (l->can_throw)
1169 fprintf (dump_file, "Function is locally throwing.\n");
1170 if (l->pure_const_state == IPA_CONST)
1171 fprintf (dump_file, "Function is locally const.\n");
1172 if (l->pure_const_state == IPA_PURE)
1173 fprintf (dump_file, "Function is locally pure.\n");
1174 if (l->can_free)
1175 fprintf (dump_file, "Function can locally free.\n");
1176 if (l->malloc_state == STATE_MALLOC)
1177 fprintf (dump_file, "Function is locally malloc.\n");
1178 }
1179 return l;
1180 }
1181
1182 void
1183 funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
1184 {
1185   /* There are some shared nodes, in particular the initializers on
1186      static declarations.  We do not need to scan them more than once
1187      since all we would be interested in are the address-of
1188      operations.  */
1189 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1190 {
1191 funct_state_d *a = analyze_function (node, true);
1192 new (state) funct_state_d (*a);
1193 free (a);
1194 }
1195 else
1196 /* Do not keep stale summaries. */
1197 funct_state_summaries->remove (node);
1198 }
1199
1200 /* Called when a new clone is inserted into the callgraph late.  */
1201
1202 void
1203 funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
1204 funct_state_d *src_data,
1205 funct_state_d *dst_data)
1206 {
1207 new (dst_data) funct_state_d (*src_data);
1208 if (dst_data->malloc_state == STATE_MALLOC
1209 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
1210 dst_data->malloc_state = STATE_MALLOC_BOTTOM;
1211 }
1212
1213 \f
1214 void
1215 pass_ipa_pure_const::
1216 register_hooks (void)
1217 {
1218 if (init_p)
1219 return;
1220
1221 init_p = true;
1222
1223 funct_state_summaries = new funct_state_summary_t (symtab);
1224 }
1225
1226
1227 /* Analyze each function in the cgraph to see if it is locally PURE or
1228 CONST. */
1229
1230 static void
1231 pure_const_generate_summary (void)
1232 {
1233 struct cgraph_node *node;
1234
1235 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1236 pass->register_hooks ();
1237
1238   /* Process all of the functions.
1239 
1240      We process AVAIL_INTERPOSABLE functions.  We cannot use the results
1241      by default, but the info can be used at LTO with -fwhole-program or
1242      when the function is cloned and the clone is AVAILABLE.  */
1243
1244 FOR_EACH_DEFINED_FUNCTION (node)
1245 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1246 {
1247 funct_state_d *a = analyze_function (node, true);
1248 new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1249 free (a);
1250 }
1251 }
1252
1253
1254 /* Serialize the ipa info for lto. */
1255
1256 static void
1257 pure_const_write_summary (void)
1258 {
1259 struct cgraph_node *node;
1260 struct lto_simple_output_block *ob
1261 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1262 unsigned int count = 0;
1263 lto_symtab_encoder_iterator lsei;
1264 lto_symtab_encoder_t encoder;
1265
1266 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1267
1268 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1269 lsei_next_function_in_partition (&lsei))
1270 {
1271 node = lsei_cgraph_node (lsei);
1272 if (node->definition && funct_state_summaries->exists (node))
1273 count++;
1274 }
1275
1276 streamer_write_uhwi_stream (ob->main_stream, count);
1277
1278 /* Process all of the functions. */
1279 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1280 lsei_next_function_in_partition (&lsei))
1281 {
1282 node = lsei_cgraph_node (lsei);
1283 funct_state_d *fs = funct_state_summaries->get (node);
1284 if (node->definition && fs != NULL)
1285 {
1286 struct bitpack_d bp;
1287 int node_ref;
1288 lto_symtab_encoder_t encoder;
1289
1290 encoder = ob->decl_state->symtab_node_encoder;
1291 node_ref = lto_symtab_encoder_encode (encoder, node);
1292 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1293
1294 /* Note that flags will need to be read in the opposite
1295 order as we are pushing the bitflags into FLAGS. */
1296 bp = bitpack_create (ob->main_stream);
1297 bp_pack_value (&bp, fs->pure_const_state, 2);
1298 bp_pack_value (&bp, fs->state_previously_known, 2);
1299 bp_pack_value (&bp, fs->looping_previously_known, 1);
1300 bp_pack_value (&bp, fs->looping, 1);
1301 bp_pack_value (&bp, fs->can_throw, 1);
1302 bp_pack_value (&bp, fs->can_free, 1);
1303 bp_pack_value (&bp, fs->malloc_state, 2);
1304 streamer_write_bitpack (&bp);
1305 }
1306 }
1307
1308 lto_destroy_simple_output_block (ob);
1309 }
1310
1311
1312 /* Deserialize the ipa info for lto. */
1313
1314 static void
1315 pure_const_read_summary (void)
1316 {
1317 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1318 struct lto_file_decl_data *file_data;
1319 unsigned int j = 0;
1320
1321 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1322 pass->register_hooks ();
1323
1324 while ((file_data = file_data_vec[j++]))
1325 {
1326 const char *data;
1327 size_t len;
1328 class lto_input_block *ib
1329 = lto_create_simple_input_block (file_data,
1330 LTO_section_ipa_pure_const,
1331 &data, &len);
1332 if (ib)
1333 {
1334 unsigned int i;
1335 unsigned int count = streamer_read_uhwi (ib);
1336
1337 for (i = 0; i < count; i++)
1338 {
1339 unsigned int index;
1340 struct cgraph_node *node;
1341 struct bitpack_d bp;
1342 funct_state fs;
1343 lto_symtab_encoder_t encoder;
1344
1345 index = streamer_read_uhwi (ib);
1346 encoder = file_data->symtab_node_encoder;
1347 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1348 index));
1349
1350 fs = funct_state_summaries->get_create (node);
1351 /* Note that the flags must be read in the opposite
1352 order in which they were written (the bitflags were
1353 pushed into FLAGS). */
1354 bp = streamer_read_bitpack (ib);
1355 fs->pure_const_state
1356 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1357 fs->state_previously_known
1358 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1359 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1360 fs->looping = bp_unpack_value (&bp, 1);
1361 fs->can_throw = bp_unpack_value (&bp, 1);
1362 fs->can_free = bp_unpack_value (&bp, 1);
1363 fs->malloc_state
1364 = (enum malloc_state_e) bp_unpack_value (&bp, 2);
1365
1366 if (dump_file)
1367 {
1368 int flags = flags_from_decl_or_type (node->decl);
1369 fprintf (dump_file, "Read info for %s ", node->dump_name ());
1370 if (flags & ECF_CONST)
1371 fprintf (dump_file, " const");
1372 if (flags & ECF_PURE)
1373 fprintf (dump_file, " pure");
1374 if (flags & ECF_NOTHROW)
1375 fprintf (dump_file, " nothrow");
1376 fprintf (dump_file, "\n pure const state: %s\n",
1377 pure_const_names[fs->pure_const_state]);
1378 fprintf (dump_file, " previously known state: %s\n",
1379 pure_const_names[fs->state_previously_known]);
1380 if (fs->looping)
1381 fprintf (dump_file," function is locally looping\n");
1382 if (fs->looping_previously_known)
1383 fprintf (dump_file," function is previously known looping\n");
1384 if (fs->can_throw)
1385 fprintf (dump_file," function is locally throwing\n");
1386 if (fs->can_free)
1387 fprintf (dump_file," function can locally free\n");
1388 fprintf (dump_file, "\n malloc state: %s\n",
1389 malloc_state_names[fs->malloc_state]);
1390 }
1391 }
1392
1393 lto_destroy_simple_input_block (file_data,
1394 LTO_section_ipa_pure_const,
1395 ib, data, len);
1396 }
1397 }
1398 }
1399
1400 /* We only propagate across edges that can throw externally and whose
1401    callee is not interposable.  */
1402
1403 static bool
1404 ignore_edge_for_nothrow (struct cgraph_edge *e)
1405 {
1406 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1407 return true;
1408
1409 enum availability avail;
1410 cgraph_node *ultimate_target
1411 = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1412 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
1413 return true;
1414 return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1415 && !e->callee->binds_to_current_def_p (e->caller))
1416 || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1417 || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
1418 }
1419
1420 /* Return true if NODE is a self-recursive function.
1421    Indirectly recursive functions appear as non-trivial strongly
1422    connected components, so we need to care about self recursion
1423    only.  */
1424
1425 static bool
1426 self_recursive_p (struct cgraph_node *node)
1427 {
1428 struct cgraph_edge *e;
1429 for (e = node->callees; e; e = e->next_callee)
1430 if (e->callee->function_symbol () == node)
1431 return true;
1432 return false;
1433 }
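/* E.g. "int f (int n) { return n ? f (n - 1) : 0; }" is self-recursive.
   Mutually recursive functions instead form a multi-node strongly
   connected component, which the cycle walk in propagate_pure_const
   already treats as possibly looping.  */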
1434
1435 /* Return true if N is a cdtor that is not const or pure.  In this case we
1436    may need to remove an unreachable function if it is marked const/pure.  */
1437
1438 static bool
1439 cdtor_p (cgraph_node *n, void *)
1440 {
1441 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1442 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1443 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1444 return false;
1445 }
1446
1447 /* Skip edges from and to nodes without ipa_pure_const enabled.
1448    Ignore unavailable symbols.  */
1449
1450 static bool
1451 ignore_edge_for_pure_const (struct cgraph_edge *e)
1452 {
1453 enum availability avail;
1454 cgraph_node *ultimate_target
1455 = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1456
1457 return (avail <= AVAIL_INTERPOSABLE
1458 || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1459 || !opt_for_fn (ultimate_target->decl,
1460 flag_ipa_pure_const));
1461 }
1462
1463 /* Return true if function should be skipped for local pure const analysis. */
1464
1465 static bool
1466 skip_function_for_local_pure_const (struct cgraph_node *node)
1467 {
1468 /* Because we do not schedule pass_fixup_cfg over whole program after early
1469 optimizations we must not promote functions that are called by already
1470 processed functions. */
1471
1472 if (function_called_by_processed_nodes_p ())
1473 {
1474 if (dump_file)
1475 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1476 return true;
1477 }
1478 /* Save some work and do not analyze functions which are interposable and
1479 do not have any non-interposable aliases. */
1480 if (node->get_availability () <= AVAIL_INTERPOSABLE
1481 && !flag_lto
1482 && !node->has_aliases_p ())
1483 {
1484 if (dump_file)
1485 fprintf (dump_file,
1486 "Function is interposable; not analyzing.\n");
1487 return true;
1488 }
1489 return false;
1490 }
1491
1492 /* Make the function const and output a warning.  If LOCAL is true,
1493    return true if anything changed.  Otherwise return true if
1494    we may have introduced removable ctors.  */
1495
1496 bool
1497 ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
1498 {
1499 bool cdtor = false;
1500
1501 if (TREE_READONLY (node->decl)
1502 && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
1503 return false;
1504 warn_function_const (node->decl, !looping);
1505 if (local && skip_function_for_local_pure_const (node))
1506 return false;
1507 if (dump_file)
1508 fprintf (dump_file, "Function found to be %sconst: %s\n",
1509 looping ? "looping " : "",
1510 node->dump_name ());
1511 if (!local && !looping)
1512 cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1513 if (!dbg_cnt (ipa_attr))
1514 return false;
1515 if (node->set_const_flag (true, looping))
1516 {
1517 if (dump_file)
1518 fprintf (dump_file,
1519 "Declaration updated to be %sconst: %s\n",
1520 looping ? "looping " : "",
1521 node->dump_name ());
1522 if (local)
1523 return true;
1524 return cdtor;
1525 }
1526 return false;
1527 }
1528
1529 /* Make the function pure and output a warning.  If LOCAL is true,
1530    return true if anything changed.  Otherwise return true if
1531    we may have introduced removable ctors.  */
1532
1533 bool
1534 ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
1535 {
1536 bool cdtor = false;
1537
1538 if (TREE_READONLY (node->decl)
1539 || (DECL_PURE_P (node->decl)
1540 && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl))))
1541 return false;
1542 warn_function_pure (node->decl, !looping);
1543 if (local && skip_function_for_local_pure_const (node))
1544 return false;
1545 if (dump_file)
1546 fprintf (dump_file, "Function found to be %spure: %s\n",
1547 looping ? "looping " : "",
1548 node->dump_name ());
1549 if (!local && !looping)
1550 cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1551 if (!dbg_cnt (ipa_attr))
1552 return false;
1553 if (node->set_pure_flag (true, looping))
1554 {
1555 if (dump_file)
1556 fprintf (dump_file,
1557 "Declaration updated to be %spure: %s\n",
1558 looping ? "looping " : "",
1559 node->dump_name ());
1560 if (local)
1561 return true;
1562 return cdtor;
1563 }
1564 return false;
1565 }
1566
1567 /* Produce transitive closure over the callgraph and compute pure/const
1568 attributes. */
1569
1570 static bool
1571 propagate_pure_const (void)
1572 {
1573 struct cgraph_node *node;
1574 struct cgraph_node *w;
1575 struct cgraph_node **order =
1576 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1577 int order_pos;
1578 int i;
1579 struct ipa_dfs_info * w_info;
1580 bool remove_p = false;
1581
1582 order_pos = ipa_reduced_postorder (order, true,
1583 ignore_edge_for_pure_const);
1584 if (dump_file)
1585 {
1586 cgraph_node::dump_cgraph (dump_file);
1587 ipa_print_order (dump_file, "reduced", order, order_pos);
1588 }
1589
1590 /* Propagate the local information through the call graph to produce
1591 the global information. All the nodes within a cycle will have
1592 the same info so we collapse cycles first. Then we can do the
1593 propagation in one pass from the leaves to the roots. */
1594 for (i = 0; i < order_pos; i++ )
1595 {
1596 enum pure_const_state_e pure_const_state = IPA_CONST;
1597 bool looping = false;
1598 int count = 0;
1599 node = order[i];
1600
1601 if (node->alias)
1602 continue;
1603
1604 if (dump_file && (dump_flags & TDF_DETAILS))
1605 fprintf (dump_file, "Starting cycle\n");
1606
1607 /* Find the worst state for any node in the cycle. */
1608 w = node;
1609 while (w && pure_const_state != IPA_NEITHER)
1610 {
1611 struct cgraph_edge *e;
1612 struct cgraph_edge *ie;
1613 int i;
1614 struct ipa_ref *ref = NULL;
1615
1616 funct_state w_l = funct_state_summaries->get_create (w);
1617 if (dump_file && (dump_flags & TDF_DETAILS))
1618 fprintf (dump_file, " Visiting %s state:%s looping %i\n",
1619 w->dump_name (),
1620 pure_const_names[w_l->pure_const_state],
1621 w_l->looping);
1622
1623 /* First merge in function body properties.
1624 We are safe to pass NULL as FROM and TO because we will take care
1625 of possible interposition when walking callees. */
1626 worse_state (&pure_const_state, &looping,
1627 w_l->pure_const_state, w_l->looping,
1628 NULL, NULL);
1629 if (pure_const_state == IPA_NEITHER)
1630 break;
1631
1632 count++;
1633
1634 /* We consider recursive cycles as possibly infinite.
1635 This might be relaxed since infinite recursion leads to stack
1636 overflow. */
1637 if (count > 1)
1638 looping = true;
1639
1640 /* Now walk the edges and merge in callee properties. */
1641 for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1642 e = e->next_callee)
1643 {
1644 enum availability avail;
1645 struct cgraph_node *y = e->callee->
1646 function_or_virtual_thunk_symbol (&avail,
1647 e->caller);
1648 enum pure_const_state_e edge_state = IPA_CONST;
1649 bool edge_looping = false;
1650
1651 if (e->recursive_p ())
1652 looping = true;
1653
1654 if (dump_file && (dump_flags & TDF_DETAILS))
1655 {
1656 fprintf (dump_file, " Call to %s",
1657 e->callee->dump_name ());
1658 }
1659 if (avail > AVAIL_INTERPOSABLE)
1660 {
1661 funct_state y_l = funct_state_summaries->get_create (y);
1662
1663 if (dump_file && (dump_flags & TDF_DETAILS))
1664 {
1665 fprintf (dump_file,
1666 " state:%s looping:%i\n",
1667 pure_const_names[y_l->pure_const_state],
1668 y_l->looping);
1669 }
1670 if (y_l->pure_const_state > IPA_PURE
1671 && e->cannot_lead_to_return_p ())
1672 {
1673 if (dump_file && (dump_flags & TDF_DETAILS))
1674 fprintf (dump_file,
1675 " Ignoring side effects"
1676 " -> pure, looping\n");
1677 edge_state = IPA_PURE;
1678 edge_looping = true;
1679 }
1680 else
1681 {
1682 edge_state = y_l->pure_const_state;
1683 edge_looping = y_l->looping;
1684 }
1685 }
1686 else if (builtin_safe_for_const_function_p (&edge_looping,
1687 y->decl))
1688 edge_state = IPA_CONST;
1689 else
1690 state_from_flags (&edge_state, &edge_looping,
1691 flags_from_decl_or_type (y->decl),
1692 e->cannot_lead_to_return_p ());
1693
1694 /* Merge the results with what we already know. */
1695 better_state (&edge_state, &edge_looping,
1696 w_l->state_previously_known,
1697 w_l->looping_previously_known);
1698 worse_state (&pure_const_state, &looping,
1699 edge_state, edge_looping, e->caller, e->callee);
1700 if (pure_const_state == IPA_NEITHER)
1701 break;
1702 }
1703
1704 	  /* Now process the indirect calls.  */
1705 for (ie = w->indirect_calls;
1706 ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1707 {
1708 enum pure_const_state_e edge_state = IPA_CONST;
1709 bool edge_looping = false;
1710
1711 if (dump_file && (dump_flags & TDF_DETAILS))
1712 fprintf (dump_file, " Indirect call");
1713 state_from_flags (&edge_state, &edge_looping,
1714 ie->indirect_info->ecf_flags,
1715 ie->cannot_lead_to_return_p ());
1716 /* Merge the results with what we already know. */
1717 better_state (&edge_state, &edge_looping,
1718 w_l->state_previously_known,
1719 w_l->looping_previously_known);
1720 worse_state (&pure_const_state, &looping,
1721 edge_state, edge_looping, NULL, NULL);
1722 if (pure_const_state == IPA_NEITHER)
1723 break;
1724 }
1725
1726 /* And finally all loads and stores. */
1727 for (i = 0; w->iterate_reference (i, ref)
1728 && pure_const_state != IPA_NEITHER; i++)
1729 {
1730 enum pure_const_state_e ref_state = IPA_CONST;
1731 bool ref_looping = false;
1732 switch (ref->use)
1733 {
1734 case IPA_REF_LOAD:
1735 /* readonly reads are safe. */
1736 if (TREE_READONLY (ref->referred->decl))
1737 break;
1738 if (dump_file && (dump_flags & TDF_DETAILS))
1739 fprintf (dump_file, " nonreadonly global var read\n");
1740 ref_state = IPA_PURE;
1741 break;
1742 case IPA_REF_STORE:
1743 if (ref->cannot_lead_to_return ())
1744 break;
1745 ref_state = IPA_NEITHER;
1746 if (dump_file && (dump_flags & TDF_DETAILS))
1747 fprintf (dump_file, " global var write\n");
1748 break;
1749 case IPA_REF_ADDR:
1750 break;
1751 default:
1752 gcc_unreachable ();
1753 }
              better_state (&ref_state, &ref_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           ref_state, ref_looping, NULL, NULL);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Result %s looping %i\n",
                 pure_const_names [pure_const_state],
                 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
        {
          struct cgraph_edge *e;
          funct_state w_l = funct_state_summaries->get (w);

          if (w_l->can_free
              || w->get_availability () == AVAIL_INTERPOSABLE
              || w->indirect_calls)
            can_free = true;

          for (e = w->callees; e && !can_free; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                function_or_virtual_thunk_symbol (&avail, e->caller);

              if (avail > AVAIL_INTERPOSABLE)
                can_free = funct_state_summaries->get (y)->can_free;
              else
                can_free = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = funct_state_summaries->get (w);
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;

          w_l->can_free = can_free;
          w->nonfreeing_fn = !can_free;
          if (!can_free && dump_file)
            fprintf (dump_file, "Function found not to call free: %s\n",
                     w->dump_name ());

          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            {
              if (this_state == IPA_NEITHER)
                this_looping = w_l->looping_previously_known;
              this_state = w_l->state_previously_known;
            }
          if (!this_looping && self_recursive_p (w))
            this_looping = true;
          if (!w_l->looping_previously_known)
            this_looping = false;

          /* All nodes within a cycle share the same info.  */
          w_l->pure_const_state = this_state;
          w_l->looping = this_looping;

          /* Inline clones share declaration with their offline copies;
             do not modify their declarations since the offline copy may
             be different.  */
          if (!w->inlined_to)
            switch (this_state)
              {
              case IPA_CONST:
                remove_p |= ipa_make_function_const (w, this_looping, false);
                break;

              case IPA_PURE:
                remove_p |= ipa_make_function_pure (w, this_looping, false);
                break;

              default:
                break;
              }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
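
/* Editor's illustration (not part of the original source): given

     int counter;
     static int get_counter (void) { return counter; }
     static int twice (void) { return get_counter () + get_counter (); }

   the propagation above typically settles on IPA_PURE for both
   functions: the IPA_REF_LOAD of a non-readonly global drops the
   lattice from IPA_CONST to IPA_PURE, and since nothing is written,
   it never falls to IPA_NEITHER.  */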

/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info *w_info;

  order_pos = ipa_reduced_postorder (order, true, ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++)
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
        continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
        {
          struct cgraph_edge *e, *ie;

          if (!TREE_NOTHROW (w->decl))
            {
              funct_state w_l = funct_state_summaries->get_create (w);

              if (w_l->can_throw
                  || w->get_availability () == AVAIL_INTERPOSABLE)
                can_throw = true;

              for (e = w->callees; e && !can_throw; e = e->next_callee)
                {
                  enum availability avail;

                  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
                    continue;

                  struct cgraph_node *y = e->callee->
                    function_or_virtual_thunk_symbol (&avail, e->caller);

                  /* We can use info about the callee only if we know it
                     cannot be interposed.  When the callee is compiled
                     with non-call exceptions, we must also check that the
                     declaration is bound to the current body, as another
                     semantically equivalent body may still throw.  */
                  if (avail <= AVAIL_INTERPOSABLE
                      || (!TREE_NOTHROW (y->decl)
                          && (funct_state_summaries->get_create (y)->can_throw
                              || (opt_for_fn (y->decl, flag_non_call_exceptions)
                                  && !e->callee->binds_to_current_def_p (w)))))
                    can_throw = true;
                }
              for (ie = w->indirect_calls; ie && !can_throw;
                   ie = ie->next_callee)
                if (ie->can_throw_external
                    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
                  can_throw = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's nothrow state, which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = funct_state_summaries->get_create (w);
          if (!can_throw && !TREE_NOTHROW (w->decl))
            {
              /* Inline clones share declaration with their offline copies;
                 do not modify their declarations since the offline copy may
                 be different.  */
              if (!w->inlined_to)
                {
                  w->set_nothrow_flag (true);
                  if (dump_file)
                    fprintf (dump_file, "Function found to be nothrow: %s\n",
                             w->dump_name ());
                }
            }
          else if (can_throw && !TREE_NOTHROW (w->decl))
            w_l->can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
}
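
/* Editor's illustration (not part of the original source): compiled
   as C++ with exceptions enabled,

     static void leaf () { }
     static void wrap () { leaf (); }
     void top () { wrap (); }

   the local analysis finds LEAF cannot throw, and the walk above then
   lets WRAP and TOP be marked nothrow as well, since every callee that
   could throw externally has been ruled out.  */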

/* Debugging function to dump state of malloc lattice.  */

DEBUG_FUNCTION
static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = funct_state_summaries->get (node);
      if (fs)
        fprintf (dump_file, "%s: %s\n", node->dump_name (),
                 malloc_state_names[fs->malloc_state]);
    }
}

/* Propagate malloc attribute across the callgraph.  */

static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
        if (!funct_state_summaries->exists (node))
          {
            funct_state fs = funct_state_summaries->get_create (node);
            fs->malloc_state = STATE_MALLOC;
          }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

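  /* Iterate until no malloc_state changes.  A state may only move
     towards STATE_MALLOC_BOTTOM, never back up, so this terminates.  */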
  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
        {
          cgraph_node *node = order[i];
          if (node->alias
              || !node->definition
              || !funct_state_summaries->exists (node))
            continue;

          funct_state l = funct_state_summaries->get (node);

          /* FIXME: add support for indirect-calls.  */
          if (node->indirect_calls)
            {
              l->malloc_state = STATE_MALLOC_BOTTOM;
              continue;
            }

          if (node->get_availability () <= AVAIL_INTERPOSABLE)
            {
              l->malloc_state = STATE_MALLOC_BOTTOM;
              continue;
            }

          if (l->malloc_state == STATE_MALLOC_BOTTOM)
            continue;

          auto_vec<cgraph_node *, 16> callees;
          for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
            {
              ipa_call_summary *es = ipa_call_summaries->get_create (cs);
              if (es && es->is_return_callee_uncaptured)
                callees.safe_push (cs->callee);
            }

          malloc_state_e new_state = l->malloc_state;
          for (unsigned j = 0; j < callees.length (); j++)
            {
              cgraph_node *callee = callees[j];
              if (!funct_state_summaries->exists (callee))
                {
                  new_state = STATE_MALLOC_BOTTOM;
                  break;
                }
              malloc_state_e callee_state
                = funct_state_summaries->get_create (callee)->malloc_state;
              if (new_state < callee_state)
                new_state = callee_state;
            }
          if (new_state != l->malloc_state)
            {
              changed = true;
              l->malloc_state = new_state;
            }
        }
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (funct_state_summaries->exists (node))
      {
        funct_state l = funct_state_summaries->get (node);
        if (!node->alias
            && l->malloc_state == STATE_MALLOC
            && !node->inlined_to
            && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Function %s found to be malloc\n",
                       node->dump_name ());

            bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
            node->set_malloc_flag (true);
            if (!malloc_decl_p && warn_suggest_attribute_malloc)
              warn_function_malloc (node->decl);
          }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}
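
/* Editor's illustration (not part of the original source): a wrapper
   such as

     void *my_alloc (unsigned n)
     {
       void *p = __builtin_malloc (n);
       if (!p)
         __builtin_abort ();
       return p;
     }

   can be discovered here: __builtin_malloc is DECL_IS_MALLOC and the
   returned pointer is not otherwise captured, so MY_ALLOC settles at
   STATE_MALLOC, gets set_malloc_flag applied, and with
   -Wsuggest-attribute=malloc the attribute is suggested.  */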

/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  bool remove_p;

  /* Nothrow makes more functions not lead to a return, which improves
     later analysis.  */
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  delete funct_state_summaries;
  return remove_p ? TODO_remove_functions : 0;
}

static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const (gcc::context *ctxt)
  : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
                    pure_const_generate_summary, /* generate_summary */
                    pure_const_write_summary, /* write_summary */
                    pure_const_read_summary, /* read_summary */
                    NULL, /* write_optimization_summary */
                    NULL, /* read_optimization_summary */
                    NULL, /* stmt_fixup */
                    0, /* function_transform_todo_flags_start */
                    NULL, /* function_transform */
                    NULL), /* variable_transform */
    init_p (false) {}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override
  {
    return new pass_local_pure_const (m_ctxt);
  }
  bool gate (function *) final override { return gate_pure_const (); }
  unsigned int execute (function *) final override;

}; // class pass_local_pure_const

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;
2191
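  /* When the node must be skipped but -Wsuggest-attribute=const or
     -Wsuggest-attribute=pure was given, the analysis below still runs
     so the suggestions can be emitted.  */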
  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
        fprintf (dump_file, "Function found to be noreturn: %s\n",
                 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
        changed = true;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      changed |= ipa_make_function_const
        (cgraph_node::get (current_function_decl), l->looping, true);
      break;

    case IPA_PURE:
      changed |= ipa_make_function_pure
        (cgraph_node::get (current_function_decl), l->looping, true);
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
        fprintf (dump_file, "Function found to be nothrow: %s\n",
                 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
        warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
        fprintf (dump_file, "Function found to be malloc: %s\n",
                 node->dump_name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
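
/* Editor's illustration (not part of the original source): within the
   early optimization queue,

     static int sq (int x) { return x * x; }

   touches no memory at all, so this local pass can already mark it
   const; later early passes may then, e.g., CSE two calls to sq (n)
   without waiting for the IPA propagation.  */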

/* Emit noreturn warnings.  */

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    return warn_suggest_attribute_noreturn;
  }
  unsigned int execute (function *fun) final override
  {
    if (!TREE_THIS_VOLATILE (current_function_decl)
        && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
      warn_function_noreturn (current_function_decl);
    return 0;
  }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}
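
/* Editor's illustration (not part of the original source): with
   -Wsuggest-attribute=noreturn,

     void fatal (void) { __builtin_exit (1); }

   leaves the CFG's exit block with no incoming edges, so the pass
   above suggests marking FATAL with the noreturn attribute.  */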

/* Simple local pass for nothrow discovery.  It runs early, during
   lowering, and marks a function nothrow when none of its statements
   can throw externally; the function's own recursive calls are
   ignored.  */

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_nothrow (m_ctxt); }
  bool gate (function *) final override { return optimize; }
  unsigned int execute (function *) final override;

}; // class pass_nothrow

unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering; we cannot really use availability yet.  */
  if (cgraph_node::get (current_function_decl)->get_availability ()
      <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
        fprintf (dump_file, "Function is interposable;"
                 " not analyzing.\n");
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
          {
            if (is_gimple_call (gsi_stmt (gsi)))
              {
                tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
                if (callee_t && recursive_call_p (current_function_decl,
                                                  callee_t))
                  continue;
              }

            if (dump_file)
              {
                fprintf (dump_file, "Statement can throw: ");
                print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
              }
            return 0;
          }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gcall *g = safe_dyn_cast <gcall *> (*gsi_last_bb (this_block)))
        {
          tree callee_t = gimple_call_fndecl (g);
          if (callee_t
              && recursive_call_p (current_function_decl, callee_t)
              && maybe_clean_eh_stmt (g)
              && gimple_purge_dead_eh_edges (this_block))
            cfg_changed = true;
        }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
             current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}
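
/* Editor's illustration (not part of the original source): a
   self-recursive function such as

     static unsigned fact (unsigned n)
     { return n ? n * fact (n - 1) : 1; }

   is not blocked by its own recursive call: the scan above skips it,
   the function is marked nothrow, and any EH edges left on the
   recursive call sites are then purged.  */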

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}