gcc/ipa-pure-const.cc
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2022 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
24
25 This must be run after inlining decisions have been made since
26 otherwise the local sets will not contain information that is
27 consistent with the post-inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
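
/* For illustration (a sketch, not part of the original sources; the
   function names are made up):

     static int table[256];

     int sq (int x) { return x * x; }            <- candidate for const
     int get (int i) { return table[i]; }        <- candidate for pure
                                                    (reads global memory)
     void put (int i, int v) { table[i] = v; }   <- neither (writes global
                                                    memory)  */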
29
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since its information is used by
32 the rest of the compilation. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
43 #include "cgraph.h"
44 #include "diagnostic.h"
45 #include "calls.h"
46 #include "cfganal.h"
47 #include "tree-eh.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57 #include "intl.h"
58 #include "opts.h"
59 #include "ssa.h"
60 #include "alloc-pool.h"
61 #include "symbol-summary.h"
62 #include "ipa-prop.h"
63 #include "ipa-fnsummary.h"
64 #include "symtab-thunks.h"
65 #include "dbgcnt.h"
66
67 /* Lattice values for const and pure functions. Everything starts out
68 being const, then may drop to pure and then neither depending on
69 what is found. */
70 enum pure_const_state_e
71 {
72 IPA_CONST,
73 IPA_PURE,
74 IPA_NEITHER
75 };
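
/* Note that the numerical order of the values above is relied upon as a
   lattice order: better_state and worse_state below combine states with
   MIN and MAX, so IPA_CONST < IPA_PURE < IPA_NEITHER.  */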
76
77 static const char *pure_const_names[3] = {"const", "pure", "neither"};
78
79 enum malloc_state_e
80 {
81 STATE_MALLOC_TOP,
82 STATE_MALLOC,
83 STATE_MALLOC_BOTTOM
84 };
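
/* Lattice used when propagating the malloc attribute: STATE_MALLOC_TOP is
   the optimistic initial value for candidates, STATE_MALLOC means the
   function behaves like malloc, and STATE_MALLOC_BOTTOM means it is known
   not to.  */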
85
86 static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
87
88 /* Holder for the const_state. There is one of these per function
89 decl. */
90 class funct_state_d
91 {
92 public:
93 funct_state_d (): pure_const_state (IPA_NEITHER),
94 state_previously_known (IPA_NEITHER), looping_previously_known (true),
95 looping (true), can_throw (true), can_free (true),
96 malloc_state (STATE_MALLOC_BOTTOM) {}
97
98 funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
99 state_previously_known (s.state_previously_known),
100 looping_previously_known (s.looping_previously_known),
101 looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
102 malloc_state (s.malloc_state) {}
103
104 /* See above. */
105 enum pure_const_state_e pure_const_state;
106 /* What the user set here; we can always be sure about this. */
107 enum pure_const_state_e state_previously_known;
108 bool looping_previously_known;
109
110 /* True if the function could possibly loop forever. There are a
111 lot of ways that this could be determined. We are pretty
112 conservative here. While it is possible to CSE pure and const
113 calls, it is not legal to have DCE get rid of the call if there
114 is a possibility that the call could loop forever since this is
115 a behavioral change. */
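
     /* Illustrative sketch (not from the original sources): if this pass
	proved

	  int f (int x) { while (x) ; return 0; }

	to be plain const, a caller such as

	  void g (void) { f (1); }

	could have the call removed as dead code, silently turning a
	nonterminating program into a terminating one.  A possibly
	nonterminating function may therefore only get the LOOPING
	variant of const/pure.  */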
116 bool looping;
117
118 bool can_throw;
119
120 /* If function can call free, munmap or otherwise make previously
121 non-trapping memory accesses trapping. */
122 bool can_free;
123
124 enum malloc_state_e malloc_state;
125 };
126
127 typedef class funct_state_d * funct_state;
128
129 /* The storage of the funct_state is abstracted because there is the
130 possibility that it may be desirable to move this to the cgraph
131 local info. */
132
133 class funct_state_summary_t:
134 public fast_function_summary <funct_state_d *, va_heap>
135 {
136 public:
137 funct_state_summary_t (symbol_table *symtab):
138 fast_function_summary <funct_state_d *, va_heap> (symtab) {}
139
140 void insert (cgraph_node *, funct_state_d *state) final override;
141 void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
142 funct_state_d *src_data,
143 funct_state_d *dst_data) final override;
144 };
145
146 static funct_state_summary_t *funct_state_summaries = NULL;
147
148 static bool gate_pure_const (void);
149
150 namespace {
151
152 const pass_data pass_data_ipa_pure_const =
153 {
154 IPA_PASS, /* type */
155 "pure-const", /* name */
156 OPTGROUP_NONE, /* optinfo_flags */
157 TV_IPA_PURE_CONST, /* tv_id */
158 0, /* properties_required */
159 0, /* properties_provided */
160 0, /* properties_destroyed */
161 0, /* todo_flags_start */
162 0, /* todo_flags_finish */
163 };
164
165 class pass_ipa_pure_const : public ipa_opt_pass_d
166 {
167 public:
168 pass_ipa_pure_const(gcc::context *ctxt);
169
170 /* opt_pass methods: */
171 bool gate (function *) final override { return gate_pure_const (); }
172 unsigned int execute (function *fun) final override;
173
174 void register_hooks (void);
175
176 private:
177 bool init_p;
178 }; // class pass_ipa_pure_const
179
180 } // anon namespace
181
182 /* Try to guess if the function body will always be visible to the compiler
183 when compiling the call and whether the compiler will be able
184 to propagate the information by itself. */
185
186 static bool
187 function_always_visible_to_compiler_p (tree decl)
188 {
189 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
190 || DECL_COMDAT (decl));
191 }
192
193 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
194 is true if the function is known to be finite. The diagnostic is
195 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
196 OPTION; this function may initialize it, and it is always returned
197 by the function. */
198
199 static hash_set<tree> *
200 suggest_attribute (int option, tree decl, bool known_finite,
201 hash_set<tree> *warned_about,
202 const char * attrib_name)
203 {
204 if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
205 return warned_about;
206 if (TREE_THIS_VOLATILE (decl)
207 || (known_finite && function_always_visible_to_compiler_p (decl)))
208 return warned_about;
209
210 if (!warned_about)
211 warned_about = new hash_set<tree>;
212 if (warned_about->contains (decl))
213 return warned_about;
214 warned_about->add (decl);
215 warning_at (DECL_SOURCE_LOCATION (decl),
216 option,
217 known_finite
218 ? G_("function might be candidate for attribute %qs")
219 : G_("function might be candidate for attribute %qs"
220 " if it is known to return normally"), attrib_name);
221 return warned_about;
222 }
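
/* For example (illustrative; the file name and location are made up), with
   -Wsuggest-attribute=pure the diagnostic emitted above looks like:

     foo.c:7:1: warning: function might be candidate for attribute 'pure'
     if it is known to return normally [-Wsuggest-attribute=pure]  */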
223
224 /* Emit suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
225 is true if the function is known to be finite. */
226
227 static void
228 warn_function_pure (tree decl, bool known_finite)
229 {
230 /* Declaring a void function pure makes no sense and is diagnosed
231 by -Wattributes because calling it would have no effect. */
232 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
233 return;
234
235 static hash_set<tree> *warned_about;
236 warned_about
237 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
238 known_finite, warned_about, "pure");
239 }
240
241 /* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
242 is true if the function is known to be finite. */
243
244 static void
245 warn_function_const (tree decl, bool known_finite)
246 {
247 /* Declaring a void function const makes no sense and is diagnosed
248 by -Wattributes because calling it would have no effect. */
249 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
250 return;
251
252 static hash_set<tree> *warned_about;
253 warned_about
254 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
255 known_finite, warned_about, "const");
256 }
257
258 /* Emit suggestion about __attribute__((malloc)) for DECL. */
259
260 static void
261 warn_function_malloc (tree decl)
262 {
263 static hash_set<tree> *warned_about;
264 warned_about
265 = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
266 true, warned_about, "malloc");
267 }
268
269 /* Emit suggestion about __attribute__((noreturn)) for DECL. */
270
271 static void
272 warn_function_noreturn (tree decl)
273 {
274 tree original_decl = decl;
275
276 static hash_set<tree> *warned_about;
277 if (!lang_hooks.missing_noreturn_ok_p (decl)
278 && targetm.warn_func_return (decl))
279 warned_about
280 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
281 true, warned_about, "noreturn");
282 }
283
284 void
285 warn_function_cold (tree decl)
286 {
287 tree original_decl = decl;
288
289 static hash_set<tree> *warned_about;
290 warned_about
291 = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
292 true, warned_about, "cold");
293 }
294
295 /* Check to see if the use (or definition when CHECKING_WRITE is true)
296 of variable T is legal in a function that is either pure or const. */
297
298 static inline void
299 check_decl (funct_state local,
300 tree t, bool checking_write, bool ipa)
301 {
302 /* Do not want to do anything with volatile except mark any
303 function that uses one to be not const or pure. */
304 if (TREE_THIS_VOLATILE (t))
305 {
306 local->pure_const_state = IPA_NEITHER;
307 if (dump_file)
308 fprintf (dump_file, " Volatile operand is not const/pure\n");
309 return;
310 }
311
312 /* Do not care about a local automatic that is not static. */
313 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
314 return;
315
316 /* If the variable has the "used" attribute, treat it as if it had
317 been touched by the devil. */
318 if (DECL_PRESERVE_P (t))
319 {
320 local->pure_const_state = IPA_NEITHER;
321 if (dump_file)
322 fprintf (dump_file, " Used static/global variable is not const/pure\n");
323 return;
324 }
325
326 /* In IPA mode we are not interested in checking actual loads and stores;
327 they will be processed at propagation time using ipa_ref. */
328 if (ipa)
329 return;
330
331 /* Since we have dealt with the locals and params cases above, if we
332 are CHECKING_WRITE, this cannot be a pure or constant
333 function. */
334 if (checking_write)
335 {
336 local->pure_const_state = IPA_NEITHER;
337 if (dump_file)
338 fprintf (dump_file, " static/global memory write is not const/pure\n");
339 return;
340 }
341
342 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
343 {
344 /* Readonly reads are safe. */
345 if (TREE_READONLY (t))
346 return; /* Read of a constant, do not change the function state. */
347 else
348 {
349 if (dump_file)
350 fprintf (dump_file, " global memory read is not const\n");
351 /* Just a regular read. */
352 if (local->pure_const_state == IPA_CONST)
353 local->pure_const_state = IPA_PURE;
354 }
355 }
356 else
357 {
358 /* Compilation level statics can be read if they are readonly
359 variables. */
360 if (TREE_READONLY (t))
361 return;
362
363 if (dump_file)
364 fprintf (dump_file, " static memory read is not const\n");
365 /* Just a regular read. */
366 if (local->pure_const_state == IPA_CONST)
367 local->pure_const_state = IPA_PURE;
368 }
369 }
370
371
372 /* Check to see if the use (or definition when CHECKING_WRITE is true)
373 of operand T is legal in a function that is either pure or const. */
374
375 static inline void
376 check_op (funct_state local, tree t, bool checking_write)
377 {
378 t = get_base_address (t);
379 if (t && TREE_THIS_VOLATILE (t))
380 {
381 local->pure_const_state = IPA_NEITHER;
382 if (dump_file)
383 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
384 return;
385 }
386 else if (refs_local_or_readonly_memory_p (t))
387 {
388 if (dump_file)
389 fprintf (dump_file, " Indirect ref to local or readonly "
390 "memory is OK\n");
391 return;
392 }
393 else if (checking_write)
394 {
395 local->pure_const_state = IPA_NEITHER;
396 if (dump_file)
397 fprintf (dump_file, " Indirect ref write is not const/pure\n");
398 return;
399 }
400 else
401 {
402 if (dump_file)
403 fprintf (dump_file, " Indirect ref read is not const\n");
404 if (local->pure_const_state == IPA_CONST)
405 local->pure_const_state = IPA_PURE;
406 }
407 }
408
409 /* Compute the state based on ECF FLAGS and store it to STATE and LOOPING. */
410
411 static void
412 state_from_flags (enum pure_const_state_e *state, bool *looping,
413 int flags, bool cannot_lead_to_return)
414 {
415 *looping = false;
416 if (flags & ECF_LOOPING_CONST_OR_PURE)
417 {
418 *looping = true;
419 if (dump_file && (dump_flags & TDF_DETAILS))
420 fprintf (dump_file, " looping\n");
421 }
422 if (flags & ECF_CONST)
423 {
424 *state = IPA_CONST;
425 if (dump_file && (dump_flags & TDF_DETAILS))
426 fprintf (dump_file, " const\n");
427 }
428 else if (flags & ECF_PURE)
429 {
430 *state = IPA_PURE;
431 if (dump_file && (dump_flags & TDF_DETAILS))
432 fprintf (dump_file, " pure\n");
433 }
434 else if (cannot_lead_to_return)
435 {
436 *state = IPA_PURE;
437 *looping = true;
438 if (dump_file && (dump_flags & TDF_DETAILS))
439 fprintf (dump_file, " ignoring side effects->pure looping\n");
440 }
441 else
442 {
443 if (dump_file && (dump_flags & TDF_DETAILS))
444 fprintf (dump_file, " neither\n");
445 *state = IPA_NEITHER;
446 *looping = true;
447 }
448 }
449
450 /* Merge STATE with STATE2 and LOOPING with LOOPING2 and store
451 the better of the two variants into STATE and LOOPING.
452 Be sure to merge looping correctly. IPA_NEITHER functions
453 have looping 0 even if they don't have to return. */
454
455 static inline void
456 better_state (enum pure_const_state_e *state, bool *looping,
457 enum pure_const_state_e state2, bool looping2)
458 {
459 if (state2 < *state)
460 {
461 if (*state == IPA_NEITHER)
462 *looping = looping2;
463 else
464 *looping = MIN (*looping, looping2);
465 *state = state2;
466 }
467 else if (state2 != IPA_NEITHER)
468 *looping = MIN (*looping, looping2);
469 }
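
/* For instance (an illustrative note, not from the original sources):
   merging a current pair (IPA_PURE, looping) with a previously known
   (IPA_CONST, !looping) yields (IPA_CONST, !looping), while merging with
   (IPA_NEITHER, ...) leaves the current pair untouched.  */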
470
471 /* Merge STATE with STATE2 and LOOPING with LOOPING2 and store
472 the worse of the two variants into STATE and LOOPING.
473 FROM and TO, if non-NULL, are the caller and callee of the call being merged. */
474
475 static inline void
476 worse_state (enum pure_const_state_e *state, bool *looping,
477 enum pure_const_state_e state2, bool looping2,
478 struct symtab_node *from,
479 struct symtab_node *to)
480 {
481 /* Consider function:
482
483 bool a(int *p)
484 {
485 return *p==*p;
486 }
487
488 During early optimization we will turn this into:
489
490 bool a(int *p)
491 {
492 return true;
493 }
494
495 Now this function will be detected as CONST; however, when interposed it
496 may end up being just pure. We must always assume the worst scenario here.
497 */
498 if (*state == IPA_CONST && state2 == IPA_CONST
499 && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
500 {
501 if (dump_file && (dump_flags & TDF_DETAILS))
502 fprintf (dump_file, "Dropping state to PURE because call to %s may not "
503 "bind to current def.\n", to->dump_name ());
504 state2 = IPA_PURE;
505 }
506 *state = MAX (*state, state2);
507 *looping = MAX (*looping, looping2);
508 }
509
510 /* Recognize special cases of builtins that are by themselves not const
511 but a function using them can still be. */
512 bool
513 builtin_safe_for_const_function_p (bool *looping, tree callee)
514 {
515 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
516 switch (DECL_FUNCTION_CODE (callee))
517 {
518 case BUILT_IN_RETURN:
519 case BUILT_IN_UNREACHABLE:
520 CASE_BUILT_IN_ALLOCA:
521 case BUILT_IN_STACK_SAVE:
522 case BUILT_IN_STACK_RESTORE:
523 case BUILT_IN_EH_POINTER:
524 case BUILT_IN_EH_FILTER:
525 case BUILT_IN_UNWIND_RESUME:
526 case BUILT_IN_CXA_END_CLEANUP:
527 case BUILT_IN_EH_COPY_VALUES:
528 case BUILT_IN_FRAME_ADDRESS:
529 case BUILT_IN_APPLY_ARGS:
530 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
531 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
532 case BUILT_IN_DWARF_CFA:
533 case BUILT_IN_RETURN_ADDRESS:
534 *looping = false;
535 return true;
536 case BUILT_IN_PREFETCH:
537 *looping = true;
538 return true;
539 default:
540 break;
541 }
542 return false;
543 }
544
545 /* Check the parameters of the function call CALL to see if
546 there are any references in the parameters that are not allowed for
547 pure or const functions. Also check to see if this is either an
548 indirect call, a call outside the compilation unit, or has special
549 attributes that may also affect the purity. CALL is the gcall
550 statement for the entire call expression. */
551
552 static void
553 check_call (funct_state local, gcall *call, bool ipa)
554 {
555 int flags = gimple_call_flags (call);
556 tree callee_t = gimple_call_fndecl (call);
557 bool possibly_throws = stmt_could_throw_p (cfun, call);
558 bool possibly_throws_externally = (possibly_throws
559 && stmt_can_throw_external (cfun, call));
560
561 if (possibly_throws)
562 {
563 unsigned int i;
564 for (i = 0; i < gimple_num_ops (call); i++)
565 if (gimple_op (call, i)
566 && tree_could_throw_p (gimple_op (call, i)))
567 {
568 if (possibly_throws && cfun->can_throw_non_call_exceptions)
569 {
570 if (dump_file)
571 fprintf (dump_file, " operand can throw; looping\n");
572 local->looping = true;
573 }
574 if (possibly_throws_externally)
575 {
576 if (dump_file)
577 fprintf (dump_file, " operand can throw externally\n");
578 local->can_throw = true;
579 }
580 }
581 }
582
583 /* The const and pure flags are set by a variety of places in the
584 compiler (including here). If someone has already set the flags
585 for the callee (such as for some of the builtins), we will use
586 them; otherwise we will compute our own information.
587
588 Const and pure functions have less clobber effects than other
589 functions so we process these first. Otherwise if it is a call
590 outside the compilation unit or an indirect call we punt. This
591 leaves local calls which will be processed by following the call
592 graph. */
593 if (callee_t)
594 {
595 bool call_looping;
596
597 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
598 && !nonfreeing_call_p (call))
599 local->can_free = true;
600
601 if (builtin_safe_for_const_function_p (&call_looping, callee_t))
602 {
603 worse_state (&local->pure_const_state, &local->looping,
604 IPA_CONST, call_looping,
605 NULL, NULL);
606 return;
607 }
608 /* When bad things happen to bad functions, they cannot be const
609 or pure. */
610 if (setjmp_call_p (callee_t))
611 {
612 if (dump_file)
613 fprintf (dump_file, " setjmp is not const/pure\n");
614 local->looping = true;
615 local->pure_const_state = IPA_NEITHER;
616 }
617
618 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
619 switch (DECL_FUNCTION_CODE (callee_t))
620 {
621 case BUILT_IN_LONGJMP:
622 case BUILT_IN_NONLOCAL_GOTO:
623 if (dump_file)
624 fprintf (dump_file,
625 " longjmp and nonlocal goto is not const/pure\n");
626 local->pure_const_state = IPA_NEITHER;
627 local->looping = true;
628 break;
629 default:
630 break;
631 }
632 }
633 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
634 local->can_free = true;
635
636 /* When not in IPA mode, we can still handle self recursion. */
637 if (!ipa && callee_t
638 && recursive_call_p (current_function_decl, callee_t))
639 {
640 if (dump_file)
641 fprintf (dump_file, " Recursive call can loop.\n");
642 local->looping = true;
643 }
644 /* Either callee is unknown or we are doing local analysis.
645 Look to see if there are any bits available for the callee (such as by
646 declaration or because it is a builtin) and process solely on the basis of
647 those bits. Handle internal calls always, those calls don't have
648 corresponding cgraph edges and thus aren't processed during
649 the propagation. */
650 else if (!ipa || gimple_call_internal_p (call))
651 {
652 enum pure_const_state_e call_state;
653 bool call_looping;
654 if (possibly_throws && cfun->can_throw_non_call_exceptions)
655 {
656 if (dump_file)
657 fprintf (dump_file, " can throw; looping\n");
658 local->looping = true;
659 }
660 if (possibly_throws_externally)
661 {
662 if (dump_file)
663 {
664 fprintf (dump_file, " can throw externally to lp %i\n",
665 lookup_stmt_eh_lp (call));
666 if (callee_t)
667 fprintf (dump_file, " callee:%s\n",
668 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
669 }
670 local->can_throw = true;
671 }
672 if (dump_file && (dump_flags & TDF_DETAILS))
673 fprintf (dump_file, " checking flags for call:");
674 state_from_flags (&call_state, &call_looping, flags,
675 ((flags & (ECF_NORETURN | ECF_NOTHROW))
676 == (ECF_NORETURN | ECF_NOTHROW))
677 || (!flag_exceptions && (flags & ECF_NORETURN)));
678 worse_state (&local->pure_const_state, &local->looping,
679 call_state, call_looping, NULL, NULL);
680 }
681 /* Direct function calls are handled by IPA propagation. */
682 }
683
684 /* Wrapper around check_decl for loads in local mode. */
685
686 static bool
687 check_load (gimple *, tree op, tree, void *data)
688 {
689 if (DECL_P (op))
690 check_decl ((funct_state)data, op, false, false);
691 else
692 check_op ((funct_state)data, op, false);
693 return false;
694 }
695
696 /* Wrapper around check_decl for stores in local mode. */
697
698 static bool
699 check_store (gimple *, tree op, tree, void *data)
700 {
701 if (DECL_P (op))
702 check_decl ((funct_state)data, op, true, false);
703 else
704 check_op ((funct_state)data, op, true);
705 return false;
706 }
707
708 /* Wrapper around check_decl for loads in ipa mode. */
709
710 static bool
711 check_ipa_load (gimple *, tree op, tree, void *data)
712 {
713 if (DECL_P (op))
714 check_decl ((funct_state)data, op, false, true);
715 else
716 check_op ((funct_state)data, op, false);
717 return false;
718 }
719
720 /* Wrapper around check_decl for stores in ipa mode. */
721
722 static bool
723 check_ipa_store (gimple *, tree op, tree, void *data)
724 {
725 if (DECL_P (op))
726 check_decl ((funct_state)data, op, true, true);
727 else
728 check_op ((funct_state)data, op, true);
729 return false;
730 }
731
732 /* Look at the statement pointed to by GSIP and figure out what interesting side
733 effects it has. */
734 static void
735 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
736 {
737 gimple *stmt = gsi_stmt (*gsip);
738
739 if (is_gimple_debug (stmt))
740 return;
741
742 /* Do consider clobbers as side effects before IPA, so that we rather inline
743 C++ destructors and keep clobber semantics than eliminate them.
744
745 Similar logic is in ipa-modref.
746
747 TODO: We may get smarter during early optimizations on these and let
748 functions containing only clobbers be optimized more. This is a common
749 case of C++ destructors. */
750
751 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
752 return;
753
754 if (dump_file)
755 {
756 fprintf (dump_file, " scanning: ");
757 print_gimple_stmt (dump_file, stmt, 0);
758 }
759
760 if (gimple_has_volatile_ops (stmt)
761 && !gimple_clobber_p (stmt))
762 {
763 local->pure_const_state = IPA_NEITHER;
764 if (dump_file)
765 fprintf (dump_file, " Volatile stmt is not const/pure\n");
766 }
767
768 /* Look for loads and stores. */
769 walk_stmt_load_store_ops (stmt, local,
770 ipa ? check_ipa_load : check_load,
771 ipa ? check_ipa_store : check_store);
772
773 if (gimple_code (stmt) != GIMPLE_CALL
774 && stmt_could_throw_p (cfun, stmt))
775 {
776 if (cfun->can_throw_non_call_exceptions)
777 {
778 if (dump_file)
779 fprintf (dump_file, " can throw; looping\n");
780 local->looping = true;
781 }
782 if (stmt_can_throw_external (cfun, stmt))
783 {
784 if (dump_file)
785 fprintf (dump_file, " can throw externally\n");
786 local->can_throw = true;
787 }
788 else
789 if (dump_file)
790 fprintf (dump_file, " can throw\n");
791 }
792 switch (gimple_code (stmt))
793 {
794 case GIMPLE_CALL:
795 check_call (local, as_a <gcall *> (stmt), ipa);
796 break;
797 case GIMPLE_LABEL:
798 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
799 /* Target of long jump. */
800 {
801 if (dump_file)
802 fprintf (dump_file, " nonlocal label is not const/pure\n");
803 local->pure_const_state = IPA_NEITHER;
804 }
805 break;
806 case GIMPLE_ASM:
807 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
808 {
809 if (dump_file)
810 fprintf (dump_file, " memory asm clobber is not const/pure\n");
811 /* Abandon all hope, ye who enter here. */
812 local->pure_const_state = IPA_NEITHER;
813 local->can_free = true;
814 }
815 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
816 {
817 if (dump_file)
818 fprintf (dump_file, " volatile is not const/pure\n");
819 /* Abandon all hope, ye who enter here. */
820 local->pure_const_state = IPA_NEITHER;
821 local->looping = true;
822 local->can_free = true;
823 }
824 return;
825 default:
826 break;
827 }
828 }
829
830 /* Check that RETVAL is used only in STMT and in comparisons against 0.
831 RETVAL is the return value of the function and STMT is the return stmt. */
832
833 static bool
834 check_retval_uses (tree retval, gimple *stmt)
835 {
836 imm_use_iterator use_iter;
837 gimple *use_stmt;
838
839 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
840 if (gcond *cond = dyn_cast<gcond *> (use_stmt))
841 {
842 tree op2 = gimple_cond_rhs (cond);
843 if (!integer_zerop (op2))
844 return false;
845 }
846 else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
847 {
848 enum tree_code code = gimple_assign_rhs_code (ga);
849 if (TREE_CODE_CLASS (code) != tcc_comparison)
850 return false;
851 if (!integer_zerop (gimple_assign_rhs2 (ga)))
852 return false;
853 }
854 else if (is_gimple_debug (use_stmt))
855 ;
856 else if (use_stmt != stmt)
857 return false;
858
859 return true;
860 }
861
862 /* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
863 attribute. Currently this function does a very conservative analysis.
864 FUN is considered to be a candidate if
865 1) It returns a value of pointer type.
866 2) SSA_NAME_DEF_STMT (return_value) is either a function call or
867 a phi, and each element of the phi is either NULL or
868 SSA_NAME_DEF_STMT (element) is a function call.
869 3) The return value has immediate uses only within comparisons (gcond or gassign)
870 and the return stmt (and likewise a phi arg has immediate uses only within a comparison
871 or the phi stmt). */
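
/* An illustrative example (not from the original sources) of the shape of
   function accepted as a candidate; xmalloc stands for any callee that has
   the malloc attribute (or, in IPA mode, is later proved malloc):

     void *
     my_alloc (size_t n)
     {
       void *p = xmalloc (n);     <- SSA_NAME_DEF_STMT of the return value
       if (p == 0)                <- only compared against 0 ...
	 abort ();
       return p;                  <- ... and used by the return statement
     }  */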
872
873 #define DUMP_AND_RETURN(reason) \
874 { \
875 if (dump_file && (dump_flags & TDF_DETAILS)) \
876 fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
877 (node->dump_name ()), (reason)); \
878 return false; \
879 }
880
881 static bool
882 malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
883 bitmap visited)
884 {
885 cgraph_node *node = cgraph_node::get_create (fun->decl);
886 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
887 return true;
888
889 if (!check_retval_uses (retval, ret_stmt))
890 DUMP_AND_RETURN("Return value has uses outside return stmt"
891 " and comparisons against 0.")
892
893 gimple *def = SSA_NAME_DEF_STMT (retval);
894
895 if (gcall *call_stmt = dyn_cast<gcall *> (def))
896 {
897 tree callee_decl = gimple_call_fndecl (call_stmt);
898 if (!callee_decl)
899 return false;
900
901 if (!ipa && !DECL_IS_MALLOC (callee_decl))
902 DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
903 " non-ipa mode.")
904
905 cgraph_edge *cs = node->get_edge (call_stmt);
906 if (cs)
907 {
908 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
909 es->is_return_callee_uncaptured = true;
910 }
911 }
912
913 else if (gphi *phi = dyn_cast<gphi *> (def))
914 {
915 bool all_args_zero = true;
916 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
917 {
918 tree arg = gimple_phi_arg_def (phi, i);
919 if (integer_zerop (arg))
920 continue;
921
922 all_args_zero = false;
923 if (TREE_CODE (arg) != SSA_NAME)
924 DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
925 if (!check_retval_uses (arg, phi))
926 DUMP_AND_RETURN ("phi arg has uses outside phi"
927 " and comparisons against 0.")
928
929 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
930 if (is_a<gphi *> (arg_def))
931 {
932 if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
933 DUMP_AND_RETURN ("nested phi fail")
934 continue;
935 }
936
937 gcall *call_stmt = dyn_cast<gcall *> (arg_def);
938 if (!call_stmt)
939 DUMP_AND_RETURN ("phi arg is a not a call_stmt.")
940
941 tree callee_decl = gimple_call_fndecl (call_stmt);
942 if (!callee_decl)
943 return false;
944 if (!ipa && !DECL_IS_MALLOC (callee_decl))
945 DUMP_AND_RETURN("callee_decl does not have malloc attribute"
946 " for non-ipa mode.")
947
948 cgraph_edge *cs = node->get_edge (call_stmt);
949 if (cs)
950 {
951 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
952 es->is_return_callee_uncaptured = true;
953 }
954 }
955
956 if (all_args_zero)
957 DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
958 }
959
960 else
961 DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
962
963 return true;
964 }
965
966 static bool
967 malloc_candidate_p (function *fun, bool ipa)
968 {
969 basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
970 edge e;
971 edge_iterator ei;
972 cgraph_node *node = cgraph_node::get_create (fun->decl);
973
974 if (EDGE_COUNT (exit_block->preds) == 0
975 || !flag_delete_null_pointer_checks)
976 return false;
977
978 auto_bitmap visited;
979 FOR_EACH_EDGE (e, ei, exit_block->preds)
980 {
981 gimple_stmt_iterator gsi = gsi_last_bb (e->src);
982 greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
983
984 if (!ret_stmt)
985 return false;
986
987 tree retval = gimple_return_retval (ret_stmt);
988 if (!retval)
989 DUMP_AND_RETURN("No return value.")
990
991 if (TREE_CODE (retval) != SSA_NAME
992 || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
993 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
994
995 if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
996 return false;
997 }
998
999 if (dump_file && (dump_flags & TDF_DETAILS))
1000 fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
1001 IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1002 return true;
1003 }
1004
1005 #undef DUMP_AND_RETURN
1006
1007 /* Return true if function is known to be finite. */
1008
1009 bool
1010 finite_function_p ()
1011 {
1012 /* Const functions cannot have back edges (an
1013 indication of a possible infinite loop side
1014 effect). */
1015 bool finite = true;
1016 if (mark_dfs_back_edges ())
1017 {
1018 /* Preheaders are needed for SCEV to work.
1019 Simple latches and recorded exits improve the chances that the loop
1020 will be proved to be finite in testcases such as loop-15.c
1021 and loop-24.c. */
1022 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1023 | LOOPS_HAVE_SIMPLE_LATCHES
1024 | LOOPS_HAVE_RECORDED_EXITS);
1025 if (dump_file && (dump_flags & TDF_DETAILS))
1026 flow_loops_dump (dump_file, NULL, 0);
1027 if (mark_irreducible_loops ())
1028 {
1029 if (dump_file)
1030 fprintf (dump_file, " has irreducible loops\n");
1031 finite = false;
1032 }
1033 else
1034 {
1035 scev_initialize ();
1036 for (auto loop : loops_list (cfun, 0))
1037 if (!finite_loop_p (loop))
1038 {
1039 if (dump_file)
1040 fprintf (dump_file, " cannot prove finiteness of "
1041 "loop %i\n", loop->num);
1042 finite = false;
1043 break;
1044 }
1045 scev_finalize ();
1046 }
1047 loop_optimizer_finalize ();
1048 }
1049 return finite;
1050 }
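
/* Illustrative examples (not from the original sources) of what the SCEV
   based check in finite_function_p can and cannot prove:

     for (int i = 0; i < n; i++)   <- counted loop; an iteration bound is
       sum += a[i];                   computable, so it is finite

     while (p)                     <- pointer chasing; no bound is known,
       p = p->next;                   so finiteness is not proved  */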
1051
1052 /* This is the main routine for finding the reference patterns for
1053 global variables within a function FN. */
1054
1055 static funct_state
1056 analyze_function (struct cgraph_node *fn, bool ipa)
1057 {
1058 tree decl = fn->decl;
1059 funct_state l;
1060 basic_block this_block;
1061
1062 l = XCNEW (class funct_state_d);
1063 l->pure_const_state = IPA_CONST;
1064 l->state_previously_known = IPA_NEITHER;
1065 l->looping_previously_known = true;
1066 l->looping = false;
1067 l->can_throw = false;
1068 l->can_free = false;
1069 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
1070 flags_from_decl_or_type (fn->decl),
1071 fn->cannot_return_p ());
1072
1073 if (fn->thunk || fn->alias)
1074 {
1075 /* Thunk gets propagated through, so nothing interesting happens. */
1076 gcc_assert (ipa);
1077 if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
1078 l->pure_const_state = IPA_NEITHER;
1079 return l;
1080 }
1081
1082 if (dump_file)
1083 {
1084 fprintf (dump_file, "\n\n local analysis of %s\n ",
1085 fn->dump_name ());
1086 }
1087
1088 push_cfun (DECL_STRUCT_FUNCTION (decl));
1089
1090 FOR_EACH_BB_FN (this_block, cfun)
1091 {
1092 gimple_stmt_iterator gsi;
1093 struct walk_stmt_info wi;
1094
1095 memset (&wi, 0, sizeof (wi));
1096 for (gsi = gsi_start_bb (this_block);
1097 !gsi_end_p (gsi);
1098 gsi_next (&gsi))
1099 {
1100 /* A NULL memory access terminates the BB. These accesses are known
1101 to trip undefined behaviour. gimple-ssa-isolate-paths turns them
1102 into volatile accesses and adds a builtin_trap call which would
1103 confuse us otherwise. */
1104 if (infer_nonnull_range_by_dereference (gsi_stmt (gsi),
1105 null_pointer_node))
1106 {
1107 if (dump_file)
1108 fprintf (dump_file, " NULL memory access; terminating BB%s\n",
1109 flag_non_call_exceptions ? "; looping" : "");
1110 if (flag_non_call_exceptions)
1111 {
1112 l->looping = true;
1113 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
1114 {
1115 if (dump_file)
1116 fprintf (dump_file, " can throw externally\n");
1117 l->can_throw = true;
1118 }
1119 }
1120 break;
1121 }
1122 check_stmt (&gsi, l, ipa);
1123 if (l->pure_const_state == IPA_NEITHER
1124 && l->looping
1125 && l->can_throw
1126 && l->can_free)
1127 goto end;
1128 }
1129 }
1130
1131 end:
1132 if (l->pure_const_state != IPA_NEITHER
1133 && !l->looping
1134 && !finite_function_p ())
1135 l->looping = true;
1136
1137 if (dump_file && (dump_flags & TDF_DETAILS))
1138 fprintf (dump_file, " checking previously known:");
1139
1140 better_state (&l->pure_const_state, &l->looping,
1141 l->state_previously_known,
1142 l->looping_previously_known);
1143 if (TREE_NOTHROW (decl))
1144 l->can_throw = false;
1145
1146 l->malloc_state = STATE_MALLOC_BOTTOM;
1147 if (DECL_IS_MALLOC (decl))
1148 l->malloc_state = STATE_MALLOC;
1149 else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1150 l->malloc_state = STATE_MALLOC_TOP;
1151 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1152 l->malloc_state = STATE_MALLOC;
1153
1154 pop_cfun ();
1155 if (dump_file)
1156 {
1157 if (l->looping)
1158 fprintf (dump_file, "Function is locally looping.\n");
1159 if (l->can_throw)
1160 fprintf (dump_file, "Function is locally throwing.\n");
1161 if (l->pure_const_state == IPA_CONST)
1162 fprintf (dump_file, "Function is locally const.\n");
1163 if (l->pure_const_state == IPA_PURE)
1164 fprintf (dump_file, "Function is locally pure.\n");
1165 if (l->can_free)
1166 fprintf (dump_file, "Function can locally free.\n");
1167 if (l->malloc_state == STATE_MALLOC)
1168 fprintf (dump_file, "Function is locally malloc.\n");
1169 }
1170 return l;
1171 }
1172
1173 void
1174 funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
1175 {
1176 /* There are some shared nodes, in particular the initializers on
1177 static declarations. We do not need to scan them more than once
1178 since all we would be interested in are the addressof
1179 operations. */
1180 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1181 {
1182 funct_state_d *a = analyze_function (node, true);
1183 new (state) funct_state_d (*a);
1184 free (a);
1185 }
1186 else
1187 /* Do not keep stale summaries. */
1188 funct_state_summaries->remove (node);
1189 }
1190
1191 /* Called when new clone is inserted to callgraph late. */
1192
1193 void
1194 funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
1195 funct_state_d *src_data,
1196 funct_state_d *dst_data)
1197 {
1198 new (dst_data) funct_state_d (*src_data);
1199 if (dst_data->malloc_state == STATE_MALLOC
1200 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
1201 dst_data->malloc_state = STATE_MALLOC_BOTTOM;
1202 }
1203
1204 \f
1205 void
1206 pass_ipa_pure_const::
1207 register_hooks (void)
1208 {
1209 if (init_p)
1210 return;
1211
1212 init_p = true;
1213
1214 funct_state_summaries = new funct_state_summary_t (symtab);
1215 }
1216
1217
1218 /* Analyze each function in the cgraph to see if it is locally PURE or
1219 CONST. */
1220
1221 static void
1222 pure_const_generate_summary (void)
1223 {
1224 struct cgraph_node *node;
1225
1226 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1227 pass->register_hooks ();
1228
1229 /* Process all of the functions.
1230
1231 We process AVAIL_INTERPOSABLE functions. We cannot use the results
1232 by default, but the info can be used at LTO with -fwhole-program or
1233 when the function got cloned and the clone is AVAILABLE. */
1234
1235 FOR_EACH_DEFINED_FUNCTION (node)
1236 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1237 {
1238 funct_state_d *a = analyze_function (node, true);
1239 new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1240 free (a);
1241 }
1242 }
1243
1244
1245 /* Serialize the ipa info for lto. */
1246
1247 static void
1248 pure_const_write_summary (void)
1249 {
1250 struct cgraph_node *node;
1251 struct lto_simple_output_block *ob
1252 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1253 unsigned int count = 0;
1254 lto_symtab_encoder_iterator lsei;
1255 lto_symtab_encoder_t encoder;
1256
1257 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1258
1259 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1260 lsei_next_function_in_partition (&lsei))
1261 {
1262 node = lsei_cgraph_node (lsei);
1263 if (node->definition && funct_state_summaries->exists (node))
1264 count++;
1265 }
1266
1267 streamer_write_uhwi_stream (ob->main_stream, count);
1268
1269 /* Process all of the functions. */
1270 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1271 lsei_next_function_in_partition (&lsei))
1272 {
1273 node = lsei_cgraph_node (lsei);
1274 funct_state_d *fs = funct_state_summaries->get (node);
1275 if (node->definition && fs != NULL)
1276 {
1277 struct bitpack_d bp;
1278 int node_ref;
1279 lto_symtab_encoder_t encoder;
1280
1281 encoder = ob->decl_state->symtab_node_encoder;
1282 node_ref = lto_symtab_encoder_encode (encoder, node);
1283 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1284
1285 /* Note that flags will need to be read in the opposite
1286 order as we are pushing the bitflags into FLAGS. */
1287 bp = bitpack_create (ob->main_stream);
1288 bp_pack_value (&bp, fs->pure_const_state, 2);
1289 bp_pack_value (&bp, fs->state_previously_known, 2);
1290 bp_pack_value (&bp, fs->looping_previously_known, 1);
1291 bp_pack_value (&bp, fs->looping, 1);
1292 bp_pack_value (&bp, fs->can_throw, 1);
1293 bp_pack_value (&bp, fs->can_free, 1);
1294 bp_pack_value (&bp, fs->malloc_state, 2);
1295 streamer_write_bitpack (&bp);
1296 }
1297 }
1298
1299 lto_destroy_simple_output_block (ob);
1300 }
1301
1302
1303 /* Deserialize the ipa info for lto. */
1304
1305 static void
1306 pure_const_read_summary (void)
1307 {
1308 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1309 struct lto_file_decl_data *file_data;
1310 unsigned int j = 0;
1311
1312 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1313 pass->register_hooks ();
1314
1315 while ((file_data = file_data_vec[j++]))
1316 {
1317 const char *data;
1318 size_t len;
1319 class lto_input_block *ib
1320 = lto_create_simple_input_block (file_data,
1321 LTO_section_ipa_pure_const,
1322 &data, &len);
1323 if (ib)
1324 {
1325 unsigned int i;
1326 unsigned int count = streamer_read_uhwi (ib);
1327
1328 for (i = 0; i < count; i++)
1329 {
1330 unsigned int index;
1331 struct cgraph_node *node;
1332 struct bitpack_d bp;
1333 funct_state fs;
1334 lto_symtab_encoder_t encoder;
1335
1336 index = streamer_read_uhwi (ib);
1337 encoder = file_data->symtab_node_encoder;
1338 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1339 index));
1340
1341 fs = funct_state_summaries->get_create (node);
1342 /* Note that the flags must be read in the opposite
1343 order in which they were written (the bitflags were
1344 pushed into FLAGS). */
1345 bp = streamer_read_bitpack (ib);
1346 fs->pure_const_state
1347 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1348 fs->state_previously_known
1349 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1350 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1351 fs->looping = bp_unpack_value (&bp, 1);
1352 fs->can_throw = bp_unpack_value (&bp, 1);
1353 fs->can_free = bp_unpack_value (&bp, 1);
1354 fs->malloc_state
1355 = (enum malloc_state_e) bp_unpack_value (&bp, 2);
1356
1357 if (dump_file)
1358 {
1359 int flags = flags_from_decl_or_type (node->decl);
1360 fprintf (dump_file, "Read info for %s ", node->dump_name ());
1361 if (flags & ECF_CONST)
1362 fprintf (dump_file, " const");
1363 if (flags & ECF_PURE)
1364 fprintf (dump_file, " pure");
1365 if (flags & ECF_NOTHROW)
1366 fprintf (dump_file, " nothrow");
1367 fprintf (dump_file, "\n pure const state: %s\n",
1368 pure_const_names[fs->pure_const_state]);
1369 fprintf (dump_file, " previously known state: %s\n",
1370 pure_const_names[fs->state_previously_known]);
1371 if (fs->looping)
1372 fprintf (dump_file," function is locally looping\n");
1373 if (fs->looping_previously_known)
1374 fprintf (dump_file," function is previously known looping\n");
1375 if (fs->can_throw)
1376 fprintf (dump_file," function is locally throwing\n");
1377 if (fs->can_free)
1378 fprintf (dump_file," function can locally free\n");
1379 fprintf (dump_file, "\n malloc state: %s\n",
1380 malloc_state_names[fs->malloc_state]);
1381 }
1382 }
1383
1384 lto_destroy_simple_input_block (file_data,
1385 LTO_section_ipa_pure_const,
1386 ib, data, len);
1387 }
1388 }
1389 }
1390
1391 /* We only propagate across edges that can throw externally and whose callee
1392 is not interposable. */
1393
1394 static bool
1395 ignore_edge_for_nothrow (struct cgraph_edge *e)
1396 {
1397 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1398 return true;
1399
1400 enum availability avail;
1401 cgraph_node *ultimate_target
1402 = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1403 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
1404 return true;
1405 return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1406 && !e->callee->binds_to_current_def_p (e->caller))
1407 || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1408 || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
1409 }
1410
1411 /* Return true if NODE is a self-recursive function.
1412 Indirectly recursive functions appear as non-trivial strongly
1413 connected components, so we need to care about self recursion
1414 only. */
1415
1416 static bool
1417 self_recursive_p (struct cgraph_node *node)
1418 {
1419 struct cgraph_edge *e;
1420 for (e = node->callees; e; e = e->next_callee)
1421 if (e->callee->function_symbol () == node)
1422 return true;
1423 return false;
1424 }
1425
1426 /* Return true if N is a cdtor that is not const or pure. In this case we may
1427 need to remove the unreachable function if it is marked const/pure. */
1428
1429 static bool
1430 cdtor_p (cgraph_node *n, void *)
1431 {
1432 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1433 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1434 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1435 return false;
1436 }
1437
1438 /* Skip edges from and to nodes without ipa_pure_const enabled.
1439 Ignore not available symbols. */
1440
1441 static bool
1442 ignore_edge_for_pure_const (struct cgraph_edge *e)
1443 {
1444 enum availability avail;
1445 cgraph_node *ultimate_target
1446 = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1447
1448 return (avail <= AVAIL_INTERPOSABLE
1449 || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1450 || !opt_for_fn (ultimate_target->decl,
1451 flag_ipa_pure_const));
1452 }
1453
1454 /* Return true if function should be skipped for local pure const analysis. */
1455
1456 static bool
1457 skip_function_for_local_pure_const (struct cgraph_node *node)
1458 {
1459 /* Because we do not schedule pass_fixup_cfg over the whole program after early
1460 optimizations we must not promote functions that are called by already
1461 processed functions. */
1462
1463 if (function_called_by_processed_nodes_p ())
1464 {
1465 if (dump_file)
1466 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1467 return true;
1468 }
1469 /* Save some work and do not analyze functions which are interposable and
1470 do not have any non-interposable aliases. */
1471 if (node->get_availability () <= AVAIL_INTERPOSABLE
1472 && !flag_lto
1473 && !node->has_aliases_p ())
1474 {
1475 if (dump_file)
1476 fprintf (dump_file,
1477 "Function is interposable; not analyzing.\n");
1478 return true;
1479 }
1480 return false;
1481 }
1482
1483 /* Make the function const and output a warning. If LOCAL is true,
1484 return true if anything changed. Otherwise return true if
1485 we may have introduced removable cdtors. */
1486
1487 bool
1488 ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
1489 {
1490 bool cdtor = false;
1491
1492 if (TREE_READONLY (node->decl)
1493 && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
1494 return false;
1495 warn_function_const (node->decl, !looping);
1496 if (local && skip_function_for_local_pure_const (node))
1497 return false;
1498 if (dump_file)
1499 fprintf (dump_file, "Function found to be %sconst: %s\n",
1500 looping ? "looping " : "",
1501 node->dump_name ());
1502 if (!local && !looping)
1503 cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1504 if (!dbg_cnt (ipa_attr))
1505 return false;
1506 if (node->set_const_flag (true, looping))
1507 {
1508 if (dump_file)
1509 fprintf (dump_file,
1510 "Declaration updated to be %sconst: %s\n",
1511 looping ? "looping " : "",
1512 node->dump_name ());
1513 if (local)
1514 return true;
1515 return cdtor;
1516 }
1517 return false;
1518 }
1519
1520 /* Make the function pure and output a warning. If LOCAL is true,
1521 return true if anything changed. Otherwise return true if
1522 we may have introduced removable cdtors. */
1523
1524 bool
1525 ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
1526 {
1527 bool cdtor = false;
1528
1529 if (DECL_PURE_P (node->decl)
1530 && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
1531 return false;
1532 warn_function_pure (node->decl, !looping);
1533 if (local && skip_function_for_local_pure_const (node))
1534 return false;
1535 if (dump_file)
1536 fprintf (dump_file, "Function found to be %spure: %s\n",
1537 looping ? "looping " : "",
1538 node->dump_name ());
1539 if (!local && !looping)
1540 cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
1541 if (!dbg_cnt (ipa_attr))
1542 return false;
1543 if (node->set_pure_flag (true, looping))
1544 {
1545 if (dump_file)
1546 fprintf (dump_file,
1547 "Declaration updated to be %spure: %s\n",
1548 looping ? "looping " : "",
1549 node->dump_name ());
1550 if (local)
1551 return true;
1552 return cdtor;
1553 }
1554 return false;
1555 }
1556
1557 /* Produce transitive closure over the callgraph and compute pure/const
1558 attributes. */
1559
1560 static bool
1561 propagate_pure_const (void)
1562 {
1563 struct cgraph_node *node;
1564 struct cgraph_node *w;
1565 struct cgraph_node **order =
1566 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1567 int order_pos;
1568 int i;
1569 struct ipa_dfs_info * w_info;
1570 bool remove_p = false;
1571
1572 order_pos = ipa_reduced_postorder (order, true,
1573 ignore_edge_for_pure_const);
1574 if (dump_file)
1575 {
1576 cgraph_node::dump_cgraph (dump_file);
1577 ipa_print_order (dump_file, "reduced", order, order_pos);
1578 }
1579
1580 /* Propagate the local information through the call graph to produce
1581 the global information. All the nodes within a cycle will have
1582 the same info so we collapse cycles first. Then we can do the
1583 propagation in one pass from the leaves to the roots. */
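
  /* Illustrative sketch (not from the original sources): for a cycle
     a -> b -> a where a writes a global variable and b is otherwise
     const, both a and b end up IPA_NEITHER, since every node in the
     collapsed cycle shares the worst state found within it.  */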
1584 for (i = 0; i < order_pos; i++ )
1585 {
1586 enum pure_const_state_e pure_const_state = IPA_CONST;
1587 bool looping = false;
1588 int count = 0;
1589 node = order[i];
1590
1591 if (node->alias)
1592 continue;
1593
1594 if (dump_file && (dump_flags & TDF_DETAILS))
1595 fprintf (dump_file, "Starting cycle\n");
1596
1597 /* Find the worst state for any node in the cycle. */
1598 w = node;
1599 while (w && pure_const_state != IPA_NEITHER)
1600 {
1601 struct cgraph_edge *e;
1602 struct cgraph_edge *ie;
1603 int i;
1604 struct ipa_ref *ref = NULL;
1605
1606 funct_state w_l = funct_state_summaries->get_create (w);
1607 if (dump_file && (dump_flags & TDF_DETAILS))
1608 fprintf (dump_file, " Visiting %s state:%s looping %i\n",
1609 w->dump_name (),
1610 pure_const_names[w_l->pure_const_state],
1611 w_l->looping);
1612
1613 /* First merge in function body properties.
1614 We are safe to pass NULL as FROM and TO because we will take care
1615 of possible interposition when walking callees. */
1616 worse_state (&pure_const_state, &looping,
1617 w_l->pure_const_state, w_l->looping,
1618 NULL, NULL);
1619 if (pure_const_state == IPA_NEITHER)
1620 break;
1621
1622 count++;
1623
1624 /* We consider recursive cycles as possibly infinite.
1625 This might be relaxed since infinite recursion leads to stack
1626 overflow. */
1627 if (count > 1)
1628 looping = true;
1629
1630 /* Now walk the edges and merge in callee properties. */
1631 for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1632 e = e->next_callee)
1633 {
1634 enum availability avail;
1635 struct cgraph_node *y = e->callee->
1636 function_or_virtual_thunk_symbol (&avail,
1637 e->caller);
1638 enum pure_const_state_e edge_state = IPA_CONST;
1639 bool edge_looping = false;
1640
1641 if (e->recursive_p ())
1642 looping = true;
1643
1644 if (dump_file && (dump_flags & TDF_DETAILS))
1645 {
1646 fprintf (dump_file, " Call to %s",
1647 e->callee->dump_name ());
1648 }
1649 if (avail > AVAIL_INTERPOSABLE)
1650 {
1651 funct_state y_l = funct_state_summaries->get_create (y);
1652
1653 if (dump_file && (dump_flags & TDF_DETAILS))
1654 {
1655 fprintf (dump_file,
1656 " state:%s looping:%i\n",
1657 pure_const_names[y_l->pure_const_state],
1658 y_l->looping);
1659 }
1660 if (y_l->pure_const_state > IPA_PURE
1661 && e->cannot_lead_to_return_p ())
1662 {
1663 if (dump_file && (dump_flags & TDF_DETAILS))
1664 fprintf (dump_file,
1665 " Ignoring side effects"
1666 " -> pure, looping\n");
1667 edge_state = IPA_PURE;
1668 edge_looping = true;
1669 }
1670 else
1671 {
1672 edge_state = y_l->pure_const_state;
1673 edge_looping = y_l->looping;
1674 }
1675 }
1676 else if (builtin_safe_for_const_function_p (&edge_looping,
1677 y->decl))
1678 edge_state = IPA_CONST;
1679 else
1680 state_from_flags (&edge_state, &edge_looping,
1681 flags_from_decl_or_type (y->decl),
1682 e->cannot_lead_to_return_p ());
1683
1684 /* Merge the results with what we already know. */
1685 better_state (&edge_state, &edge_looping,
1686 w_l->state_previously_known,
1687 w_l->looping_previously_known);
1688 worse_state (&pure_const_state, &looping,
1689 edge_state, edge_looping, e->caller, e->callee);
1690 if (pure_const_state == IPA_NEITHER)
1691 break;
1692 }
1693
1694 /* Now process the indirect calls. */
1695 for (ie = w->indirect_calls;
1696 ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1697 {
1698 enum pure_const_state_e edge_state = IPA_CONST;
1699 bool edge_looping = false;
1700
1701 if (dump_file && (dump_flags & TDF_DETAILS))
1702 fprintf (dump_file, " Indirect call");
1703 state_from_flags (&edge_state, &edge_looping,
1704 ie->indirect_info->ecf_flags,
1705 ie->cannot_lead_to_return_p ());
1706 /* Merge the results with what we already know. */
1707 better_state (&edge_state, &edge_looping,
1708 w_l->state_previously_known,
1709 w_l->looping_previously_known);
1710 worse_state (&pure_const_state, &looping,
1711 edge_state, edge_looping, NULL, NULL);
1712 if (pure_const_state == IPA_NEITHER)
1713 break;
1714 }
1715
1716 /* And finally all loads and stores. */
1717 for (i = 0; w->iterate_reference (i, ref)
1718 && pure_const_state != IPA_NEITHER; i++)
1719 {
1720 enum pure_const_state_e ref_state = IPA_CONST;
1721 bool ref_looping = false;
1722 switch (ref->use)
1723 {
1724 case IPA_REF_LOAD:
1725 /* readonly reads are safe. */
1726 if (TREE_READONLY (ref->referred->decl))
1727 break;
1728 if (dump_file && (dump_flags & TDF_DETAILS))
1729 fprintf (dump_file, " nonreadonly global var read\n");
1730 ref_state = IPA_PURE;
1731 break;
1732 case IPA_REF_STORE:
1733 if (ref->cannot_lead_to_return ())
1734 break;
1735 ref_state = IPA_NEITHER;
1736 if (dump_file && (dump_flags & TDF_DETAILS))
1737 fprintf (dump_file, " global var write\n");
1738 break;
1739 case IPA_REF_ADDR:
1740 break;
1741 default:
1742 gcc_unreachable ();
1743 }
1744 better_state (&ref_state, &ref_looping,
1745 w_l->state_previously_known,
1746 w_l->looping_previously_known);
1747 worse_state (&pure_const_state, &looping,
1748 ref_state, ref_looping, NULL, NULL);
1749 if (pure_const_state == IPA_NEITHER)
1750 break;
1751 }
1752 w_info = (struct ipa_dfs_info *) w->aux;
1753 w = w_info->next_cycle;
1754 }
1755 if (dump_file && (dump_flags & TDF_DETAILS))
1756 fprintf (dump_file, "Result %s looping %i\n",
1757 pure_const_names [pure_const_state],
1758 looping);
1759
1760 /* Find the worst state of can_free for any node in the cycle. */
1761 bool can_free = false;
1762 w = node;
1763 while (w && !can_free)
1764 {
1765 struct cgraph_edge *e;
1766 funct_state w_l = funct_state_summaries->get (w);
1767
1768 if (w_l->can_free
1769 || w->get_availability () == AVAIL_INTERPOSABLE
1770 || w->indirect_calls)
1771 can_free = true;
1772
1773 for (e = w->callees; e && !can_free; e = e->next_callee)
1774 {
1775 enum availability avail;
1776 struct cgraph_node *y = e->callee->
1777 function_or_virtual_thunk_symbol (&avail,
1778 e->caller);
1779
1780 if (avail > AVAIL_INTERPOSABLE)
1781 can_free = funct_state_summaries->get (y)->can_free;
1782 else
1783 can_free = true;
1784 }
1785 w_info = (struct ipa_dfs_info *) w->aux;
1786 w = w_info->next_cycle;
1787 }
1788
1789 /* Copy back the region's pure_const_state which is shared by
1790 all nodes in the region. */
1791 w = node;
1792 while (w)
1793 {
1794 funct_state w_l = funct_state_summaries->get (w);
1795 enum pure_const_state_e this_state = pure_const_state;
1796 bool this_looping = looping;
1797
1798 w_l->can_free = can_free;
1799 w->nonfreeing_fn = !can_free;
1800 if (!can_free && dump_file)
1801 fprintf (dump_file, "Function found not to call free: %s\n",
1802 w->dump_name ());
1803
1804 if (w_l->state_previously_known != IPA_NEITHER
1805 && this_state > w_l->state_previously_known)
1806 {
1807 if (this_state == IPA_NEITHER)
1808 this_looping = w_l->looping_previously_known;
1809 this_state = w_l->state_previously_known;
1810 }
1811 if (!this_looping && self_recursive_p (w))
1812 this_looping = true;
1813 if (!w_l->looping_previously_known)
1814 this_looping = false;
1815
1816 /* All nodes within a cycle share the same info. */
1817 w_l->pure_const_state = this_state;
1818 w_l->looping = this_looping;
1819
1820 /* Inline clones share declaration with their offline copies;
1821 do not modify their declarations since the offline copy may
1822 be different. */
1823 if (!w->inlined_to)
1824 switch (this_state)
1825 {
1826 case IPA_CONST:
1827 remove_p |= ipa_make_function_const (w, this_looping, false);
1828 break;
1829
1830 case IPA_PURE:
1831 remove_p |= ipa_make_function_pure (w, this_looping, false);
1832 break;
1833
1834 default:
1835 break;
1836 }
1837 w_info = (struct ipa_dfs_info *) w->aux;
1838 w = w_info->next_cycle;
1839 }
1840 }
1841
1842 ipa_free_postorder_info ();
1843 free (order);
1844 return remove_p;
1845 }
1846
1847 /* Produce transitive closure over the callgraph and compute nothrow
1848 attributes. */
1849
1850 static void
1851 propagate_nothrow (void)
1852 {
1853 struct cgraph_node *node;
1854 struct cgraph_node *w;
1855 struct cgraph_node **order =
1856 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1857 int order_pos;
1858 int i;
1859 struct ipa_dfs_info * w_info;
1860
1861 order_pos = ipa_reduced_postorder (order, true,
1862 ignore_edge_for_nothrow);
1863 if (dump_file)
1864 {
1865 cgraph_node::dump_cgraph (dump_file);
1866 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1867 }
1868
1869 /* Propagate the local information through the call graph to produce
1870 the global information. All the nodes within a cycle will have
1871 the same info so we collapse cycles first. Then we can do the
1872 propagation in one pass from the leaves to the roots. */
1873 for (i = 0; i < order_pos; i++ )
1874 {
1875 bool can_throw = false;
1876 node = order[i];
1877
1878 if (node->alias)
1879 continue;
1880
1881 /* Find the worst state for any node in the cycle. */
1882 w = node;
1883 while (w && !can_throw)
1884 {
1885 struct cgraph_edge *e, *ie;
1886
1887 if (!TREE_NOTHROW (w->decl))
1888 {
1889 funct_state w_l = funct_state_summaries->get_create (w);
1890
1891 if (w_l->can_throw
1892 || w->get_availability () == AVAIL_INTERPOSABLE)
1893 can_throw = true;
1894
1895 for (e = w->callees; e && !can_throw; e = e->next_callee)
1896 {
1897 enum availability avail;
1898
1899 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1900 continue;
1901
1902 struct cgraph_node *y = e->callee->
1903 function_or_virtual_thunk_symbol (&avail,
1904 e->caller);
1905
1906 /* We can use info about the callee only if we know it
1907 cannot be interposed.
1908 When the callee is compiled with non-call exceptions we
1909 must also check that the declaration is bound to the
1910 current body, as another semantically equivalent body
1911 may still throw. */
1912 if (avail <= AVAIL_INTERPOSABLE
1913 || (!TREE_NOTHROW (y->decl)
1914 && (funct_state_summaries->get_create (y)->can_throw
1915 || (opt_for_fn (y->decl, flag_non_call_exceptions)
1916 && !e->callee->binds_to_current_def_p (w)))))
1917 can_throw = true;
1918 }
1919 for (ie = w->indirect_calls; ie && !can_throw;
1920 ie = ie->next_callee)
1921 if (ie->can_throw_external
1922 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1923 can_throw = true;
1924 }
1925 w_info = (struct ipa_dfs_info *) w->aux;
1926 w = w_info->next_cycle;
1927 }
1928
1929 /* Copy back the region's nothrow state, which is shared by
1930 all nodes in the region. */
1931 w = node;
1932 while (w)
1933 {
1934 funct_state w_l = funct_state_summaries->get_create (w);
1935 if (!can_throw && !TREE_NOTHROW (w->decl))
1936 {
1937 /* Inline clones share declaration with their offline copies;
1938 do not modify their declarations since the offline copy may
1939 be different. */
1940 if (!w->inlined_to)
1941 {
1942 w->set_nothrow_flag (true);
1943 if (dump_file)
1944 fprintf (dump_file, "Function found to be nothrow: %s\n",
1945 w->dump_name ());
1946 }
1947 }
1948 else if (can_throw && !TREE_NOTHROW (w->decl))
1949 w_l->can_throw = true;
1950 w_info = (struct ipa_dfs_info *) w->aux;
1951 w = w_info->next_cycle;
1952 }
1953 }
1954
1955 ipa_free_postorder_info ();
1956 free (order);
1957 }
1958
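/* Editorial illustration, not part of the original sources.  With the
   propagation above, a hypothetical C++ translation unit such as

     int leaf (int x) { return x + 1; }
     int wrap (int x) { return leaf (x) + leaf (2 * x); }

   ends up with both functions nothrow: neither body contains a statement
   that can throw externally.  If leaf were interposable (e.g. a weak
   definition), or built with -fnon-call-exceptions without binding to the
   current body, wrap would stay potentially throwing.  */
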
1959 /* Debugging function to dump state of malloc lattice. */
1960
1961 DEBUG_FUNCTION
1962 static void
1963 dump_malloc_lattice (FILE *dump_file, const char *s)
1964 {
1965 if (!dump_file)
1966 return;
1967
1968 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1969 cgraph_node *node;
1970 FOR_EACH_FUNCTION (node)
1971 {
1972 funct_state fs = funct_state_summaries->get (node);
1973 if (fs)
1974 fprintf (dump_file, "%s: %s\n", node->dump_name (),
1975 malloc_state_names[fs->malloc_state]);
1976 }
1977 }
1978
1979 /* Propagate malloc attribute across the callgraph. */
1980
1981 static void
1982 propagate_malloc (void)
1983 {
1984 cgraph_node *node;
1985 FOR_EACH_FUNCTION (node)
1986 {
1987 if (DECL_IS_MALLOC (node->decl))
1988 if (!funct_state_summaries->exists (node))
1989 {
1990 funct_state fs = funct_state_summaries->get_create (node);
1991 fs->malloc_state = STATE_MALLOC;
1992 }
1993 }
1994
1995 dump_malloc_lattice (dump_file, "Initial");
1996 struct cgraph_node **order
1997 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1998 int order_pos = ipa_reverse_postorder (order);
1999 bool changed = true;
2000
2001 while (changed)
2002 {
2003 changed = false;
2004 /* Walk in postorder. */
2005 for (int i = order_pos - 1; i >= 0; --i)
2006 {
2007 cgraph_node *node = order[i];
2008 if (node->alias
2009 || !node->definition
2010 || !funct_state_summaries->exists (node))
2011 continue;
2012
2013 funct_state l = funct_state_summaries->get (node);
2014
2015 /* FIXME: add support for indirect-calls. */
2016 if (node->indirect_calls)
2017 {
2018 l->malloc_state = STATE_MALLOC_BOTTOM;
2019 continue;
2020 }
2021
2022 if (node->get_availability () <= AVAIL_INTERPOSABLE)
2023 {
2024 l->malloc_state = STATE_MALLOC_BOTTOM;
2025 continue;
2026 }
2027
2028 if (l->malloc_state == STATE_MALLOC_BOTTOM)
2029 continue;
2030
2031 auto_vec<cgraph_node *, 16> callees;
2032 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2033 {
2034 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
2035 if (es && es->is_return_callee_uncaptured)
2036 callees.safe_push (cs->callee);
2037 }
2038
2039 malloc_state_e new_state = l->malloc_state;
2040 for (unsigned j = 0; j < callees.length (); j++)
2041 {
2042 cgraph_node *callee = callees[j];
2043 if (!funct_state_summaries->exists (callee))
2044 {
2045 new_state = STATE_MALLOC_BOTTOM;
2046 break;
2047 }
2048 malloc_state_e callee_state
2049 = funct_state_summaries->get_create (callee)->malloc_state;
2050 if (new_state < callee_state)
2051 new_state = callee_state;
2052 }
2053 if (new_state != l->malloc_state)
2054 {
2055 changed = true;
2056 l->malloc_state = new_state;
2057 }
2058 }
2059 }
2060
2061 FOR_EACH_DEFINED_FUNCTION (node)
2062 if (funct_state_summaries->exists (node))
2063 {
2064 funct_state l = funct_state_summaries->get (node);
2065 if (!node->alias
2066 && l->malloc_state == STATE_MALLOC
2067 && !node->inlined_to
2068 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
2069 {
2070 if (dump_file && (dump_flags & TDF_DETAILS))
2071 fprintf (dump_file, "Function %s found to be malloc\n",
2072 node->dump_name ());
2073
2074 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
2075 node->set_malloc_flag (true);
2076 if (!malloc_decl_p && warn_suggest_attribute_malloc)
2077 warn_function_malloc (node->decl);
2078 }
2079 }
2080
2081 dump_malloc_lattice (dump_file, "after propagation");
2082 ipa_free_postorder_info ();
2083 free (order);
2084 }
2085
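/* Editorial illustration, not part of the original sources.  A simple
   allocation wrapper is the kind of function the propagation above is
   meant to catch, e.g. the hypothetical

     #include <stdlib.h>
     void *xmalloc_like (size_t n)
     {
       void *p = malloc (n);
       if (!p)
         abort ();
       return p;
     }

   The returned pointer comes straight from a DECL_IS_MALLOC callee and is
   not captured, there are no indirect calls and the return type is a
   pointer, so the node reaches STATE_MALLOC and set_malloc_flag is called
   (with -Wsuggest-attribute=malloc warning when the attribute is
   missing).  */
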
2086 /* Produce the global information by performing a transitive closure
2087 on the local information that was produced by generate_summary. */
2088
2089 unsigned int
2090 pass_ipa_pure_const::
2091 execute (function *)
2092 {
2093 bool remove_p;
2094
2095 /* Proving functions nothrow first lets more calls and statements be
2096 recognized as not leading to a return, which improves the later analysis. */
2097 propagate_nothrow ();
2098 propagate_malloc ();
2099 remove_p = propagate_pure_const ();
2100
2101 delete funct_state_summaries;
2102 return remove_p ? TODO_remove_functions : 0;
2103 }
2104
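/* Editorial illustration, not part of the original sources.  Nothrow runs
   first because, with exceptions enabled, a call only counts as one that
   cannot lead to a return when its callee is both noreturn and nothrow.
   In a hypothetical

     extern void fail (void) __attribute__ ((noreturn));
     int checked (int x)
     {
       if (x < 0)
         fail ();
       return x + 1;
     }

   where fail is assumed to wrap abort, proving fail nothrow lets the
   pure/const propagation ignore its side effects when computing the
   purity of checked.  */
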
2105 static bool
2106 gate_pure_const (void)
2107 {
2108 return flag_ipa_pure_const || in_lto_p;
2109 }
2110
2111 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2112 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2113 pure_const_generate_summary, /* generate_summary */
2114 pure_const_write_summary, /* write_summary */
2115 pure_const_read_summary, /* read_summary */
2116 NULL, /* write_optimization_summary */
2117 NULL, /* read_optimization_summary */
2118 NULL, /* stmt_fixup */
2119 0, /* function_transform_todo_flags_start */
2120 NULL, /* function_transform */
2121 NULL), /* variable_transform */
2122 init_p (false) {}
2123
2124 ipa_opt_pass_d *
2125 make_pass_ipa_pure_const (gcc::context *ctxt)
2126 {
2127 return new pass_ipa_pure_const (ctxt);
2128 }
2129
2130 /* Simple local pass for pure const discovery reusing the analysis from
2131 ipa_pure_const. This pass is effective when executed together with
2132 other optimization passes in the early optimization pass queue. */
2133
2134 namespace {
2135
2136 const pass_data pass_data_local_pure_const =
2137 {
2138 GIMPLE_PASS, /* type */
2139 "local-pure-const", /* name */
2140 OPTGROUP_NONE, /* optinfo_flags */
2141 TV_IPA_PURE_CONST, /* tv_id */
2142 0, /* properties_required */
2143 0, /* properties_provided */
2144 0, /* properties_destroyed */
2145 0, /* todo_flags_start */
2146 0, /* todo_flags_finish */
2147 };
2148
2149 class pass_local_pure_const : public gimple_opt_pass
2150 {
2151 public:
2152 pass_local_pure_const (gcc::context *ctxt)
2153 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2154 {}
2155
2156 /* opt_pass methods: */
2157 opt_pass * clone () final override
2158 {
2159 return new pass_local_pure_const (m_ctxt);
2160 }
2161 bool gate (function *) final override { return gate_pure_const (); }
2162 unsigned int execute (function *) final override;
2163
2164 }; // class pass_local_pure_const
2165
2166 unsigned int
2167 pass_local_pure_const::execute (function *fun)
2168 {
2169 bool changed = false;
2170 funct_state l;
2171 bool skip;
2172 struct cgraph_node *node;
2173
2174 node = cgraph_node::get (current_function_decl);
2175 skip = skip_function_for_local_pure_const (node);
2176
2177 if (!warn_suggest_attribute_const
2178 && !warn_suggest_attribute_pure
2179 && skip)
2180 return 0;
2181
2182 l = analyze_function (node, false);
2183
2184 /* Do NORETURN discovery. */
2185 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
2186 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2187 {
2188 warn_function_noreturn (fun->decl);
2189 if (dump_file)
2190 fprintf (dump_file, "Function found to be noreturn: %s\n",
2191 current_function_name ());
2192
2193 /* Update declaration and reduce profile to executed once. */
2194 if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
2195 changed = true;
2196 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
2197 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2198 }
2199
2200 switch (l->pure_const_state)
2201 {
2202 case IPA_CONST:
2203 changed |= ipa_make_function_const
2204 (cgraph_node::get (current_function_decl), l->looping, true);
2205 break;
2206
2207 case IPA_PURE:
2208 changed |= ipa_make_function_pure
2209 (cgraph_node::get (current_function_decl), l->looping, true);
2210 break;
2211
2212 default:
2213 break;
2214 }
2215 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
2216 {
2217 node->set_nothrow_flag (true);
2218 changed = true;
2219 if (dump_file)
2220 fprintf (dump_file, "Function found to be nothrow: %s\n",
2221 current_function_name ());
2222 }
2223
2224 if (l->malloc_state == STATE_MALLOC
2225 && !DECL_IS_MALLOC (current_function_decl))
2226 {
2227 node->set_malloc_flag (true);
2228 if (warn_suggest_attribute_malloc)
2229 warn_function_malloc (node->decl);
2230 changed = true;
2231 if (dump_file)
2232 fprintf (dump_file, "Function found to be malloc: %s\n",
2233 node->dump_name ());
2234 }
2235
2236 free (l);
2237 if (changed)
2238 return execute_fixup_cfg ();
2239 else
2240 return 0;
2241 }
2242
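/* Editorial illustration, not part of the original sources.  Run in the
   early pass queue, the local pass above already marks leaf functions, so
   that a hypothetical

     static int counter;
     static int get_counter (void) { return counter; }

   becomes pure (it only reads static memory) before IPA propagation, and
   repeated calls to it can be CSEd by the remaining early passes.  */
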
2243 } // anon namespace
2244
2245 gimple_opt_pass *
2246 make_pass_local_pure_const (gcc::context *ctxt)
2247 {
2248 return new pass_local_pure_const (ctxt);
2249 }
2250
2251 /* Emit noreturn warnings. */
2252
2253 namespace {
2254
2255 const pass_data pass_data_warn_function_noreturn =
2256 {
2257 GIMPLE_PASS, /* type */
2258 "*warn_function_noreturn", /* name */
2259 OPTGROUP_NONE, /* optinfo_flags */
2260 TV_NONE, /* tv_id */
2261 PROP_cfg, /* properties_required */
2262 0, /* properties_provided */
2263 0, /* properties_destroyed */
2264 0, /* todo_flags_start */
2265 0, /* todo_flags_finish */
2266 };
2267
2268 class pass_warn_function_noreturn : public gimple_opt_pass
2269 {
2270 public:
2271 pass_warn_function_noreturn (gcc::context *ctxt)
2272 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2273 {}
2274
2275 /* opt_pass methods: */
2276 bool gate (function *) final override
2277 {
2278 return warn_suggest_attribute_noreturn;
2279 }
2280 unsigned int execute (function *fun) final override
2281 {
2282 if (!TREE_THIS_VOLATILE (current_function_decl)
2283 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2284 warn_function_noreturn (current_function_decl);
2285 return 0;
2286 }
2287
2288 }; // class pass_warn_function_noreturn
2289
2290 } // anon namespace
2291
2292 gimple_opt_pass *
2293 make_pass_warn_function_noreturn (gcc::context *ctxt)
2294 {
2295 return new pass_warn_function_noreturn (ctxt);
2296 }
2297
2298 /* Simple local pass for nothrow discovery. This pass is effective
2299 when executed together with other optimization passes in the early
2300 optimization pass queue. */
2301
2302 namespace {
2303
2304 const pass_data pass_data_nothrow =
2305 {
2306 GIMPLE_PASS, /* type */
2307 "nothrow", /* name */
2308 OPTGROUP_NONE, /* optinfo_flags */
2309 TV_IPA_PURE_CONST, /* tv_id */
2310 0, /* properties_required */
2311 0, /* properties_provided */
2312 0, /* properties_destroyed */
2313 0, /* todo_flags_start */
2314 0, /* todo_flags_finish */
2315 };
2316
2317 class pass_nothrow : public gimple_opt_pass
2318 {
2319 public:
2320 pass_nothrow (gcc::context *ctxt)
2321 : gimple_opt_pass (pass_data_nothrow, ctxt)
2322 {}
2323
2324 /* opt_pass methods: */
2325 opt_pass * clone () final override { return new pass_nothrow (m_ctxt); }
2326 bool gate (function *) final override { return optimize; }
2327 unsigned int execute (function *) final override;
2328
2329 }; // class pass_nothrow
2330
2331 unsigned int
2332 pass_nothrow::execute (function *)
2333 {
2334 struct cgraph_node *node;
2335 basic_block this_block;
2336
2337 if (TREE_NOTHROW (current_function_decl))
2338 return 0;
2339
2340 node = cgraph_node::get (current_function_decl);
2341
2342 /* We run during lowering, so we cannot really use availability yet. */
2343 if (cgraph_node::get (current_function_decl)->get_availability ()
2344 <= AVAIL_INTERPOSABLE)
2345 {
2346 if (dump_file)
2347 fprintf (dump_file, "Function is interposable;"
2348 " not analyzing.\n");
2349 return 0;
2350 }
2351
2352 FOR_EACH_BB_FN (this_block, cfun)
2353 {
2354 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2355 !gsi_end_p (gsi);
2356 gsi_next (&gsi))
2357 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
2358 {
2359 if (is_gimple_call (gsi_stmt (gsi)))
2360 {
2361 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2362 if (callee_t && recursive_call_p (current_function_decl,
2363 callee_t))
2364 continue;
2365 }
2366
2367 if (dump_file)
2368 {
2369 fprintf (dump_file, "Statement can throw: ");
2370 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
2371 }
2372 return 0;
2373 }
2374 }
2375
2376 node->set_nothrow_flag (true);
2377
2378 bool cfg_changed = false;
2379 if (self_recursive_p (node))
2380 FOR_EACH_BB_FN (this_block, cfun)
2381 if (gimple *g = last_stmt (this_block))
2382 if (is_gimple_call (g))
2383 {
2384 tree callee_t = gimple_call_fndecl (g);
2385 if (callee_t
2386 && recursive_call_p (current_function_decl, callee_t)
2387 && maybe_clean_eh_stmt (g)
2388 && gimple_purge_dead_eh_edges (this_block))
2389 cfg_changed = true;
2390 }
2391
2392 if (dump_file)
2393 fprintf (dump_file, "Function found to be nothrow: %s\n",
2394 current_function_name ());
2395 return cfg_changed ? TODO_cleanup_cfg : 0;
2396 }
2397
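/* Editorial illustration, not part of the original sources.  The special
   case for self-recursive calls above means that a C++ function like the
   hypothetical

     int count_down (int n)
     {
       if (n <= 0)
         return 0;
       return 1 + count_down (n - 1);
     }

   is still marked nothrow: its only statement that could throw externally
   is the recursive call, which is skipped, and the EH edges of recursive
   calls ending a basic block are purged afterwards.  */
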
2398 } // anon namespace
2399
2400 gimple_opt_pass *
2401 make_pass_nothrow (gcc::context *ctxt)
2402 {
2403 return new pass_nothrow (ctxt);
2404 }