/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not
   prone to this problem since they are by definition transitive.  */

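/* As a small, hypothetical illustration of the three possible outcomes
   (names are made up for this example):

     int sq (int x) { return x * x; }    // const: depends only on args
     int g;
     int rd (void) { return g; }         // pure: also reads global memory
     void wr (int v) { g = v; }          // neither: writes global memory

   sq can be marked TREE_READONLY, rd DECL_PURE_P, and wr neither.  */
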
/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};
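
/* The enumerators above are ordered from the strongest guarantee to the
   weakest, so the smaller of two states is the better one and the larger
   is the worse one; the comparisons in better_state and the MAX in
   worse_state below rely on this ordering.  */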

static const char *pure_const_names[3] = {"const", "pure", "neither"};

enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
class funct_state_d
{
public:
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to CSE pure and const
     calls, it is not legal to have DCE get rid of the call if there
     is a possibility that the call could loop forever since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* True if the function can call free, munmap or otherwise make
     previously non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};

typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because it may be
   desirable to move this to the cgraph local info.  */

class funct_state_summary_t: public function_summary <funct_state_d *>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    function_summary <funct_state_d *> (symtab) {}

  virtual void insert (cgraph_node *, funct_state_d *state);
  virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
                          funct_state_d *src_data,
                          funct_state_d *dst_data);
};

static funct_state_summary_t *funct_state_summaries = NULL;

static bool gate_pure_const (void);

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const (gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace

/* Try to guess if the function body will always be visible to the
   compiler when compiling the call and whether the compiler will be
   able to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
          || DECL_COMDAT (decl));
}

/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
                   hash_set<tree> *warned_about,
                   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
              option,
              known_finite
              ? G_("function might be candidate for attribute %qs")
              : G_("function might be candidate for attribute %qs"
                   " if it is known to return normally"), attrib_name);
  return warned_about;
}

/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Declaring a void function pure makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
                         known_finite, warned_about, "pure");
}

/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
                         known_finite, warned_about, "const");
}

/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
                         true, warned_about, "malloc");
}

/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
                           true, warned_about, "noreturn");
}

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
                         true, warned_about, "cold");
}

/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
            tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t))
        return; /* Read of a constant, do not change the function state.  */
      else
        {
          if (dump_file)
            fprintf (dump_file, " global memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
        }
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
         variables.  */
      if (TREE_READONLY (t))
        return;

      if (dump_file)
        fprintf (dump_file, " static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}

/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of operand T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
           && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
           && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
           && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
        fprintf (dump_file, " Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, " Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}

/* Compute the state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
                  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the
   better of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
              enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
        *looping = looping2;
      else
        *looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
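
/* For example, merging a body found to be looping pure with a previously
   known non-looping const declaration:

     enum pure_const_state_e s = IPA_PURE;
     bool l = true;
     better_state (&s, &l, IPA_CONST, false);  // now s == IPA_CONST, l == false

   i.e. the stronger of the two variants wins on both dimensions.  */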

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the
   worse of the two variants into STATE and LOOPING.  FROM is the
   calling symtab node and TO the callee whose binding is checked
   for interposition.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
             enum pure_const_state_e state2, bool looping2,
             struct symtab_node *from,
             struct symtab_node *to)
{
  /* Consider the function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now this function will be detected as CONST; however, when interposed
     it may end up being just pure.  We must always assume the worst
     scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Dropping state to PURE because call to %s may not "
                 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}

/* Recognize special cases of builtins that are by themselves not pure or const
   but a function using them is.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
                       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
        *looping = false;
        *state = IPA_CONST;
        return true;
      case BUILT_IN_PREFETCH:
        *looping = true;
        *state = IPA_CONST;
        return true;
      default:
        break;
      }
  return false;
}

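/* For instance, a function using a variable-length array compiles down to
   __builtin_alloca plus a stack save/restore pair; all three are listed
   above, so a (hypothetical) function like

     int first_of (int n) { int vla[n]; vla[0] = n; return vla[0]; }

   is not disqualified from being detected const by those builtin calls
   alone (of the cases above, only BUILT_IN_PREFETCH is additionally
   marked looping).  */
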
/* Check the parameters of a function call to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  CALL is the call
   statement being checked.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
                                     && stmt_can_throw_external (cfun, call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
            && tree_could_throw_p (gimple_op (call, i)))
          {
            if (possibly_throws && cfun->can_throw_non_call_exceptions)
              {
                if (dump_file)
                  fprintf (dump_file, " operand can throw; looping\n");
                local->looping = true;
              }
            if (possibly_throws_externally)
              {
                if (dump_file)
                  fprintf (dump_file, " operand can throw externally\n");
                local->can_throw = true;
              }
          }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
          && !nonfreeing_call_p (call))
        local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
        {
          worse_state (&local->pure_const_state, &local->looping,
                       call_state, call_looping,
                       NULL, NULL);
          return;
        }
      /* When bad things happen to bad functions, they cannot be const
         or pure.  */
      if (setjmp_call_p (callee_t))
        {
          if (dump_file)
            fprintf (dump_file, " setjmp is not const/pure\n");
          local->looping = true;
          local->pure_const_state = IPA_NEITHER;
        }

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee_t))
          {
          case BUILT_IN_LONGJMP:
          case BUILT_IN_NONLOCAL_GOTO:
            if (dump_file)
              fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
            local->pure_const_state = IPA_NEITHER;
            local->looping = true;
            break;
          default:
            break;
          }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, " Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, " can throw; looping\n");
          local->looping = true;
        }
      if (possibly_throws_externally)
        {
          if (dump_file)
            {
              fprintf (dump_file, " can throw externally to lp %i\n",
                       lookup_stmt_eh_lp (call));
              if (callee_t)
                fprintf (dump_file, " callee:%s\n",
                         IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
            }
          local->can_throw = true;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
                        ((flags & (ECF_NORETURN | ECF_NOTHROW))
                         == (ECF_NORETURN | ECF_NOTHROW))
                        || (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
                   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}

/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Look at the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobbers as side effects before IPA, so that we would
     rather inline C++ destructors and keep clobber semantics than
     eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a
     common case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
                            ipa ? check_ipa_load : check_load,
                            ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, " can throw; looping\n");
          local->looping = true;
        }
      if (stmt_can_throw_external (cfun, stmt))
        {
          if (dump_file)
            fprintf (dump_file, " can throw externally\n");
          local->can_throw = true;
        }
      else
        if (dump_file)
          fprintf (dump_file, " can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
        /* Target of long jump.  */
        {
          if (dump_file)
            fprintf (dump_file, " nonlocal label is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, " memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->can_free = true;
        }
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, " volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
          local->can_free = true;
        }
      return;
    default:
      break;
    }
}

/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is the return value of the function and STMT is the return
   statement.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
        tree op2 = gimple_cond_rhs (cond);
        if (!integer_zerop (op2))
          RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
        enum tree_code code = gimple_assign_rhs_code (ga);
        if (TREE_CODE_CLASS (code) != tcc_comparison)
          RETURN_FROM_IMM_USE_STMT (use_iter, false);
        if (!integer_zerop (gimple_assign_rhs2 (ga)))
          RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      RETURN_FROM_IMM_USE_STMT (use_iter, false);

  return true;
}

/* malloc_candidate_p() checks if FUN can possibly be annotated with the
   malloc attribute.  Currently this function does a very conservative
   analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and each element of the phi is either NULL or its
      SSA_NAME_DEF_STMT is a function call.
   3) The return value has immediate uses only within comparisons
      (gcond or gassign) and the return stmt (and likewise a phi arg
      has immediate uses only within a comparison or the phi stmt).  */

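/* A minimal sketch of a function that satisfies all three conditions
   (the name is hypothetical):

     void *my_alloc (size_t n)
     {
       void *p = malloc (n);
       if (p == 0)
         exit (1);
       return p;
     }

   The returned SSA_NAME is a pointer defined by a call to a function
   with the malloc attribute, and its only other immediate use is the
   comparison against 0.  */
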
#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
             (node->name()), (reason)); \
  return false;  \
}

static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
                      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
                    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
        return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
        DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
                        " non-ipa mode.")

      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
        {
          ipa_call_summary *es = ipa_call_summaries->get_create (cs);
          es->is_return_callee_uncaptured = true;
        }
    }

  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
        {
          tree arg = gimple_phi_arg_def (phi, i);
          if (integer_zerop (arg))
            continue;

          all_args_zero = false;
          if (TREE_CODE (arg) != SSA_NAME)
            DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
          if (!check_retval_uses (arg, phi))
            DUMP_AND_RETURN ("phi arg has uses outside phi"
                             " and comparisons against 0.")

          gimple *arg_def = SSA_NAME_DEF_STMT (arg);
          if (is_a<gphi *> (arg_def))
            {
              if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
                DUMP_AND_RETURN ("nested phi fail")
              continue;
            }

          gcall *call_stmt = dyn_cast<gcall *> (arg_def);
          if (!call_stmt)
            DUMP_AND_RETURN ("phi arg is not a call_stmt.")

          tree callee_decl = gimple_call_fndecl (call_stmt);
          if (!callee_decl)
            return false;
          if (!ipa && !DECL_IS_MALLOC (callee_decl))
            DUMP_AND_RETURN("callee_decl does not have malloc attribute"
                            " for non-ipa mode.")

          cgraph_edge *cs = node->get_edge (call_stmt);
          if (cs)
            {
              ipa_call_summary *es = ipa_call_summaries->get_create (cs);
              es->is_return_callee_uncaptured = true;
            }
        }

      if (all_args_zero)
        DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }

  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  auto_bitmap visited;
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
        return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
        DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
          || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
        DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
        return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
             IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}

#undef DUMP_AND_RETURN

/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
                    flags_from_decl_or_type (fn->decl),
                    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
        l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
               fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          check_stmt (&gsi, l, ipa);
          if (l->pure_const_state == IPA_NEITHER
              && l->looping
              && l->can_throw
              && l->can_free)
            goto end;
        }
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an indication of a
         possible infinite loop side effect).  */
      if (mark_dfs_back_edges ())
        {
          /* Preheaders are needed for SCEV to work.
             Simple latches and recorded exits improve the chances that the
             loop will be proved finite in testcases such as loop-15.c
             and loop-24.c.  */
          loop_optimizer_init (LOOPS_HAVE_PREHEADERS
                               | LOOPS_HAVE_SIMPLE_LATCHES
                               | LOOPS_HAVE_RECORDED_EXITS);
          if (dump_file && (dump_flags & TDF_DETAILS))
            flow_loops_dump (dump_file, NULL, 0);
          if (mark_irreducible_loops ())
            {
              if (dump_file)
                fprintf (dump_file, " has irreducible loops\n");
              l->looping = true;
            }
          else
            {
              struct loop *loop;
              scev_initialize ();
              FOR_EACH_LOOP (loop, 0)
                if (!finite_loop_p (loop))
                  {
                    if (dump_file)
                      fprintf (dump_file, " cannot prove finiteness of "
                               "loop %i\n", loop->num);
                    l->looping = true;
                    break;
                  }
              scev_finalize ();
            }
          loop_optimizer_finalize ();
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
                l->state_previously_known,
                l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
        fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
        fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}

void
funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the address-of
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    {
      funct_state_d *a = analyze_function (node, true);
      new (state) funct_state_d (*a);
      free (a);
    }
}

/* Called when a new clone is inserted into the callgraph late.  */

void
funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *,
                                  funct_state_d *src_data,
                                  funct_state_d *dst_data)
{
  new (dst_data) funct_state_d (*src_data);
}

void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  funct_state_summaries = new funct_state_summary_t (symtab);
}

/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when a function gets cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      {
        funct_state_d *a = analyze_function (node, true);
        new (funct_state_summaries->get_create (node)) funct_state_d (*a);
        free (a);
      }
}

/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
        count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
        {
          struct bitpack_d bp;
          int node_ref;
          lto_symtab_encoder_t encoder;

          encoder = ob->decl_state->symtab_node_encoder;
          node_ref = lto_symtab_encoder_encode (encoder, node);
          streamer_write_uhwi_stream (ob->main_stream, node_ref);

          /* Note that flags will need to be read in the opposite
             order as we are pushing the bitflags into FLAGS.  */
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, fs->pure_const_state, 2);
          bp_pack_value (&bp, fs->state_previously_known, 2);
          bp_pack_value (&bp, fs->looping_previously_known, 1);
          bp_pack_value (&bp, fs->looping, 1);
          bp_pack_value (&bp, fs->can_throw, 1);
          bp_pack_value (&bp, fs->can_free, 1);
          bp_pack_value (&bp, fs->malloc_state, 2);
          streamer_write_bitpack (&bp);
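          /* In total 2+2+1+1+1+1+2 = 10 bits are streamed per function.  */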
        }
    }

  lto_destroy_simple_output_block (ob);
}

/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
        = lto_create_simple_input_block (file_data,
                                         LTO_section_ipa_pure_const,
                                         &data, &len);
      if (ib)
        {
          unsigned int i;
          unsigned int count = streamer_read_uhwi (ib);

          for (i = 0; i < count; i++)
            {
              unsigned int index;
              struct cgraph_node *node;
              struct bitpack_d bp;
              funct_state fs;
              lto_symtab_encoder_t encoder;

              index = streamer_read_uhwi (ib);
              encoder = file_data->symtab_node_encoder;
              node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                        index));

              fs = funct_state_summaries->get_create (node);
              /* Note that the flags must be read in the opposite
                 order in which they were written (the bitflags were
                 pushed into FLAGS).  */
              bp = streamer_read_bitpack (ib);
              fs->pure_const_state
                = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->state_previously_known
                = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->looping_previously_known = bp_unpack_value (&bp, 1);
              fs->looping = bp_unpack_value (&bp, 1);
              fs->can_throw = bp_unpack_value (&bp, 1);
              fs->can_free = bp_unpack_value (&bp, 1);
              fs->malloc_state
                = (enum malloc_state_e) bp_unpack_value (&bp, 2);

              if (dump_file)
                {
                  int flags = flags_from_decl_or_type (node->decl);
                  fprintf (dump_file, "Read info for %s ", node->dump_name ());
                  if (flags & ECF_CONST)
                    fprintf (dump_file, " const");
                  if (flags & ECF_PURE)
                    fprintf (dump_file, " pure");
                  if (flags & ECF_NOTHROW)
                    fprintf (dump_file, " nothrow");
                  fprintf (dump_file, "\n pure const state: %s\n",
                           pure_const_names[fs->pure_const_state]);
                  fprintf (dump_file, " previously known state: %s\n",
                           pure_const_names[fs->state_previously_known]);
                  if (fs->looping)
                    fprintf (dump_file, " function is locally looping\n");
                  if (fs->looping_previously_known)
                    fprintf (dump_file, " function is previously known looping\n");
                  if (fs->can_throw)
                    fprintf (dump_file, " function is locally throwing\n");
                  if (fs->can_free)
                    fprintf (dump_file, " function can locally free\n");
                  fprintf (dump_file, "\n malloc state: %s\n",
                           malloc_state_names[fs->malloc_state]);
                }
            }

          lto_destroy_simple_input_block (file_data,
                                          LTO_section_ipa_pure_const,
                                          ib, data, len);
        }
    }
}

/* We only propagate across edges that can throw externally and whose
   callee is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
                                                                e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
    return true;
  return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
         && !e->callee->binds_to_current_def_p (e->caller);
}

/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}

/* Return true if N is a cdtor that is not const or pure.  In this case we may
   need to remove an unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
            || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}

/* We only propagate across edges with a non-interposable callee.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  return (avail <= AVAIL_INTERPOSABLE);
}

/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  order_pos = ipa_reduced_postorder (order, true,
                                     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
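  /* For instance, if functions A and B call each other and B also calls
     a pure function C, then {A, B} collapses into one region: both end
     up with the worst state found anywhere in the cycle or on its
     outgoing edges, and since the cycle has more than one node it is
     additionally considered possibly looping (see count > 1 below).  */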
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
        {
          struct cgraph_edge *e;
          struct cgraph_edge *ie;
          int i;
          struct ipa_ref *ref = NULL;

          funct_state w_l = funct_state_summaries->get_create (w);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, " Visiting %s state:%s looping %i\n",
                     w->dump_name (),
                     pure_const_names[w_l->pure_const_state],
                     w_l->looping);

          /* First merge in function body properties.
             We are safe to pass NULL as FROM and TO because we will take care
             of possible interposition when walking callees.  */
          worse_state (&pure_const_state, &looping,
                       w_l->pure_const_state, w_l->looping,
                       NULL, NULL);
          if (pure_const_state == IPA_NEITHER)
            break;

          count++;

          /* We consider recursive cycles as possibly infinite.
             This might be relaxed since infinite recursion leads to stack
             overflow.  */
          if (count > 1)
            looping = true;

          /* Now walk the edges and merge in callee properties.  */
          for (e = w->callees; e && pure_const_state != IPA_NEITHER;
               e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                function_or_virtual_thunk_symbol (&avail,
                                                  e->caller);
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, " Call to %s",
                           e->callee->dump_name ());
                }
              if (avail > AVAIL_INTERPOSABLE)
                {
                  funct_state y_l = funct_state_summaries->get (y);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file,
                               " state:%s looping:%i\n",
                               pure_const_names[y_l->pure_const_state],
                               y_l->looping);
                    }
                  if (y_l->pure_const_state > IPA_PURE
                      && e->cannot_lead_to_return_p ())
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file,
                                 " Ignoring side effects"
                                 " -> pure, looping\n");
                      edge_state = IPA_PURE;
                      edge_looping = true;
                    }
                  else
                    {
                      edge_state = y_l->pure_const_state;
                      edge_looping = y_l->looping;
                    }
                }
              else if (special_builtin_state (&edge_state, &edge_looping,
                                              y->decl))
                ;
              else
                state_from_flags (&edge_state, &edge_looping,
                                  flags_from_decl_or_type (y->decl),
                                  e->cannot_lead_to_return_p ());

              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping, e->caller, e->callee);
              if (pure_const_state == IPA_NEITHER)
                break;
            }

          /* Now process the indirect calls.  */
          for (ie = w->indirect_calls;
               ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
            {
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, " Indirect call");
              state_from_flags (&edge_state, &edge_looping,
                                ie->indirect_info->ecf_flags,
                                ie->cannot_lead_to_return_p ());
              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping, NULL, NULL);
              if (pure_const_state == IPA_NEITHER)
                break;
            }

          /* And finally all loads and stores.  */
          for (i = 0; w->iterate_reference (i, ref)
               && pure_const_state != IPA_NEITHER; i++)
            {
              enum pure_const_state_e ref_state = IPA_CONST;
              bool ref_looping = false;
              switch (ref->use)
                {
                case IPA_REF_LOAD:
                  /* readonly reads are safe.  */
                  if (TREE_READONLY (ref->referred->decl))
                    break;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, " nonreadonly global var read\n");
                  ref_state = IPA_PURE;
                  break;
                case IPA_REF_STORE:
                  if (ref->cannot_lead_to_return ())
                    break;
                  ref_state = IPA_NEITHER;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, " global var write\n");
                  break;
                case IPA_REF_ADDR:
                  break;
                default:
                  gcc_unreachable ();
                }
              better_state (&ref_state, &ref_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           ref_state, ref_looping, NULL, NULL);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Result %s looping %i\n",
                 pure_const_names[pure_const_state],
                 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
        {
          struct cgraph_edge *e;
          funct_state w_l = funct_state_summaries->get (w);

          if (w_l->can_free
              || w->get_availability () == AVAIL_INTERPOSABLE
              || w->indirect_calls)
            can_free = true;

          for (e = w->callees; e && !can_free; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                function_or_virtual_thunk_symbol (&avail,
                                                  e->caller);

              if (avail > AVAIL_INTERPOSABLE)
                can_free = funct_state_summaries->get (y)->can_free;
              else
                can_free = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = funct_state_summaries->get (w);
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;

          w_l->can_free = can_free;
          w->nonfreeing_fn = !can_free;
          if (!can_free && dump_file)
            fprintf (dump_file, "Function found not to call free: %s\n",
                     w->name ());

          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            {
              this_state = w_l->state_previously_known;
              if (this_state == IPA_NEITHER)
                this_looping = w_l->looping_previously_known;
            }
          if (!this_looping && self_recursive_p (w))
            this_looping = true;
          if (!w_l->looping_previously_known)
            this_looping = false;

          /* All nodes within a cycle share the same info.  */
          w_l->pure_const_state = this_state;
          w_l->looping = this_looping;

          /* Inline clones share a declaration with their offline copies;
             do not modify their declarations since the offline copy may
             be different.  */
          if (!w->global.inlined_to)
            switch (this_state)
              {
              case IPA_CONST:
                if (!TREE_READONLY (w->decl))
                  {
                    warn_function_const (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %sconst: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                  }
                /* Turning a constructor or destructor to non-looping
                   const/pure enables us to possibly remove the function
                   completely.  */
                if (this_looping)
                  has_cdtor = false;
                else
                  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
                                                              NULL, true);
                if (w->set_const_flag (true, this_looping))
                  {
                    if (dump_file)
                      fprintf (dump_file,
                               "Declaration updated to be %sconst: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                    remove_p |= has_cdtor;
                  }
                break;

              case IPA_PURE:
                if (!DECL_PURE_P (w->decl))
                  {
                    warn_function_pure (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %spure: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                  }
                if (this_looping)
                  has_cdtor = false;
                else
                  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
                                                              NULL, true);
                if (w->set_pure_flag (true, this_looping))
                  {
                    if (dump_file)
                      fprintf (dump_file,
                               "Declaration updated to be %spure: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                    remove_p |= has_cdtor;
                  }
                break;

              default:
                break;
              }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}

/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true,
                                     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
        continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
        {
          struct cgraph_edge *e, *ie;

          if (!TREE_NOTHROW (w->decl))
            {
              funct_state w_l = funct_state_summaries->get_create (w);

              if (w_l->can_throw
                  || w->get_availability () == AVAIL_INTERPOSABLE)
                can_throw = true;

              for (e = w->callees; e && !can_throw; e = e->next_callee)
                {
                  enum availability avail;

                  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
                    continue;

                  struct cgraph_node *y = e->callee->
                    function_or_virtual_thunk_symbol (&avail,
                                                      e->caller);

                  /* We can use info about the callee only if we know it
                     cannot be interposed.
                     When the callee is compiled with non-call exceptions we
                     also must check that the declaration is bound to the
                     current body as another semantically equivalent body
                     may still throw.  */
                  if (avail <= AVAIL_INTERPOSABLE
                      || (!TREE_NOTHROW (y->decl)
                          && (funct_state_summaries->get_create (y)->can_throw
                              || (opt_for_fn (y->decl, flag_non_call_exceptions)
                                  && !e->callee->binds_to_current_def_p (w)))))
                    can_throw = true;
                }
              for (ie = w->indirect_calls; ie && !can_throw;
                   ie = ie->next_callee)
                if (ie->can_throw_external
                    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
                  can_throw = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's nothrow state, which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = funct_state_summaries->get_create (w);
          if (!can_throw && !TREE_NOTHROW (w->decl))
            {
              /* Inline clones share a declaration with their offline copies;
                 do not modify their declarations since the offline copy may
                 be different.  */
              if (!w->global.inlined_to)
                {
                  w->set_nothrow_flag (true);
                  if (dump_file)
                    fprintf (dump_file, "Function found to be nothrow: %s\n",
                             w->name ());
                }
            }
          else if (can_throw && !TREE_NOTHROW (w->decl))
            w_l->can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
}

1852 /* Debugging function to dump state of malloc lattice. */
1853
1854 DEBUG_FUNCTION
1855 static void
1856 dump_malloc_lattice (FILE *dump_file, const char *s)
1857 {
1858 if (!dump_file)
1859 return;
1860
1861 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1862 cgraph_node *node;
1863 FOR_EACH_FUNCTION (node)
1864 {
1865 funct_state fs = funct_state_summaries->get (node);
1866 if (fs)
1867 fprintf (dump_file, "%s: %s\n", node->name (),
1868 malloc_state_names[fs->malloc_state]);
1869 }
1870 }
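
/* Editorial note: the rough shape of the dump this helper produces, with
   invented node names; each line pairs a function with its
   malloc_state_names entry:

     MALLOC LATTICE Initial:
     my_alloc: malloc
     main: malloc_bottom
*/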
1871
1872 /* Propagate malloc attribute across the callgraph. */
1873
1874 static void
1875 propagate_malloc (void)
1876 {
1877 cgraph_node *node;
1878 FOR_EACH_FUNCTION (node)
1879 {
1880 if (DECL_IS_MALLOC (node->decl))
1881 if (!funct_state_summaries->exists (node))
1882 {
1883 funct_state fs = funct_state_summaries->get_create (node);
1884 fs->malloc_state = STATE_MALLOC;
1885 }
1886 }
1887
1888 dump_malloc_lattice (dump_file, "Initial");
1889 struct cgraph_node **order
1890 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1891 int order_pos = ipa_reverse_postorder (order);
1892 bool changed = true;
1893
1894 while (changed)
1895 {
1896 changed = false;
1897 /* Walk in postorder. */
1898 for (int i = order_pos - 1; i >= 0; --i)
1899 {
1900 cgraph_node *node = order[i];
1901 if (node->alias
1902 || !node->definition
1903 || !funct_state_summaries->exists (node))
1904 continue;
1905
1906 funct_state l = funct_state_summaries->get (node);
1907
1908 /* FIXME: add support for indirect-calls. */
1909 if (node->indirect_calls)
1910 {
1911 l->malloc_state = STATE_MALLOC_BOTTOM;
1912 continue;
1913 }
1914
1915 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1916 {
1917 l->malloc_state = STATE_MALLOC_BOTTOM;
1918 continue;
1919 }
1920
1921 if (l->malloc_state == STATE_MALLOC_BOTTOM)
1922 continue;
1923
1924 vec<cgraph_node *> callees = vNULL;
1925 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
1926 {
1927 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
1928 if (es && es->is_return_callee_uncaptured)
1929 callees.safe_push (cs->callee);
1930 }
1931
1932 malloc_state_e new_state = l->malloc_state;
1933 for (unsigned j = 0; j < callees.length (); j++)
1934 {
1935 cgraph_node *callee = callees[j];
1936 if (!funct_state_summaries->exists (callee))
1937 {
1938 new_state = STATE_MALLOC_BOTTOM;
1939 break;
1940 }
1941 malloc_state_e callee_state
1942 = funct_state_summaries->get_create (callee)->malloc_state;
1943 if (new_state < callee_state)
1944 new_state = callee_state;
1945 }
1946 if (new_state != l->malloc_state)
1947 {
1948 changed = true;
1949 l->malloc_state = new_state;
1950 }
1951 }
1952 }
1953
1954 FOR_EACH_DEFINED_FUNCTION (node)
1955 if (funct_state_summaries->exists (node))
1956 {
1957 funct_state l = funct_state_summaries->get (node);
1958 if (!node->alias
1959 && l->malloc_state == STATE_MALLOC
1960 && !node->global.inlined_to)
1961 {
1962 if (dump_file && (dump_flags & TDF_DETAILS))
1963 fprintf (dump_file, "Function %s found to be malloc\n",
1964 node->name ());
1965
1966 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
1967 node->set_malloc_flag (true);
1968 if (!malloc_decl_p && warn_suggest_attribute_malloc)
1969 warn_function_malloc (node->decl);
1970 }
1971 }
1972
1973 dump_malloc_lattice (dump_file, "after propagation");
1974 ipa_free_postorder_info ();
1975 free (order);
1976 }
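
/* Editorial sketch, not from the original source: the kind of wrapper the
   propagation above is meant to promote.  The only return value of
   my_alloc_wrapper is the uncaptured result of a callee already declared
   malloc, so is_return_callee_uncaptured holds for the call and the
   wrapper reaches STATE_MALLOC:

     __attribute__ ((malloc)) extern void *my_alloc (unsigned long n);

     void *
     my_alloc_wrapper (unsigned long n)
     {
       return my_alloc (n);   // return value is never captured locally
     }

   With -Wsuggest-attribute=malloc the wrapper would then be reported via
   warn_function_malloc.  */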
1977
1978 /* Produce the global information by performing a transitive closure
1979 on the local information that was produced by generate_summary. */
1980
1981 unsigned int
1982 pass_ipa_pure_const::
1983 execute (function *)
1984 {
1985 bool remove_p;
1986
1987 /* Discover nothrow first: it removes exceptional paths to return in
1988 more functions and improves the later analyses. */
1989 propagate_nothrow ();
1990 propagate_malloc ();
1991 remove_p = propagate_pure_const ();
1992
1993 delete funct_state_summaries;
1994 return remove_p ? TODO_remove_functions : 0;
1995 }
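
/* Editorial note on the ordering above, with a hypothetical example: once
   propagate_nothrow marks helper as nothrow, the call in user carries no
   EH edge, so propagate_pure_const can place user higher in the lattice
   than it otherwise could:

     int helper (int x) { return x * x; }          // nothrow (and const)
     int user (int x) { return helper (x) + 1; }   // benefits from the above
*/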
1996
1997 static bool
1998 gate_pure_const (void)
1999 {
2000 return flag_ipa_pure_const || in_lto_p;
2001 }
2002
2003 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2004 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2005 pure_const_generate_summary, /* generate_summary */
2006 pure_const_write_summary, /* write_summary */
2007 pure_const_read_summary, /* read_summary */
2008 NULL, /* write_optimization_summary */
2009 NULL, /* read_optimization_summary */
2010 NULL, /* stmt_fixup */
2011 0, /* function_transform_todo_flags_start */
2012 NULL, /* function_transform */
2013 NULL), /* variable_transform */
2014 init_p (false) {}
2015
2016 ipa_opt_pass_d *
2017 make_pass_ipa_pure_const (gcc::context *ctxt)
2018 {
2019 return new pass_ipa_pure_const (ctxt);
2020 }
2021
2022 /* Return true if function should be skipped for local pure const analysis. */
2023
2024 static bool
2025 skip_function_for_local_pure_const (struct cgraph_node *node)
2026 {
2027 /* Because we do not schedule pass_fixup_cfg over the whole program after
2028 early optimizations, we must not promote functions that are called by
2029 already-processed functions. */
2030
2031 if (function_called_by_processed_nodes_p ())
2032 {
2033 if (dump_file)
2034 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2035 return true;
2036 }
2037 /* Save some work and do not analyze functions which are interposable and
2038 do not have any non-interposable aliases. */
2039 if (node->get_availability () <= AVAIL_INTERPOSABLE
2040 && !node->has_aliases_p ())
2041 {
2042 if (dump_file)
2043 fprintf (dump_file,
2044 "Function is interposable; not analyzing.\n");
2045 return true;
2046 }
2047 return false;
2048 }
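
/* Editorial sketch of the interposability rule above, assuming a shared
   object built with -fPIC and default symbol visibility: an exported
   function can be replaced at dynamic-link time, so its body cannot be
   trusted, while a static one is analyzable:

     int exported (int x) { return x + 1; }       // AVAIL_INTERPOSABLE: skipped
     static int local (int x) { return x + 1; }   // analyzed; may become const
*/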
2049
2050 /* Simple local pass for pure const discovery reusing the analysis from
2051 ipa_pure_const. This pass is effective when executed together with
2052 other optimization passes in the early optimization pass queue. */
2053
2054 namespace {
2055
2056 const pass_data pass_data_local_pure_const =
2057 {
2058 GIMPLE_PASS, /* type */
2059 "local-pure-const", /* name */
2060 OPTGROUP_NONE, /* optinfo_flags */
2061 TV_IPA_PURE_CONST, /* tv_id */
2062 0, /* properties_required */
2063 0, /* properties_provided */
2064 0, /* properties_destroyed */
2065 0, /* todo_flags_start */
2066 0, /* todo_flags_finish */
2067 };
2068
2069 class pass_local_pure_const : public gimple_opt_pass
2070 {
2071 public:
2072 pass_local_pure_const (gcc::context *ctxt)
2073 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2074 {}
2075
2076 /* opt_pass methods: */
2077 opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
2078 virtual bool gate (function *) { return gate_pure_const (); }
2079 virtual unsigned int execute (function *);
2080
2081 }; // class pass_local_pure_const
2082
2083 unsigned int
2084 pass_local_pure_const::execute (function *fun)
2085 {
2086 bool changed = false;
2087 funct_state l;
2088 bool skip;
2089 struct cgraph_node *node;
2090
2091 node = cgraph_node::get (current_function_decl);
2092 skip = skip_function_for_local_pure_const (node);
2093
2094 if (!warn_suggest_attribute_const
2095 && !warn_suggest_attribute_pure
2096 && skip)
2097 return 0;
2098
2099 l = analyze_function (node, false);
2100
2101 /* Do NORETURN discovery. */
2102 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
2103 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2104 {
2105 warn_function_noreturn (fun->decl);
2106 if (dump_file)
2107 fprintf (dump_file, "Function found to be noreturn: %s\n",
2108 current_function_name ());
2109
2110 /* Update declaration and reduce profile to executed once. */
2111 TREE_THIS_VOLATILE (current_function_decl) = 1;
2112 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
2113 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2114
2115 changed = true;
2116 }
2117
2118 switch (l->pure_const_state)
2119 {
2120 case IPA_CONST:
2121 if (!TREE_READONLY (current_function_decl))
2122 {
2123 warn_function_const (current_function_decl, !l->looping);
2124 if (dump_file)
2125 fprintf (dump_file, "Function found to be %sconst: %s\n",
2126 l->looping ? "looping " : "",
2127 current_function_name ());
2128 }
2129 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
2130 && !l->looping)
2131 {
2132 if (dump_file)
2133 fprintf (dump_file, "Function found to be non-looping: %s\n",
2134 current_function_name ());
2135 }
2136 if (!skip && node->set_const_flag (true, l->looping))
2137 {
2138 if (dump_file)
2139 fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
2140 l->looping ? "looping " : "",
2141 current_function_name ());
2142 changed = true;
2143 }
2144 break;
2145
2146 case IPA_PURE:
2147 if (!DECL_PURE_P (current_function_decl))
2148 {
2149 warn_function_pure (current_function_decl, !l->looping);
2150 if (dump_file)
2151 fprintf (dump_file, "Function found to be %spure: %s\n",
2152 l->looping ? "looping " : "",
2153 current_function_name ());
2154 }
2155 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
2156 && !l->looping)
2157 {
2158 if (dump_file)
2159 fprintf (dump_file, "Function found to be non-looping: %s\n",
2160 current_function_name ());
2161 }
2162 if (!skip && node->set_pure_flag (true, l->looping))
2163 {
2164 if (dump_file)
2165 fprintf (dump_file, "Declaration updated to be %spure: %s\n",
2166 l->looping ? "looping " : "",
2167 current_function_name ());
2168 changed = true;
2169 }
2170 break;
2171
2172 default:
2173 break;
2174 }
2175 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
2176 {
2177 node->set_nothrow_flag (true);
2178 changed = true;
2179 if (dump_file)
2180 fprintf (dump_file, "Function found to be nothrow: %s\n",
2181 current_function_name ());
2182 }
2183
2184 if (l->malloc_state == STATE_MALLOC
2185 && !DECL_IS_MALLOC (current_function_decl))
2186 {
2187 node->set_malloc_flag (true);
2188 if (warn_suggest_attribute_malloc)
2189 warn_function_malloc (node->decl);
2190 changed = true;
2191 if (dump_file)
2192 fprintf (dump_file, "Function found to be malloc: %s\n",
2193 node->name ());
2194 }
2195
2196 free (l);
2197 if (changed)
2198 return execute_fixup_cfg ();
2199 else
2200 return 0;
2201 }
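
/* Editorial sketch, with invented names, of what the execute function
   above can discover at -O2 with -Wsuggest-attribute=const/pure:

     static int cfg_value;

     static int sq (int x) { return x * x; }      // IPA_CONST: no memory access
     static int rd (void) { return cfg_value; }   // IPA_PURE: reads memory only
     static void spin (void) { for (;;) ; }       // exit block unreachable:
                                                  // noreturn discovery fires
*/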
2202
2203 } // anon namespace
2204
2205 gimple_opt_pass *
2206 make_pass_local_pure_const (gcc::context *ctxt)
2207 {
2208 return new pass_local_pure_const (ctxt);
2209 }
2210
2211 /* Emit noreturn warnings. */
2212
2213 namespace {
2214
2215 const pass_data pass_data_warn_function_noreturn =
2216 {
2217 GIMPLE_PASS, /* type */
2218 "*warn_function_noreturn", /* name */
2219 OPTGROUP_NONE, /* optinfo_flags */
2220 TV_NONE, /* tv_id */
2221 PROP_cfg, /* properties_required */
2222 0, /* properties_provided */
2223 0, /* properties_destroyed */
2224 0, /* todo_flags_start */
2225 0, /* todo_flags_finish */
2226 };
2227
2228 class pass_warn_function_noreturn : public gimple_opt_pass
2229 {
2230 public:
2231 pass_warn_function_noreturn (gcc::context *ctxt)
2232 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2233 {}
2234
2235 /* opt_pass methods: */
2236 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
2237 virtual unsigned int execute (function *fun)
2238 {
2239 if (!TREE_THIS_VOLATILE (current_function_decl)
2240 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2241 warn_function_noreturn (current_function_decl);
2242 return 0;
2243 }
2244
2245 }; // class pass_warn_function_noreturn
2246
2247 } // anon namespace
2248
2249 gimple_opt_pass *
2250 make_pass_warn_function_noreturn (gcc::context *ctxt)
2251 {
2252 return new pass_warn_function_noreturn (ctxt);
2253 }
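
/* Editorial sketch: a hypothetical function this warning pass flags under
   -Wsuggest-attribute=noreturn.  The call to abort removes the only path
   to the exit block, yet the declaration is not marked noreturn:

     #include <stdlib.h>

     void fail (void)   // candidate for the noreturn suggestion
     {
       abort ();
     }
*/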
2254
2255 /* Simple local pass for nothrow discovery reusing the analysis from
2256 ipa_pure_const. This pass is effective when executed together with
2257 other optimization passes in the early optimization pass queue. */
2258
2259 namespace {
2260
2261 const pass_data pass_data_nothrow =
2262 {
2263 GIMPLE_PASS, /* type */
2264 "nothrow", /* name */
2265 OPTGROUP_NONE, /* optinfo_flags */
2266 TV_IPA_PURE_CONST, /* tv_id */
2267 0, /* properties_required */
2268 0, /* properties_provided */
2269 0, /* properties_destroyed */
2270 0, /* todo_flags_start */
2271 0, /* todo_flags_finish */
2272 };
2273
2274 class pass_nothrow : public gimple_opt_pass
2275 {
2276 public:
2277 pass_nothrow (gcc::context *ctxt)
2278 : gimple_opt_pass (pass_data_nothrow, ctxt)
2279 {}
2280
2281 /* opt_pass methods: */
2282 opt_pass * clone () { return new pass_nothrow (m_ctxt); }
2283 virtual bool gate (function *) { return optimize; }
2284 virtual unsigned int execute (function *);
2285
2286 }; // class pass_nothrow
2287
2288 unsigned int
2289 pass_nothrow::execute (function *)
2290 {
2291 struct cgraph_node *node;
2292 basic_block this_block;
2293
2294 if (TREE_NOTHROW (current_function_decl))
2295 return 0;
2296
2297 node = cgraph_node::get (current_function_decl);
2298
2299 /* We run during lowering; we cannot really use availability yet. */
2300 if (cgraph_node::get (current_function_decl)->get_availability ()
2301 <= AVAIL_INTERPOSABLE)
2302 {
2303 if (dump_file)
2304 fprintf (dump_file, "Function is interposable;"
2305 " not analyzing.\n");
2306 return 0;
2307 }
2308
2309 FOR_EACH_BB_FN (this_block, cfun)
2310 {
2311 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2312 !gsi_end_p (gsi);
2313 gsi_next (&gsi))
2314 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
2315 {
2316 if (is_gimple_call (gsi_stmt (gsi)))
2317 {
2318 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2319 if (callee_t && recursive_call_p (current_function_decl,
2320 callee_t))
2321 continue;
2322 }
2323
2324 if (dump_file)
2325 {
2326 fprintf (dump_file, "Statement can throw: ");
2327 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
2328 }
2329 return 0;
2330 }
2331 }
2332
2333 node->set_nothrow_flag (true);
2334
2335 bool cfg_changed = false;
2336 if (self_recursive_p (node))
2337 FOR_EACH_BB_FN (this_block, cfun)
2338 if (gimple *g = last_stmt (this_block))
2339 if (is_gimple_call (g))
2340 {
2341 tree callee_t = gimple_call_fndecl (g);
2342 if (callee_t
2343 && recursive_call_p (current_function_decl, callee_t)
2344 && maybe_clean_eh_stmt (g)
2345 && gimple_purge_dead_eh_edges (this_block))
2346 cfg_changed = true;
2347 }
2348
2349 if (dump_file)
2350 fprintf (dump_file, "Function found to be nothrow: %s\n",
2351 current_function_name ());
2352 return cfg_changed ? TODO_cleanup_cfg : 0;
2353 }
2354
2355 } // anon namespace
2356
2357 gimple_opt_pass *
2358 make_pass_nothrow (gcc::context *ctxt)
2359 {
2360 return new pass_nothrow (ctxt);
2361 }
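
/* Editorial sketch of the self-recursion handling above, with an invented
   function: the only statement that could throw externally is the
   recursive call, which the scan skips, so count is marked nothrow;
   afterwards the now-redundant EH edges on the self-call (relevant e.g.
   with -fnon-call-exceptions) are cleaned and purged:

     int count (int n)
     {
       if (n <= 0)
         return 0;
       return 1 + count (n - 1);   // recursive call: skipped by the scan
     }
*/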