/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not
   prone to this problem since they are by definition transitive.  */
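
/* For illustration (example code only, not part of this file):

     int sq (int x) { return x * x; }      -- candidate for "const"
     int g;
     int get_g (void) { return g; }        -- reads memory: at best "pure"
     void set_g (int x) { g = x; }         -- writes memory: "neither"

   The local analysis below derives exactly these distinctions from the
   GIMPLE IL.  */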

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "symtab-thunks.h"
#include "dbgcnt.h"

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};
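
/* The enumerators are ordered from best to worst so that the merge
   helpers below can use MIN/MAX directly; e.g. (illustrative)
   MAX (IPA_CONST, IPA_PURE) == IPA_PURE, which is how worse_state
   degrades a state.  */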

static const char *pure_const_names[3] = {"const", "pure", "neither"};

enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
class funct_state_d
{
public:
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to CSE pure and const
     calls, it is not legal to have DCE get rid of the call if there
     is a possibility that the call could loop forever since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};

typedef class funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

class funct_state_summary_t:
  public fast_function_summary <funct_state_d *, va_heap>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    fast_function_summary <funct_state_d *, va_heap> (symtab) {}

  virtual void insert (cgraph_node *, funct_state_d *state);
  virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
                          funct_state_d *src_data,
                          funct_state_d *dst_data);
};

static funct_state_summary_t *funct_state_summaries = NULL;

static bool gate_pure_const (void);

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace

/* Try to guess if the function body will always be visible to the
   compiler when compiling the call and whether the compiler will be
   able to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
          || DECL_COMDAT (decl));
}

/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
                   hash_set<tree> *warned_about,
                   const char * attrib_name)
{
  if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
              option,
              known_finite
              ? G_("function might be candidate for attribute %qs")
              : G_("function might be candidate for attribute %qs"
                   " if it is known to return normally"), attrib_name);
  return warned_about;
}

/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Declaring a void function pure makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
                         known_finite, warned_about, "pure");
}
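
/* The suggestion corresponds to a user-level annotation such as
   (illustrative declaration only):

     int my_strlen (const char *s) __attribute__ ((pure));

   which promises that the function reads but never writes global
   state.  */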

/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
                         known_finite, warned_about, "const");
}

/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
                         true, warned_about, "malloc");
}

/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
                           true, warned_about, "noreturn");
}

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
                         true, warned_about, "cold");
}

/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
            tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t))
        return; /* Read of a constant, do not change the function state.  */
      else
        {
          if (dump_file)
            fprintf (dump_file, " global memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
        }
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
         variables.  */
      if (TREE_READONLY (t))
        return;

      if (dump_file)
        fprintf (dump_file, " static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}
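
/* For instance (illustrative), a read of "static const int tbl[16]"
   hits the TREE_READONLY case above and keeps the function const,
   while a read of a mutable "static int counter" demotes an IPA_CONST
   state to IPA_PURE.  */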


/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (refs_local_or_readonly_memory_p (t))
    {
      if (dump_file)
        fprintf (dump_file, " Indirect ref to local or readonly "
                 "memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, " Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}

/* Compute the state based on ECF FLAGS and store it to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
                  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
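
/* For example (illustrative), a decl carrying ECF_CONST and not
   ECF_LOOPING_CONST_OR_PURE maps to (IPA_CONST, looping = false),
   while a decl with no ECF bits that may return maps to
   (IPA_NEITHER, looping = true).  */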

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   the better of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
              enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
        *looping = looping2;
      else
        *looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   the worse of the two variants into STATE and LOOPING.
   FROM and TO are the caller and the callee, used to check whether
   the call binds to the current definition.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
             enum pure_const_state_e state2, bool looping2,
             struct symtab_node *from,
             struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now this function will be detected as CONST; however, when
     interposed, it may end up being just pure.  We must always assume
     the worst scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Dropping state to PURE because call to %s may not "
                 "bind to current def.\n", to->dump_name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
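
/* Usage sketch (illustrative): merging a running state with a callee
   known to be looping pure:

     enum pure_const_state_e st = IPA_CONST;
     bool lp = false;
     worse_state (&st, &lp, IPA_PURE, true, NULL, NULL);

   leaves st == IPA_PURE and lp == true.  */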

/* Recognize special cases of builtins that are by themselves not const
   but a function using them is.  */
bool
builtin_safe_for_const_function_p (bool *looping, tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
      case BUILT_IN_DWARF_CFA:
      case BUILT_IN_RETURN_ADDRESS:
        *looping = false;
        return true;
      case BUILT_IN_PREFETCH:
        *looping = true;
        return true;
      default:
        break;
      }
  return false;
}
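
/* E.g. (illustrative) a function that uses __builtin_alloca purely for
   scratch space and touches no global memory can still be detected as
   const; the alloca itself does not break constness.  Note that
   __builtin_prefetch is also allowed but forces the looping variant.  */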

/* Check the parameters of the function call CALL to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  CALL is the call
   statement for the entire call expression.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
                                     && stmt_can_throw_external (cfun, call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
            && tree_could_throw_p (gimple_op (call, i)))
          {
            if (possibly_throws && cfun->can_throw_non_call_exceptions)
              {
                if (dump_file)
                  fprintf (dump_file, " operand can throw; looping\n");
                local->looping = true;
              }
            if (possibly_throws_externally)
              {
                if (dump_file)
                  fprintf (dump_file, " operand can throw externally\n");
                local->can_throw = true;
              }
          }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
          && !nonfreeing_call_p (call))
        local->can_free = true;

      if (builtin_safe_for_const_function_p (&call_looping, callee_t))
        {
          worse_state (&local->pure_const_state, &local->looping,
                       IPA_CONST, call_looping,
                       NULL, NULL);
          return;
        }
      /* When bad things happen to bad functions, they cannot be const
         or pure.  */
      if (setjmp_call_p (callee_t))
        {
          if (dump_file)
            fprintf (dump_file, " setjmp is not const/pure\n");
          local->looping = true;
          local->pure_const_state = IPA_NEITHER;
        }

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee_t))
          {
          case BUILT_IN_LONGJMP:
          case BUILT_IN_NONLOCAL_GOTO:
            if (dump_file)
              fprintf (dump_file,
                       " longjmp and nonlocal goto is not const/pure\n");
            local->pure_const_state = IPA_NEITHER;
            local->looping = true;
            break;
          default:
            break;
          }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, " Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, " can throw; looping\n");
          local->looping = true;
        }
      if (possibly_throws_externally)
        {
          if (dump_file)
            {
              fprintf (dump_file, " can throw externally to lp %i\n",
                       lookup_stmt_eh_lp (call));
              if (callee_t)
                fprintf (dump_file, " callee:%s\n",
                         IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
            }
          local->can_throw = true;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
                        ((flags & (ECF_NORETURN | ECF_NOTHROW))
                         == (ECF_NORETURN | ECF_NOTHROW))
                        || (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
                   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}

/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Look at the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     Similar logic is in ipa-modref.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
                            ipa ? check_ipa_load : check_load,
                            ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, " can throw; looping\n");
          local->looping = true;
        }
      if (stmt_can_throw_external (cfun, stmt))
        {
          if (dump_file)
            fprintf (dump_file, " can throw externally\n");
          local->can_throw = true;
        }
      else
        if (dump_file)
          fprintf (dump_file, " can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
        /* Target of long jump.  */
        {
          if (dump_file)
            fprintf (dump_file, " nonlocal label is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, " memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->can_free = true;
        }
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, " volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
          local->can_free = true;
        }
      return;
    default:
      break;
    }
}

/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is the return value of the function and STMT is the return
   statement.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
        tree op2 = gimple_cond_rhs (cond);
        if (!integer_zerop (op2))
          return false;
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
        enum tree_code code = gimple_assign_rhs_code (ga);
        if (TREE_CODE_CLASS (code) != tcc_comparison)
          return false;
        if (!integer_zerop (gimple_assign_rhs2 (ga)))
          return false;
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      return false;

  return true;
}
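
/* The accepted shape is e.g. (illustrative GIMPLE):

     p_1 = foo ();
     if (p_1 == 0B)
       goto <bb 3>;
     ...
     return p_1;

   i.e. uses in comparisons against 0 and in the return statement only;
   any other use, say q_2 = p_1 + 4, disqualifies the value.  */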

/* malloc_candidate_p() checks if FUN can possibly be annotated with the
   malloc attribute.  Currently this function does a very conservative
   analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and each element of the phi is either NULL or
      SSA_NAME_DEF_STMT(element) is a function call.
   3) The return value has immediate uses only within comparisons
      (gcond or gassign) and the return stmt (and likewise a phi arg has
      immediate uses only within a comparison or the phi stmt).  */
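
/* A typical candidate therefore looks like (illustrative sketch;
   my_malloc is a hypothetical allocator, which in non-ipa mode must
   itself be DECL_IS_MALLOC):

     void *wrapped_alloc (unsigned n)
     {
       void *p = my_malloc (n);
       if (!p)
         return 0;
       return p;
     }
*/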

#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
             (node->dump_name ()), (reason));  \
  return false;  \
}

static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
                      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
                    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
        return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
        DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
                        " non-ipa mode.")

      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
        {
          ipa_call_summary *es = ipa_call_summaries->get_create (cs);
          es->is_return_callee_uncaptured = true;
        }
    }

  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
        {
          tree arg = gimple_phi_arg_def (phi, i);
          if (integer_zerop (arg))
            continue;

          all_args_zero = false;
          if (TREE_CODE (arg) != SSA_NAME)
            DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
          if (!check_retval_uses (arg, phi))
            DUMP_AND_RETURN ("phi arg has uses outside phi"
                             " and comparisons against 0.")

          gimple *arg_def = SSA_NAME_DEF_STMT (arg);
          if (is_a<gphi *> (arg_def))
            {
              if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
                DUMP_AND_RETURN ("nested phi fail")
              continue;
            }

          gcall *call_stmt = dyn_cast<gcall *> (arg_def);
          if (!call_stmt)
            DUMP_AND_RETURN ("phi arg is not a call_stmt.")

          tree callee_decl = gimple_call_fndecl (call_stmt);
          if (!callee_decl)
            return false;
          if (!ipa && !DECL_IS_MALLOC (callee_decl))
            DUMP_AND_RETURN("callee_decl does not have malloc attribute"
                            " for non-ipa mode.")

          cgraph_edge *cs = node->get_edge (call_stmt);
          if (cs)
            {
              ipa_call_summary *es = ipa_call_summaries->get_create (cs);
              es->is_return_callee_uncaptured = true;
            }
        }

      if (all_args_zero)
        DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }

  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  auto_bitmap visited;
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
        return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
        DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
          || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
        DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
        return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
             IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}

#undef DUMP_AND_RETURN

/* Return true if the function is known to be finite.  */

bool
finite_function_p ()
{
  /* Const functions cannot have back edges (an indication of a
     possible infinite loop side effect).  */
  bool finite = true;
  if (mark_dfs_back_edges ())
    {
      /* Preheaders are needed for SCEV to work.
         Simple latches and recorded exits improve the chances that a
         loop will be proved finite in testcases such as loop-15.c
         and loop-24.c.  */
      loop_optimizer_init (LOOPS_HAVE_PREHEADERS
                           | LOOPS_HAVE_SIMPLE_LATCHES
                           | LOOPS_HAVE_RECORDED_EXITS);
      if (dump_file && (dump_flags & TDF_DETAILS))
        flow_loops_dump (dump_file, NULL, 0);
      if (mark_irreducible_loops ())
        {
          if (dump_file)
            fprintf (dump_file, " has irreducible loops\n");
          finite = false;
        }
      else
        {
          scev_initialize ();
          for (auto loop : loops_list (cfun, 0))
            if (!finite_loop_p (loop))
              {
                if (dump_file)
                  fprintf (dump_file, " cannot prove finiteness of "
                           "loop %i\n", loop->num);
                finite = false;
                break;
              }
          scev_finalize ();
        }
      loop_optimizer_finalize ();
    }
  return finite;
}
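
/* A counted loop such as (illustrative)

     for (i = 0; i < n; i++)
       sum += a[i];

   gets a computed iteration bound from SCEV and passes finite_loop_p,
   whereas a pointer-chasing loop with no provable bound, e.g.
   "while (p) p = p->next;", generally cannot be proved finite.  */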

/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (class funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
                    flags_from_decl_or_type (fn->decl),
                    fn->cannot_return_p ());

  if (fn->thunk || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
        l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
               fn->dump_name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          check_stmt (&gsi, l, ipa);
          if (l->pure_const_state == IPA_NEITHER
              && l->looping
              && l->can_throw
              && l->can_free)
            goto end;
        }
    }

end:
  if (l->pure_const_state != IPA_NEITHER
      && !l->looping
      && !finite_function_p ())
    l->looping = true;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
                l->state_previously_known,
                l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
        fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
        fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}

void
funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    {
      funct_state_d *a = analyze_function (node, true);
      new (state) funct_state_d (*a);
      free (a);
    }
  else
    /* Do not keep stale summaries.  */
    funct_state_summaries->remove (node);
}

/* Called when a new clone is inserted into the callgraph late.  */

void
funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
                                  funct_state_d *src_data,
                                  funct_state_d *dst_data)
{
  new (dst_data) funct_state_d (*src_data);
  if (dst_data->malloc_state == STATE_MALLOC
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
    dst_data->malloc_state = STATE_MALLOC_BOTTOM;
}

void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  funct_state_summaries = new funct_state_summary_t (symtab);
}


/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when the function is cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      {
        funct_state_d *a = analyze_function (node, true);
        new (funct_state_summaries->get_create (node)) funct_state_d (*a);
        free (a);
      }
}


/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
        count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
        {
          struct bitpack_d bp;
          int node_ref;
          lto_symtab_encoder_t encoder;

          encoder = ob->decl_state->symtab_node_encoder;
          node_ref = lto_symtab_encoder_encode (encoder, node);
          streamer_write_uhwi_stream (ob->main_stream, node_ref);

          /* Note that flags will need to be read in the opposite
             order as we are pushing the bitflags into FLAGS.  */
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, fs->pure_const_state, 2);
          bp_pack_value (&bp, fs->state_previously_known, 2);
          bp_pack_value (&bp, fs->looping_previously_known, 1);
          bp_pack_value (&bp, fs->looping, 1);
          bp_pack_value (&bp, fs->can_throw, 1);
          bp_pack_value (&bp, fs->can_free, 1);
          bp_pack_value (&bp, fs->malloc_state, 2);
          streamer_write_bitpack (&bp);
        }
    }

  lto_destroy_simple_output_block (ob);
}
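
/* For reference, the bitpack just written holds, in order:
   pure_const_state (2 bits), state_previously_known (2),
   looping_previously_known (1), looping (1), can_throw (1),
   can_free (1) and malloc_state (2); pure_const_read_summary below
   unpacks the same ten bits in the same order.  */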


/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
        = lto_create_simple_input_block (file_data,
                                         LTO_section_ipa_pure_const,
                                         &data, &len);
      if (ib)
        {
          unsigned int i;
          unsigned int count = streamer_read_uhwi (ib);

          for (i = 0; i < count; i++)
            {
              unsigned int index;
              struct cgraph_node *node;
              struct bitpack_d bp;
              funct_state fs;
              lto_symtab_encoder_t encoder;

              index = streamer_read_uhwi (ib);
              encoder = file_data->symtab_node_encoder;
              node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                        index));

              fs = funct_state_summaries->get_create (node);
              /* Note that the flags must be read in the opposite
                 order in which they were written (the bitflags were
                 pushed into FLAGS).  */
              bp = streamer_read_bitpack (ib);
              fs->pure_const_state
                = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->state_previously_known
                = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->looping_previously_known = bp_unpack_value (&bp, 1);
              fs->looping = bp_unpack_value (&bp, 1);
              fs->can_throw = bp_unpack_value (&bp, 1);
              fs->can_free = bp_unpack_value (&bp, 1);
              fs->malloc_state
                = (enum malloc_state_e) bp_unpack_value (&bp, 2);

              if (dump_file)
                {
                  int flags = flags_from_decl_or_type (node->decl);
                  fprintf (dump_file, "Read info for %s ", node->dump_name ());
                  if (flags & ECF_CONST)
                    fprintf (dump_file, " const");
                  if (flags & ECF_PURE)
                    fprintf (dump_file, " pure");
                  if (flags & ECF_NOTHROW)
                    fprintf (dump_file, " nothrow");
                  fprintf (dump_file, "\n pure const state: %s\n",
                           pure_const_names[fs->pure_const_state]);
                  fprintf (dump_file, " previously known state: %s\n",
                           pure_const_names[fs->state_previously_known]);
                  if (fs->looping)
                    fprintf (dump_file," function is locally looping\n");
                  if (fs->looping_previously_known)
                    fprintf (dump_file," function is previously known looping\n");
                  if (fs->can_throw)
                    fprintf (dump_file," function is locally throwing\n");
                  if (fs->can_free)
                    fprintf (dump_file," function can locally free\n");
                  fprintf (dump_file, "\n malloc state: %s\n",
                           malloc_state_names[fs->malloc_state]);
                }
            }

          lto_destroy_simple_input_block (file_data,
                                          LTO_section_ipa_pure_const,
                                          ib, data, len);
        }
    }
}

/* We only propagate across edges that can throw externally and whose
   callee is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *ultimate_target
    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
    return true;
  return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
           && !e->callee->binds_to_current_def_p (e->caller))
          || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
          || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
}

/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self-recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}

/* Return true if N is a cdtor that is not const or pure.  In this case we may
   need to remove an unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
            || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}

/* Skip edges from and to nodes without ipa_pure_const enabled.
   Ignore unavailable symbols.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  cgraph_node *ultimate_target
    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);

  return (avail <= AVAIL_INTERPOSABLE
          || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
          || !opt_for_fn (ultimate_target->decl,
                          flag_ipa_pure_const));
}

/* Return true if function should be skipped for local pure const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over whole program after early
     optimizations we must not promote functions that are called by already
     processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
        fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !flag_lto
      && !node->has_aliases_p ())
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}

/* Make function const and output warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable cdtors.  */

bool
ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
{
  bool cdtor = false;

  if (TREE_READONLY (node->decl)
      && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
    return false;
  warn_function_const (node->decl, !looping);
  if (local && skip_function_for_local_pure_const (node))
    return false;
  if (dump_file)
    fprintf (dump_file, "Function found to be %sconst: %s\n",
             looping ? "looping " : "",
             node->dump_name ());
  if (!local && !looping)
    cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
  if (!dbg_cnt (ipa_attr))
    return false;
  if (node->set_const_flag (true, looping))
    {
      if (dump_file)
        fprintf (dump_file,
                 "Declaration updated to be %sconst: %s\n",
                 looping ? "looping " : "",
                 node->dump_name ());
      if (local)
        return true;
      return cdtor;
    }
  return false;
}

/* Make function pure and output warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable cdtors.  */

bool
ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
{
  bool cdtor = false;

  if (DECL_PURE_P (node->decl)
      && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
    return false;
  warn_function_pure (node->decl, !looping);
  if (local && skip_function_for_local_pure_const (node))
    return false;
  if (dump_file)
    fprintf (dump_file, "Function found to be %spure: %s\n",
             looping ? "looping " : "",
             node->dump_name ());
  if (!local && !looping)
    cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
  if (!dbg_cnt (ipa_attr))
    return false;
  if (node->set_pure_flag (true, looping))
    {
      if (dump_file)
        fprintf (dump_file,
                 "Declaration updated to be %spure: %s\n",
                 looping ? "looping " : "",
                 node->dump_name ());
      if (local)
        return true;
      return cdtor;
    }
  return false;
}

/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;

  order_pos = ipa_reduced_postorder (order, true,
                                     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
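  /* Concretely (illustrative): if a() calls b() and b() calls a(), both
     functions land in the same cycle and receive the same final state;
     a cycle visited more than once is also conservatively treated as
     looping (see the count > 1 test below).  */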
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
        {
          struct cgraph_edge *e;
          struct cgraph_edge *ie;
          int i;
          struct ipa_ref *ref = NULL;

          funct_state w_l = funct_state_summaries->get_create (w);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, " Visiting %s state:%s looping %i\n",
                     w->dump_name (),
                     pure_const_names[w_l->pure_const_state],
                     w_l->looping);

          /* First merge in function body properties.
             We are safe to pass NULL as FROM and TO because we will take care
             of possible interposition when walking callees.  */
          worse_state (&pure_const_state, &looping,
                       w_l->pure_const_state, w_l->looping,
                       NULL, NULL);
          if (pure_const_state == IPA_NEITHER)
            break;

          count++;

          /* We consider recursive cycles as possibly infinite.
             This might be relaxed since infinite recursion leads to stack
             overflow.  */
          if (count > 1)
            looping = true;

          /* Now walk the edges and merge in callee properties.  */
          for (e = w->callees; e && pure_const_state != IPA_NEITHER;
               e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                function_or_virtual_thunk_symbol (&avail,
                                                  e->caller);
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (e->recursive_p ())
                looping = true;

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, " Call to %s",
                           e->callee->dump_name ());
                }
              if (avail > AVAIL_INTERPOSABLE)
                {
                  funct_state y_l = funct_state_summaries->get_create (y);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file,
                               " state:%s looping:%i\n",
                               pure_const_names[y_l->pure_const_state],
                               y_l->looping);
                    }
                  if (y_l->pure_const_state > IPA_PURE
                      && e->cannot_lead_to_return_p ())
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file,
                                 " Ignoring side effects"
                                 " -> pure, looping\n");
                      edge_state = IPA_PURE;
                      edge_looping = true;
                    }
                  else
                    {
                      edge_state = y_l->pure_const_state;
                      edge_looping = y_l->looping;
                    }
                }
              else if (builtin_safe_for_const_function_p (&edge_looping,
                                                          y->decl))
                edge_state = IPA_CONST;
              else
                state_from_flags (&edge_state, &edge_looping,
                                  flags_from_decl_or_type (y->decl),
                                  e->cannot_lead_to_return_p ());

              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping, e->caller, e->callee);
              if (pure_const_state == IPA_NEITHER)
                break;
            }

          /* Now process the indirect calls.  */
          for (ie = w->indirect_calls;
               ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
            {
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, " Indirect call");
              state_from_flags (&edge_state, &edge_looping,
                                ie->indirect_info->ecf_flags,
                                ie->cannot_lead_to_return_p ());
              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping, NULL, NULL);
              if (pure_const_state == IPA_NEITHER)
                break;
            }

          /* And finally all loads and stores.  */
          for (i = 0; w->iterate_reference (i, ref)
               && pure_const_state != IPA_NEITHER; i++)
            {
              enum pure_const_state_e ref_state = IPA_CONST;
              bool ref_looping = false;
              switch (ref->use)
                {
                case IPA_REF_LOAD:
                  /* readonly reads are safe.  */
                  if (TREE_READONLY (ref->referred->decl))
                    break;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, " nonreadonly global var read\n");
                  ref_state = IPA_PURE;
                  break;
                case IPA_REF_STORE:
                  if (ref->cannot_lead_to_return ())
                    break;
                  ref_state = IPA_NEITHER;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, " global var write\n");
                  break;
                case IPA_REF_ADDR:
                  break;
                default:
                  gcc_unreachable ();
                }
              better_state (&ref_state, &ref_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           ref_state, ref_looping, NULL, NULL);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Result %s looping %i\n",
                 pure_const_names [pure_const_state],
                 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
        {
          struct cgraph_edge *e;
          funct_state w_l = funct_state_summaries->get (w);

          if (w_l->can_free
              || w->get_availability () == AVAIL_INTERPOSABLE
              || w->indirect_calls)
            can_free = true;

          for (e = w->callees; e && !can_free; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                function_or_virtual_thunk_symbol (&avail,
                                                  e->caller);

              if (avail > AVAIL_INTERPOSABLE)
                can_free = funct_state_summaries->get (y)->can_free;
              else
                can_free = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = funct_state_summaries->get (w);
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;

          w_l->can_free = can_free;
          w->nonfreeing_fn = !can_free;
          if (!can_free && dump_file)
            fprintf (dump_file, "Function found not to call free: %s\n",
                     w->dump_name ());

          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            {
              this_state = w_l->state_previously_known;
              if (this_state == IPA_NEITHER)
                this_looping = w_l->looping_previously_known;
            }
          if (!this_looping && self_recursive_p (w))
            this_looping = true;
          if (!w_l->looping_previously_known)
            this_looping = false;

          /* All nodes within a cycle share the same info.  */
          w_l->pure_const_state = this_state;
          w_l->looping = this_looping;

          /* Inline clones share declaration with their offline copies;
             do not modify their declarations since the offline copy may
             be different.  */
          if (!w->inlined_to)
            switch (this_state)
              {
              case IPA_CONST:
                remove_p |= ipa_make_function_const (w, this_looping, false);
                break;

              case IPA_PURE:
                remove_p |= ipa_make_function_pure (w, this_looping, false);
                break;

              default:
                break;
              }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
1824
1825 /* Produce transitive closure over the callgraph and compute nothrow
1826 attributes. */
1827
1828 static void
1829 propagate_nothrow (void)
1830 {
1831 struct cgraph_node *node;
1832 struct cgraph_node *w;
1833 struct cgraph_node **order =
1834 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1835 int order_pos;
1836 int i;
1837 struct ipa_dfs_info * w_info;
1838
1839 order_pos = ipa_reduced_postorder (order, true,
1840 ignore_edge_for_nothrow);
1841 if (dump_file)
1842 {
1843 cgraph_node::dump_cgraph (dump_file);
1844 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1845 }
1846
1847 /* Propagate the local information through the call graph to produce
1848 the global information. All the nodes within a cycle will have
1849 the same info so we collapse cycles first. Then we can do the
1850 propagation in one pass from the leaves to the roots. */
1851 for (i = 0; i < order_pos; i++ )
1852 {
1853 bool can_throw = false;
1854 node = order[i];
1855
1856 if (node->alias)
1857 continue;
1858
1859 /* Find the worst state for any node in the cycle. */
1860 w = node;
1861 while (w && !can_throw)
1862 {
1863 struct cgraph_edge *e, *ie;
1864
1865 if (!TREE_NOTHROW (w->decl))
1866 {
1867 funct_state w_l = funct_state_summaries->get_create (w);
1868
1869 if (w_l->can_throw
1870 || w->get_availability () == AVAIL_INTERPOSABLE)
1871 can_throw = true;
1872
1873 for (e = w->callees; e && !can_throw; e = e->next_callee)
1874 {
1875 enum availability avail;
1876
1877 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1878 continue;
1879
1880 struct cgraph_node *y = e->callee->
1881 function_or_virtual_thunk_symbol (&avail,
1882 e->caller);
1883
1884 /* We can use info about the callee only if we know it
1885 cannot be interposed.
1886 When the callee is compiled with non-call exceptions we
1887 also must check that the declaration is bound to the
1888 current body, as another semantically equivalent body
1889 may still throw. */
1890 if (avail <= AVAIL_INTERPOSABLE
1891 || (!TREE_NOTHROW (y->decl)
1892 && (funct_state_summaries->get_create (y)->can_throw
1893 || (opt_for_fn (y->decl, flag_non_call_exceptions)
1894 && !e->callee->binds_to_current_def_p (w)))))
1895 can_throw = true;
1896 }
1897 for (ie = w->indirect_calls; ie && !can_throw;
1898 ie = ie->next_callee)
1899 if (ie->can_throw_external
1900 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1901 can_throw = true;
1902 }
1903 w_info = (struct ipa_dfs_info *) w->aux;
1904 w = w_info->next_cycle;
1905 }
1906
1907 /* Copy back the region's nothrow state, which is shared by
1908 all nodes in the region. */
1909 w = node;
1910 while (w)
1911 {
1912 funct_state w_l = funct_state_summaries->get_create (w);
1913 if (!can_throw && !TREE_NOTHROW (w->decl))
1914 {
1915 /* Inline clones share declaration with their offline copies;
1916 do not modify their declarations since the offline copy may
1917 be different. */
1918 if (!w->inlined_to)
1919 {
1920 w->set_nothrow_flag (true);
1921 if (dump_file)
1922 fprintf (dump_file, "Function found to be nothrow: %s\n",
1923 w->dump_name ());
1924 }
1925 }
1926 else if (can_throw && !TREE_NOTHROW (w->decl))
1927 w_l->can_throw = true;
1928 w_info = (struct ipa_dfs_info *) w->aux;
1929 w = w_info->next_cycle;
1930 }
1931 }
1932
1933 ipa_free_postorder_info ();
1934 free (order);
1935 }
1936
1937 /* Debugging function to dump state of malloc lattice. */
1938
1939 DEBUG_FUNCTION
1940 static void
1941 dump_malloc_lattice (FILE *dump_file, const char *s)
1942 {
1943 if (!dump_file)
1944 return;
1945
1946 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1947 cgraph_node *node;
1948 FOR_EACH_FUNCTION (node)
1949 {
1950 funct_state fs = funct_state_summaries->get (node);
1951 if (fs)
1952 fprintf (dump_file, "%s: %s\n", node->dump_name (),
1953 malloc_state_names[fs->malloc_state]);
1954 }
1955 }
1956
1957 /* Propagate malloc attribute across the callgraph. */
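/* As a sketch (hypothetical wrapper, not from this file):

     void *xmalloc_like (size_t n)
     {
       void *p = malloc (n);
       if (!p)
	 abort ();
       return p;
     }

   returns the result of a malloc-like callee uncaptured, so the
   propagation below can set the malloc attribute on it, and
   -Wsuggest-attribute=malloc can then suggest annotating the
   source.  */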
1958
1959 static void
1960 propagate_malloc (void)
1961 {
1962 cgraph_node *node;
1963 FOR_EACH_FUNCTION (node)
1964 {
1965 if (DECL_IS_MALLOC (node->decl))
1966 if (!funct_state_summaries->exists (node))
1967 {
1968 funct_state fs = funct_state_summaries->get_create (node);
1969 fs->malloc_state = STATE_MALLOC;
1970 }
1971 }
1972
1973 dump_malloc_lattice (dump_file, "Initial");
1974 struct cgraph_node **order
1975 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1976 int order_pos = ipa_reverse_postorder (order);
1977 bool changed = true;
1978
1979 while (changed)
1980 {
1981 changed = false;
1982 /* Walk in postorder. */
1983 for (int i = order_pos - 1; i >= 0; --i)
1984 {
1985 cgraph_node *node = order[i];
1986 if (node->alias
1987 || !node->definition
1988 || !funct_state_summaries->exists (node))
1989 continue;
1990
1991 funct_state l = funct_state_summaries->get (node);
1992
1993 /* FIXME: add support for indirect calls. */
1994 if (node->indirect_calls)
1995 {
1996 l->malloc_state = STATE_MALLOC_BOTTOM;
1997 continue;
1998 }
1999
2000 if (node->get_availability () <= AVAIL_INTERPOSABLE)
2001 {
2002 l->malloc_state = STATE_MALLOC_BOTTOM;
2003 continue;
2004 }
2005
2006 if (l->malloc_state == STATE_MALLOC_BOTTOM)
2007 continue;
2008
2009 auto_vec<cgraph_node *, 16> callees;
2010 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2011 {
2012 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
2013 if (es && es->is_return_callee_uncaptured)
2014 callees.safe_push (cs->callee);
2015 }
2016
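	  /* Meet over the collected callees.  This relies on the
	     lattice order of the enum: STATE_MALLOC_TOP < STATE_MALLOC
	     < STATE_MALLOC_BOTTOM, so taking the numeric maximum moves
	     the state downwards.  */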
2017 malloc_state_e new_state = l->malloc_state;
2018 for (unsigned j = 0; j < callees.length (); j++)
2019 {
2020 cgraph_node *callee = callees[j];
2021 if (!funct_state_summaries->exists (callee))
2022 {
2023 new_state = STATE_MALLOC_BOTTOM;
2024 break;
2025 }
2026 malloc_state_e callee_state
2027 = funct_state_summaries->get_create (callee)->malloc_state;
2028 if (new_state < callee_state)
2029 new_state = callee_state;
2030 }
2031 if (new_state != l->malloc_state)
2032 {
2033 changed = true;
2034 l->malloc_state = new_state;
2035 }
2036 }
2037 }
2038
2039 FOR_EACH_DEFINED_FUNCTION (node)
2040 if (funct_state_summaries->exists (node))
2041 {
2042 funct_state l = funct_state_summaries->get (node);
2043 if (!node->alias
2044 && l->malloc_state == STATE_MALLOC
2045 && !node->inlined_to
2046 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
2047 {
2048 if (dump_file && (dump_flags & TDF_DETAILS))
2049 fprintf (dump_file, "Function %s found to be malloc\n",
2050 node->dump_name ());
2051
2052 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
2053 node->set_malloc_flag (true);
2054 if (!malloc_decl_p && warn_suggest_attribute_malloc)
2055 warn_function_malloc (node->decl);
2056 }
2057 }
2058
2059 dump_malloc_lattice (dump_file, "after propagation");
2060 ipa_free_postorder_info ();
2061 free (order);
2062 }
2063
2064 /* Produce the global information by performing a transitive closure
2065 on the local information that was produced by generate_summary. */
2066
2067 unsigned int
2068 pass_ipa_pure_const::
2069 execute (function *)
2070 {
2071 bool remove_p;
2072
2073 /* Propagating nothrow first lets more functions be proven not to
2074 lead to a return, which improves the later analyses. */
2075 propagate_nothrow ();
2076 propagate_malloc ();
2077 remove_p = propagate_pure_const ();
2078
2079 delete funct_state_summaries;
2080 return remove_p ? TODO_remove_functions : 0;
2081 }
2082
2083 static bool
2084 gate_pure_const (void)
2085 {
2086 return flag_ipa_pure_const || in_lto_p;
2087 }
2088
2089 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2090 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2091 pure_const_generate_summary, /* generate_summary */
2092 pure_const_write_summary, /* write_summary */
2093 pure_const_read_summary, /* read_summary */
2094 NULL, /* write_optimization_summary */
2095 NULL, /* read_optimization_summary */
2096 NULL, /* stmt_fixup */
2097 0, /* function_transform_todo_flags_start */
2098 NULL, /* function_transform */
2099 NULL), /* variable_transform */
2100 init_p (false) {}
2101
2102 ipa_opt_pass_d *
2103 make_pass_ipa_pure_const (gcc::context *ctxt)
2104 {
2105 return new pass_ipa_pure_const (ctxt);
2106 }
2107
2108 /* Simple local pass for pure const discovery reusing the analysis from
2109 ipa_pure_const. This pass is effective when executed together with
2110 other optimization passes in the early optimization pass queue. */
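/* For instance (illustrative only):

     int sq (int x) { return x * x; }

   can already be marked const (TREE_READONLY) by this local pass,
   which benefits the optimizers that run before the IPA propagation
   above, and -Wsuggest-attribute=const can point out the missing
   annotation.  */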
2111
2112 namespace {
2113
2114 const pass_data pass_data_local_pure_const =
2115 {
2116 GIMPLE_PASS, /* type */
2117 "local-pure-const", /* name */
2118 OPTGROUP_NONE, /* optinfo_flags */
2119 TV_IPA_PURE_CONST, /* tv_id */
2120 0, /* properties_required */
2121 0, /* properties_provided */
2122 0, /* properties_destroyed */
2123 0, /* todo_flags_start */
2124 0, /* todo_flags_finish */
2125 };
2126
2127 class pass_local_pure_const : public gimple_opt_pass
2128 {
2129 public:
2130 pass_local_pure_const (gcc::context *ctxt)
2131 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2132 {}
2133
2134 /* opt_pass methods: */
2135 opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
2136 virtual bool gate (function *) { return gate_pure_const (); }
2137 virtual unsigned int execute (function *);
2138
2139 }; // class pass_local_pure_const
2140
2141 unsigned int
2142 pass_local_pure_const::execute (function *fun)
2143 {
2144 bool changed = false;
2145 funct_state l;
2146 bool skip;
2147 struct cgraph_node *node;
2148
2149 node = cgraph_node::get (current_function_decl);
2150 skip = skip_function_for_local_pure_const (node);
2151
2152 if (!warn_suggest_attribute_const
2153 && !warn_suggest_attribute_pure
2154 && skip)
2155 return 0;
2156
2157 l = analyze_function (node, false);
2158
2159 /* Do NORETURN discovery. */
2160 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
2161 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2162 {
2163 warn_function_noreturn (fun->decl);
2164 if (dump_file)
2165 fprintf (dump_file, "Function found to be noreturn: %s\n",
2166 current_function_name ());
2167
2168 /* Update declaration and reduce profile to executed once. */
2169 if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
2170 changed = true;
2171 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
2172 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2173 }
2174
2175 switch (l->pure_const_state)
2176 {
2177 case IPA_CONST:
2178 changed |= ipa_make_function_const
2179 (cgraph_node::get (current_function_decl), l->looping, true);
2180 break;
2181
2182 case IPA_PURE:
2183 changed |= ipa_make_function_pure
2184 (cgraph_node::get (current_function_decl), l->looping, true);
2185 break;
2186
2187 default:
2188 break;
2189 }
2190 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
2191 {
2192 node->set_nothrow_flag (true);
2193 changed = true;
2194 if (dump_file)
2195 fprintf (dump_file, "Function found to be nothrow: %s\n",
2196 current_function_name ());
2197 }
2198
2199 if (l->malloc_state == STATE_MALLOC
2200 && !DECL_IS_MALLOC (current_function_decl))
2201 {
2202 node->set_malloc_flag (true);
2203 if (warn_suggest_attribute_malloc)
2204 warn_function_malloc (node->decl);
2205 changed = true;
2206 if (dump_file)
2207 fprintf (dump_file, "Function found to be malloc: %s\n",
2208 node->dump_name ());
2209 }
2210
2211 free (l);
2212 if (changed)
2213 return execute_fixup_cfg ();
2214 else
2215 return 0;
2216 }
2217
2218 } // anon namespace
2219
2220 gimple_opt_pass *
2221 make_pass_local_pure_const (gcc::context *ctxt)
2222 {
2223 return new pass_local_pure_const (ctxt);
2224 }
2225
2226 /* Emit noreturn warnings. */
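/* A sketch of the kind of function this warns about (hypothetical):

     void fail (const char *msg)
     {
       fprintf (stderr, "%s\n", msg);
       exit (1);
     }

   Since exit does not return, no edge reaches the exit block, and
   -Wsuggest-attribute=noreturn fires unless the function is already
   declared noreturn (TREE_THIS_VOLATILE).  */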
2227
2228 namespace {
2229
2230 const pass_data pass_data_warn_function_noreturn =
2231 {
2232 GIMPLE_PASS, /* type */
2233 "*warn_function_noreturn", /* name */
2234 OPTGROUP_NONE, /* optinfo_flags */
2235 TV_NONE, /* tv_id */
2236 PROP_cfg, /* properties_required */
2237 0, /* properties_provided */
2238 0, /* properties_destroyed */
2239 0, /* todo_flags_start */
2240 0, /* todo_flags_finish */
2241 };
2242
2243 class pass_warn_function_noreturn : public gimple_opt_pass
2244 {
2245 public:
2246 pass_warn_function_noreturn (gcc::context *ctxt)
2247 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2248 {}
2249
2250 /* opt_pass methods: */
2251 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
2252 virtual unsigned int execute (function *fun)
2253 {
2254 if (!TREE_THIS_VOLATILE (current_function_decl)
2255 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2256 warn_function_noreturn (current_function_decl);
2257 return 0;
2258 }
2259
2260 }; // class pass_warn_function_noreturn
2261
2262 } // anon namespace
2263
2264 gimple_opt_pass *
2265 make_pass_warn_function_noreturn (gcc::context *ctxt)
2266 {
2267 return new pass_warn_function_noreturn (ctxt);
2268 }
2269
2270 /* Simple local pass for nothrow discovery reusing the analysis from
2271 ipa_pure_const. This pass is effective when executed together with
2272 other optimization passes in the early optimization pass queue. */
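/* Sketch of what this pass decides (hypothetical function):

     int get (int *p) { return *p; }

   contains no statement for which stmt_can_throw_external is true (at
   least without -fnon-call-exceptions), so it is marked nothrow.
   Self-recursive calls are the one kind of throwing statement that is
   deliberately ignored, since the recursion bottoms out in this very
   function.  */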
2273
2274 namespace {
2275
2276 const pass_data pass_data_nothrow =
2277 {
2278 GIMPLE_PASS, /* type */
2279 "nothrow", /* name */
2280 OPTGROUP_NONE, /* optinfo_flags */
2281 TV_IPA_PURE_CONST, /* tv_id */
2282 0, /* properties_required */
2283 0, /* properties_provided */
2284 0, /* properties_destroyed */
2285 0, /* todo_flags_start */
2286 0, /* todo_flags_finish */
2287 };
2288
2289 class pass_nothrow : public gimple_opt_pass
2290 {
2291 public:
2292 pass_nothrow (gcc::context *ctxt)
2293 : gimple_opt_pass (pass_data_nothrow, ctxt)
2294 {}
2295
2296 /* opt_pass methods: */
2297 opt_pass * clone () { return new pass_nothrow (m_ctxt); }
2298 virtual bool gate (function *) { return optimize; }
2299 virtual unsigned int execute (function *);
2300
2301 }; // class pass_nothrow
2302
2303 unsigned int
2304 pass_nothrow::execute (function *)
2305 {
2306 struct cgraph_node *node;
2307 basic_block this_block;
2308
2309 if (TREE_NOTHROW (current_function_decl))
2310 return 0;
2311
2312 node = cgraph_node::get (current_function_decl);
2313
2314 /* We run during lowering; we cannot really use availability yet. */
2315 if (cgraph_node::get (current_function_decl)->get_availability ()
2316 <= AVAIL_INTERPOSABLE)
2317 {
2318 if (dump_file)
2319 fprintf (dump_file, "Function is interposable;"
2320 " not analyzing.\n");
2321 return 0;
2322 }
2323
2324 FOR_EACH_BB_FN (this_block, cfun)
2325 {
2326 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2327 !gsi_end_p (gsi);
2328 gsi_next (&gsi))
2329 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
2330 {
2331 if (is_gimple_call (gsi_stmt (gsi)))
2332 {
2333 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2334 if (callee_t && recursive_call_p (current_function_decl,
2335 callee_t))
2336 continue;
2337 }
2338
2339 if (dump_file)
2340 {
2341 fprintf (dump_file, "Statement can throw: ");
2342 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
2343 }
2344 return 0;
2345 }
2346 }
2347
2348 node->set_nothrow_flag (true);
2349
2350 bool cfg_changed = false;
2351 if (self_recursive_p (node))
2352 FOR_EACH_BB_FN (this_block, cfun)
2353 if (gimple *g = last_stmt (this_block))
2354 if (is_gimple_call (g))
2355 {
2356 tree callee_t = gimple_call_fndecl (g);
2357 if (callee_t
2358 && recursive_call_p (current_function_decl, callee_t)
2359 && maybe_clean_eh_stmt (g)
2360 && gimple_purge_dead_eh_edges (this_block))
2361 cfg_changed = true;
2362 }
2363
2364 if (dump_file)
2365 fprintf (dump_file, "Function found to be nothrow: %s\n",
2366 current_function_name ());
2367 return cfg_changed ? TODO_cleanup_cfg : 0;
2368 }
2369
2370 } // anon namespace
2371
2372 gimple_opt_pass *
2373 make_pass_nothrow (gcc::context *ctxt)
2374 {
2375 return new pass_nothrow (ctxt);
2376 }