f7d118a9 1/* Callgraph based analysis of static variables.
d353bf18 2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
f7d118a9 3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
8c4c00c1 9Software Foundation; either version 3, or (at your option) any later
f7d118a9 10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
f7d118a9 20
f0b5f617 21/* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
f7d118a9 24
 25 This must be run after inlining decisions have been made, since
 26 otherwise the local sets will not contain information that is
 27 consistent with the post-inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
30/* The code in this module is called by the ipa pass manager. It
 31 should be one of the later passes since its information is used by
32 the rest of the compilation. */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
37#include "tm.h"
b20a8bb4 38#include "hash-set.h"
39#include "machmode.h"
40#include "vec.h"
41#include "double-int.h"
42#include "input.h"
43#include "alias.h"
44#include "symtab.h"
45#include "wide-int.h"
46#include "inchash.h"
f7d118a9 47#include "tree.h"
b20a8bb4 48#include "fold-const.h"
9ed99284 49#include "print-tree.h"
50#include "calls.h"
94ea8568 51#include "predict.h"
94ea8568 52#include "hard-reg-set.h"
53#include "input.h"
54#include "function.h"
55#include "dominance.h"
56#include "cfg.h"
57#include "cfganal.h"
bc61cadb 58#include "basic-block.h"
59#include "tree-ssa-alias.h"
60#include "internal-fn.h"
61#include "tree-eh.h"
62#include "gimple-expr.h"
63#include "is-a.h"
073c1fd5 64#include "gimple.h"
dcf1a1ec 65#include "gimple-iterator.h"
66#include "gimple-walk.h"
073c1fd5 67#include "tree-cfg.h"
05d9c18a 68#include "tree-ssa-loop-niter.h"
f7d118a9 69#include "tree-inline.h"
70#include "tree-pass.h"
71#include "langhooks.h"
1140c305 72#include "hash-map.h"
73#include "plugin-api.h"
74#include "ipa-ref.h"
75#include "cgraph.h"
f7d118a9 76#include "ipa-utils.h"
f7d118a9 77#include "flags.h"
f7d118a9 78#include "diagnostic.h"
ce084dfc 79#include "gimple-pretty-print.h"
f7d118a9 80#include "langhooks.h"
959fce68 81#include "target.h"
7bfefa9d 82#include "lto-streamer.h"
2541503d 83#include "data-streamer.h"
84#include "tree-streamer.h"
c9263b6a 85#include "cfgloop.h"
86#include "tree-scalar-evolution.h"
2c06958d 87#include "intl.h"
88#include "opts.h"
f7d118a9 89
90/* Lattice values for const and pure functions. Everything starts out
91 being const, then may drop to pure and then neither depending on
92 what is found. */
93enum pure_const_state_e
94{
95 IPA_CONST,
96 IPA_PURE,
97 IPA_NEITHER
98};
99
fc94a528 100const char *pure_const_names[3] = {"const", "pure", "neither"};
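
/* Illustrative sketch, not part of the pass: the three values form a
   linear lattice IPA_CONST < IPA_PURE < IPA_NEITHER, so combining the
   effects of two pieces of code is simply the maximum of their states;
   for example, a const body that calls a pure function is at best pure.  */

static enum pure_const_state_e ATTRIBUTE_UNUSED
example_combine_states (enum pure_const_state_e a, enum pure_const_state_e b)
{
  /* MAX (IPA_CONST, IPA_PURE) == IPA_PURE; anything combined with
     IPA_NEITHER stays IPA_NEITHER.  */
  return MAX (a, b);
}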
101
cb886925 102/* Holder for the const_state. There is one of these per function
103 decl. */
48e1416a 104struct funct_state_d
f7d118a9 105{
cb886925 106 /* See above. */
f7d118a9 107 enum pure_const_state_e pure_const_state;
b5cebd44 108 /* What the user set here; we can always be sure about this. */
48e1416a 109 enum pure_const_state_e state_previously_known;
110 bool looping_previously_known;
cb886925 111
 112 /* True if the function could possibly loop forever. There are a
 113 lot of ways that this could be determined. We are pretty
 114 conservative here. While it is possible to CSE pure and const
 115 calls, it is not legal to have DCE get rid of the call if there
 116 is a possibility that the call could loop forever, since this is
 117 a behavioral change. */
9c2a0c05 118 bool looping;
cb886925 119
b5cebd44 120 bool can_throw;
04c849b3 121
122 /* If function can call free, munmap or otherwise make previously
123 non-trapping memory accesses trapping. */
124 bool can_free;
f7d118a9 125};
126
db86e424 127/* State used when we know nothing about function. */
128static struct funct_state_d varying_state
04c849b3 129 = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };
db86e424 130
131
f7d118a9 132typedef struct funct_state_d * funct_state;
133
cb886925 134/* The storage of the funct_state is abstracted because there is the
135 possibility that it may be desirable to move this to the cgraph
48e1416a 136 local info. */
cb886925 137
138/* Array, indexed by cgraph node uid, of function states. */
139
f1f41a6c 140static vec<funct_state> funct_state_vec;
cb886925 141
415309e2 142static bool gate_pure_const (void);
143
144namespace {
145
146const pass_data pass_data_ipa_pure_const =
147{
148 IPA_PASS, /* type */
149 "pure-const", /* name */
150 OPTGROUP_NONE, /* optinfo_flags */
151 TV_IPA_PURE_CONST, /* tv_id */
152 0, /* properties_required */
153 0, /* properties_provided */
154 0, /* properties_destroyed */
155 0, /* todo_flags_start */
156 0, /* todo_flags_finish */
157};
158
159class pass_ipa_pure_const : public ipa_opt_pass_d
160{
161public:
162 pass_ipa_pure_const(gcc::context *ctxt);
163
164 /* opt_pass methods: */
165 bool gate (function *) { return gate_pure_const (); }
166 unsigned int execute (function *fun);
167
168 void register_hooks (void);
169
170private:
171 bool init_p;
172
173 /* Holders of ipa cgraph hooks: */
174 struct cgraph_node_hook_list *function_insertion_hook_holder;
175 struct cgraph_2node_hook_list *node_duplication_hook_holder;
176 struct cgraph_node_hook_list *node_removal_hook_holder;
177
178}; // class pass_ipa_pure_const
179
180} // anon namespace
cb886925 181
2c06958d 182/* Try to guess if the function body will always be visible to the compiler
 183 when compiling the call and whether the compiler will be able
 184 to propagate the information by itself. */
185
186static bool
187function_always_visible_to_compiler_p (tree decl)
188{
189 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
190}
191
192/* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
193 is true if the function is known to be finite. The diagnostic is
431205b7 194 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
2c06958d 195 OPTION; this function may initialize it, and it is always returned
196 by the function. */
197
431205b7 198static hash_set<tree> *
2c06958d 199suggest_attribute (int option, tree decl, bool known_finite,
431205b7 200 hash_set<tree> *warned_about,
2c06958d 201 const char * attrib_name)
202{
2c5d2e39 203 if (!option_enabled (option, &global_options))
2c06958d 204 return warned_about;
205 if (TREE_THIS_VOLATILE (decl)
206 || (known_finite && function_always_visible_to_compiler_p (decl)))
207 return warned_about;
208
209 if (!warned_about)
431205b7 210 warned_about = new hash_set<tree>;
211 if (warned_about->contains (decl))
2c06958d 212 return warned_about;
431205b7 213 warned_about->add (decl);
2c06958d 214 warning_at (DECL_SOURCE_LOCATION (decl),
215 option,
216 known_finite
217 ? _("function might be candidate for attribute %<%s%>")
218 : _("function might be candidate for attribute %<%s%>"
219 " if it is known to return normally"), attrib_name);
220 return warned_about;
221}
222
223/* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
224 is true if the function is known to be finite. */
225
226static void
227warn_function_pure (tree decl, bool known_finite)
228{
431205b7 229 static hash_set<tree> *warned_about;
2c06958d 230
231 warned_about
232 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
233 known_finite, warned_about, "pure");
234}
235
 236/* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
237 is true if the function is known to be finite. */
238
239static void
240warn_function_const (tree decl, bool known_finite)
241{
431205b7 242 static hash_set<tree> *warned_about;
2c06958d 243 warned_about
244 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
245 known_finite, warned_about, "const");
246}
43d60d64 247
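/* Emit suggestion about __attribute__((noreturn)) for DECL, unless the
   frontend says a missing noreturn is acceptable here or the target
   suppresses warnings about the function's return.  */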
64641360 248static void
43d60d64 249warn_function_noreturn (tree decl)
250{
431205b7 251 static hash_set<tree> *warned_about;
08c6cbd2 252 if (!lang_hooks.missing_noreturn_ok_p (decl)
253 && targetm.warn_func_return (decl))
43d60d64 254 warned_about
255 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
256 true, warned_about, "noreturn");
257}
9631926a 258
1ee39ad1 259/* Return true if we have a function state for NODE. */
260
261static inline bool
262has_function_state (struct cgraph_node *node)
263{
f1f41a6c 264 if (!funct_state_vec.exists ()
265 || funct_state_vec.length () <= (unsigned int)node->uid)
1ee39ad1 266 return false;
f1f41a6c 267 return funct_state_vec[node->uid] != NULL;
1ee39ad1 268}
269
48e1416a 270/* Return the function state from NODE. */
f7d118a9 271
272static inline funct_state
273get_function_state (struct cgraph_node *node)
274{
f1f41a6c 275 if (!funct_state_vec.exists ()
276 || funct_state_vec.length () <= (unsigned int)node->uid
277 || !funct_state_vec[node->uid])
1ee39ad1 278 /* We might want to put correct previously_known state into varying. */
db86e424 279 return &varying_state;
f1f41a6c 280 return funct_state_vec[node->uid];
cb886925 281}
282
283/* Set the function state S for NODE. */
284
285static inline void
286set_function_state (struct cgraph_node *node, funct_state s)
287{
f1f41a6c 288 if (!funct_state_vec.exists ()
289 || funct_state_vec.length () <= (unsigned int)node->uid)
290 funct_state_vec.safe_grow_cleared (node->uid + 1);
291 funct_state_vec[node->uid] = s;
f7d118a9 292}
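
/* Illustrative sketch, not part of the pass: the per-node state is
   accessed only through the helpers above, keyed by the cgraph node uid.
   A hypothetical consumer could query it as follows; get_function_state
   falls back to varying_state (IPA_NEITHER) when nothing was recorded,
   so the query is safe even for nodes that were never analyzed.  */

static bool ATTRIBUTE_UNUSED
example_node_known_const_p (struct cgraph_node *node)
{
  return get_function_state (node)->pure_const_state == IPA_CONST;
}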
293
f0b5f617 294/* Check to see if the use (or definition, when CHECKING_WRITE is true)
f7d118a9 295 of variable T is legal in a function that is either pure or const. */
296
48e1416a 297static inline void
298check_decl (funct_state local,
023a28e1 299 tree t, bool checking_write, bool ipa)
f7d118a9 300{
f7d118a9 301 /* Do not want to do anything with volatile except mark any
302 function that uses one to be not const or pure. */
48e1416a 303 if (TREE_THIS_VOLATILE (t))
304 {
f7d118a9 305 local->pure_const_state = IPA_NEITHER;
b5cebd44 306 if (dump_file)
307 fprintf (dump_file, " Volatile operand is not const/pure");
f7d118a9 308 return;
309 }
310
311 /* Do not care about a local automatic that is not static. */
312 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
313 return;
314
b5cebd44 315 /* If the variable has the "used" attribute, treat it as if it had
316 been touched by the devil. */
83a23b05 317 if (DECL_PRESERVE_P (t))
b5cebd44 318 {
319 local->pure_const_state = IPA_NEITHER;
320 if (dump_file)
321 fprintf (dump_file, " Used static/global variable is not const/pure\n");
322 return;
323 }
324
023a28e1 325 /* In IPA mode we are not interested in checking actual loads and stores;
326 they will be processed at propagation time using ipa_ref. */
327 if (ipa)
328 return;
329
f7d118a9 330 /* Since we have dealt with the locals and params cases above, if we
331 are CHECKING_WRITE, this cannot be a pure or constant
332 function. */
48e1416a 333 if (checking_write)
bc17fd99 334 {
335 local->pure_const_state = IPA_NEITHER;
b5cebd44 336 if (dump_file)
337 fprintf (dump_file, " static/global memory write is not const/pure\n");
bc17fd99 338 return;
339 }
f7d118a9 340
341 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
342 {
b5cebd44 343 /* Readonly reads are safe. */
344 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
345 return; /* Read of a constant, do not change the function state. */
48e1416a 346 else
f7d118a9 347 {
b5cebd44 348 if (dump_file)
349 fprintf (dump_file, " global memory read is not const\n");
f7d118a9 350 /* Just a regular read. */
351 if (local->pure_const_state == IPA_CONST)
352 local->pure_const_state = IPA_PURE;
353 }
354 }
b5cebd44 355 else
356 {
357 /* Compilation level statics can be read if they are readonly
358 variables. */
359 if (TREE_READONLY (t))
360 return;
361
362 if (dump_file)
363 fprintf (dump_file, " static memory read is not const\n");
364 /* Just a regular read. */
365 if (local->pure_const_state == IPA_CONST)
366 local->pure_const_state = IPA_PURE;
367 }
f7d118a9 368}
369
f7d118a9 370
b5cebd44 371/* Check to see if the use (or definition, when CHECKING_WRITE is true)
 372 of operand T is legal in a function that is either pure or const. */
f7d118a9 373
48e1416a 374static inline void
5ed0b345 375check_op (funct_state local, tree t, bool checking_write)
f7d118a9 376{
3ae61172 377 t = get_base_address (t);
378 if (t && TREE_THIS_VOLATILE (t))
f7d118a9 379 {
5ed0b345 380 local->pure_const_state = IPA_NEITHER;
381 if (dump_file)
382 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
383 return;
384 }
3ae61172 385 else if (t
182cf5a9 386 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
3ae61172 387 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
388 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
389 {
390 if (dump_file)
391 fprintf (dump_file, " Indirect ref to local memory is OK\n");
392 return;
393 }
5ed0b345 394 else if (checking_write)
395 {
396 local->pure_const_state = IPA_NEITHER;
397 if (dump_file)
398 fprintf (dump_file, " Indirect ref write is not const/pure\n");
399 return;
400 }
401 else
402 {
403 if (dump_file)
404 fprintf (dump_file, " Indirect ref read is not const\n");
405 if (local->pure_const_state == IPA_CONST)
406 local->pure_const_state = IPA_PURE;
f7d118a9 407 }
408}
409
023a28e1 410/* Compute the state based on ECF FLAGS and store it to STATE and LOOPING. */
411
412static void
413state_from_flags (enum pure_const_state_e *state, bool *looping,
414 int flags, bool cannot_lead_to_return)
415{
416 *looping = false;
417 if (flags & ECF_LOOPING_CONST_OR_PURE)
418 {
419 *looping = true;
420 if (dump_file && (dump_flags & TDF_DETAILS))
421 fprintf (dump_file, " looping");
422 }
423 if (flags & ECF_CONST)
424 {
425 *state = IPA_CONST;
426 if (dump_file && (dump_flags & TDF_DETAILS))
427 fprintf (dump_file, " const\n");
428 }
429 else if (flags & ECF_PURE)
430 {
431 *state = IPA_PURE;
432 if (dump_file && (dump_flags & TDF_DETAILS))
433 fprintf (dump_file, " pure\n");
434 }
435 else if (cannot_lead_to_return)
436 {
437 *state = IPA_PURE;
438 *looping = true;
439 if (dump_file && (dump_flags & TDF_DETAILS))
440 fprintf (dump_file, " ignoring side effects->pure looping\n");
441 }
442 else
443 {
444 if (dump_file && (dump_flags & TDF_DETAILS))
9631926a 445 fprintf (dump_file, " neither\n");
023a28e1 446 *state = IPA_NEITHER;
447 *looping = true;
448 }
449}
450
 451/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
 452 better of the two variants into STATE and LOOPING.
 453 Be sure to merge looping correctly: IPA_NEITHER functions
 454 have looping 0 even if they do not have to return. */
455
456static inline void
457better_state (enum pure_const_state_e *state, bool *looping,
458 enum pure_const_state_e state2, bool looping2)
459{
460 if (state2 < *state)
461 {
462 if (*state == IPA_NEITHER)
463 *looping = looping2;
464 else
465 *looping = MIN (*looping, looping2);
26fc128e 466 *state = state2;
023a28e1 467 }
468 else if (state2 != IPA_NEITHER)
469 *looping = MIN (*looping, looping2);
470}
471
 472/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
 473 worse of the two variants into STATE and LOOPING. */
474
475static inline void
476worse_state (enum pure_const_state_e *state, bool *looping,
477 enum pure_const_state_e state2, bool looping2)
478{
479 *state = MAX (*state, state2);
480 *looping = MAX (*looping, looping2);
481}
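
/* Illustrative sketch, not part of the pass, of how the helpers above
   compose: when a call to an opaque callee is seen, its ECF flags are
   turned into an optimistic state with state_from_flags and folded into
   the caller's running summary with worse_state, which is essentially
   what check_call below does in local (non-IPA) mode.  better_state is
   the dual operation, used when externally known facts (such as declared
   attributes) may only improve the computed state.  */

static void ATTRIBUTE_UNUSED
example_fold_opaque_call (funct_state local, int ecf_flags,
                          bool call_cannot_return)
{
  enum pure_const_state_e call_state;
  bool call_looping;

  state_from_flags (&call_state, &call_looping, ecf_flags,
                    call_cannot_return);
  worse_state (&local->pure_const_state, &local->looping,
               call_state, call_looping);
}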
482
0a10fd82 483/* Recognize special cases of builtins that are by themselves not pure or const
7dd42908 484 but the function using them is. */
485static bool
a53e7471 486special_builtin_state (enum pure_const_state_e *state, bool *looping,
7dd42908 487 tree callee)
488{
489 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
490 switch (DECL_FUNCTION_CODE (callee))
491 {
492 case BUILT_IN_RETURN:
493 case BUILT_IN_UNREACHABLE:
494 case BUILT_IN_ALLOCA:
581bf1c2 495 case BUILT_IN_ALLOCA_WITH_ALIGN:
7dd42908 496 case BUILT_IN_STACK_SAVE:
497 case BUILT_IN_STACK_RESTORE:
498 case BUILT_IN_EH_POINTER:
499 case BUILT_IN_EH_FILTER:
500 case BUILT_IN_UNWIND_RESUME:
501 case BUILT_IN_CXA_END_CLEANUP:
502 case BUILT_IN_EH_COPY_VALUES:
503 case BUILT_IN_FRAME_ADDRESS:
504 case BUILT_IN_APPLY:
505 case BUILT_IN_APPLY_ARGS:
7dd42908 506 *looping = false;
507 *state = IPA_CONST;
508 return true;
509 case BUILT_IN_PREFETCH:
510 *looping = true;
511 *state = IPA_CONST;
512 return true;
5213d6c9 513 default:
514 break;
7dd42908 515 }
516 return false;
517}
518
f7d118a9 519/* Check the parameters of the call statement CALL to see if
 520 there are any references in the parameters that are not allowed for
 521 pure or const functions. Also check to see if this is either an
 522 indirect call, a call outside the compilation unit, or a call with
 523 special attributes that may also affect the purity. */
525
526static void
1a91d914 527check_call (funct_state local, gcall *call, bool ipa)
f7d118a9 528{
75a70cf9 529 int flags = gimple_call_flags (call);
b5cebd44 530 tree callee_t = gimple_call_fndecl (call);
b5cebd44 531 bool possibly_throws = stmt_could_throw_p (call);
532 bool possibly_throws_externally = (possibly_throws
533 && stmt_can_throw_external (call));
f7d118a9 534
b5cebd44 535 if (possibly_throws)
536 {
537 unsigned int i;
538 for (i = 0; i < gimple_num_ops (call); i++)
539 if (gimple_op (call, i)
540 && tree_could_throw_p (gimple_op (call, i)))
541 {
cbeb677e 542 if (possibly_throws && cfun->can_throw_non_call_exceptions)
b5cebd44 543 {
544 if (dump_file)
545 fprintf (dump_file, " operand can throw; looping\n");
546 local->looping = true;
547 }
548 if (possibly_throws_externally)
549 {
550 if (dump_file)
551 fprintf (dump_file, " operand can throw externally\n");
552 local->can_throw = true;
553 }
554 }
555 }
48e1416a 556
f7d118a9 557 /* The const and pure flags are set by a variety of places in the
558 compiler (including here). If someone has already set the flags
 559 for the callee (such as for some of the builtins), we will use
48e1416a 560 them; otherwise we will compute our own information.
561
f7d118a9 562 Const and pure functions have less clobber effects than other
563 functions so we process these first. Otherwise if it is a call
564 outside the compilation unit or an indirect call we punt. This
565 leaves local calls which will be processed by following the call
48e1416a 566 graph. */
f7d118a9 567 if (callee_t)
568 {
7dd42908 569 enum pure_const_state_e call_state;
570 bool call_looping;
571
04c849b3 572 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
573 && !nonfreeing_call_p (call))
574 local->can_free = true;
575
a53e7471 576 if (special_builtin_state (&call_state, &call_looping, callee_t))
7dd42908 577 {
578 worse_state (&local->pure_const_state, &local->looping,
579 call_state, call_looping);
580 return;
581 }
f7d118a9 582 /* When bad things happen to bad functions, they cannot be const
583 or pure. */
584 if (setjmp_call_p (callee_t))
9c2a0c05 585 {
b5cebd44 586 if (dump_file)
587 fprintf (dump_file, " setjmp is not const/pure\n");
588 local->looping = true;
9c2a0c05 589 local->pure_const_state = IPA_NEITHER;
9c2a0c05 590 }
f7d118a9 591
592 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
593 switch (DECL_FUNCTION_CODE (callee_t))
594 {
595 case BUILT_IN_LONGJMP:
596 case BUILT_IN_NONLOCAL_GOTO:
b5cebd44 597 if (dump_file)
598 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
f7d118a9 599 local->pure_const_state = IPA_NEITHER;
b5cebd44 600 local->looping = true;
f7d118a9 601 break;
602 default:
603 break;
604 }
605 }
04c849b3 606 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
607 local->can_free = true;
f7d118a9 608
b5cebd44 609 /* When not in IPA mode, we can still handle self recursion. */
e9de52cc 610 if (!ipa && callee_t
611 && recursive_call_p (current_function_decl, callee_t))
2c06958d 612 {
613 if (dump_file)
614 fprintf (dump_file, " Recursive call can loop.\n");
615 local->looping = true;
616 }
0a10fd82 617 /* Either the callee is unknown or we are doing local analysis.
ef378c27 618 Look to see if there are any bits available for the callee (such as by
 619 declaration or because it is a builtin) and process solely on the basis of
620 those bits. */
023a28e1 621 else if (!ipa)
f7d118a9 622 {
023a28e1 623 enum pure_const_state_e call_state;
624 bool call_looping;
cbeb677e 625 if (possibly_throws && cfun->can_throw_non_call_exceptions)
b5cebd44 626 {
627 if (dump_file)
628 fprintf (dump_file, " can throw; looping\n");
629 local->looping = true;
630 }
631 if (possibly_throws_externally)
632 {
633 if (dump_file)
634 {
e38def9c 635 fprintf (dump_file, " can throw externally to lp %i\n",
636 lookup_stmt_eh_lp (call));
b5cebd44 637 if (callee_t)
638 fprintf (dump_file, " callee:%s\n",
639 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
640 }
641 local->can_throw = true;
642 }
023a28e1 643 if (dump_file && (dump_flags & TDF_DETAILS))
644 fprintf (dump_file, " checking flags for call:");
645 state_from_flags (&call_state, &call_looping, flags,
646 ((flags & (ECF_NORETURN | ECF_NOTHROW))
647 == (ECF_NORETURN | ECF_NOTHROW))
648 || (!flag_exceptions && (flags & ECF_NORETURN)));
649 worse_state (&local->pure_const_state, &local->looping,
650 call_state, call_looping);
f7d118a9 651 }
b5cebd44 652 /* Direct function calls are handled by IPA propagation. */
f7d118a9 653}
654
023a28e1 655/* Wrapper around check_decl for loads in local mode. */
5ed0b345 656
657static bool
5b26a9e3 658check_load (gimple, tree op, tree, void *data)
5ed0b345 659{
660 if (DECL_P (op))
023a28e1 661 check_decl ((funct_state)data, op, false, false);
5ed0b345 662 else
663 check_op ((funct_state)data, op, false);
664 return false;
665}
666
023a28e1 667/* Wrapper around check_decl for stores in local mode. */
5ed0b345 668
669static bool
5b26a9e3 670check_store (gimple, tree op, tree, void *data)
5ed0b345 671{
672 if (DECL_P (op))
023a28e1 673 check_decl ((funct_state)data, op, true, false);
674 else
675 check_op ((funct_state)data, op, true);
676 return false;
677}
678
679/* Wrapper around check_decl for loads in ipa mode. */
680
681static bool
5b26a9e3 682check_ipa_load (gimple, tree op, tree, void *data)
023a28e1 683{
684 if (DECL_P (op))
685 check_decl ((funct_state)data, op, false, true);
686 else
687 check_op ((funct_state)data, op, false);
688 return false;
689}
690
691/* Wrapper around check_decl for stores in ipa mode. */
692
693static bool
5b26a9e3 694check_ipa_store (gimple, tree op, tree, void *data)
023a28e1 695{
696 if (DECL_P (op))
697 check_decl ((funct_state)data, op, true, true);
5ed0b345 698 else
699 check_op ((funct_state)data, op, true);
700 return false;
701}
702
dd277d48 703/* Look at the statement pointed to by GSIP and figure out what interesting
 704 side effects it has, recording them in LOCAL. */
b5cebd44 705static void
706check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
f7d118a9 707{
b5cebd44 708 gimple stmt = gsi_stmt (*gsip);
f7d118a9 709
9845d120 710 if (is_gimple_debug (stmt))
711 return;
712
b5cebd44 713 if (dump_file)
f7d118a9 714 {
b5cebd44 715 fprintf (dump_file, " scanning: ");
716 print_gimple_stmt (dump_file, stmt, 0, 0);
717 }
dd277d48 718
541c6dbb 719 if (gimple_has_volatile_ops (stmt)
720 && !gimple_clobber_p (stmt))
4b68df2e 721 {
722 local->pure_const_state = IPA_NEITHER;
723 if (dump_file)
724 fprintf (dump_file, " Volatile stmt is not const/pure\n");
725 }
726
5ed0b345 727 /* Look for loads and stores. */
023a28e1 728 walk_stmt_load_store_ops (stmt, local,
729 ipa ? check_ipa_load : check_load,
730 ipa ? check_ipa_store : check_store);
b5cebd44 731
732 if (gimple_code (stmt) != GIMPLE_CALL
733 && stmt_could_throw_p (stmt))
734 {
cbeb677e 735 if (cfun->can_throw_non_call_exceptions)
b5cebd44 736 {
737 if (dump_file)
10f4615f 738 fprintf (dump_file, " can throw; looping\n");
b5cebd44 739 local->looping = true;
740 }
741 if (stmt_can_throw_external (stmt))
742 {
743 if (dump_file)
10f4615f 744 fprintf (dump_file, " can throw externally\n");
b5cebd44 745 local->can_throw = true;
746 }
10f4615f 747 else
748 if (dump_file)
749 fprintf (dump_file, " can throw\n");
75a70cf9 750 }
75a70cf9 751 switch (gimple_code (stmt))
752 {
b5cebd44 753 case GIMPLE_CALL:
1a91d914 754 check_call (local, as_a <gcall *> (stmt), ipa);
f7d118a9 755 break;
75a70cf9 756 case GIMPLE_LABEL:
1a91d914 757 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
f7d118a9 758 /* Target of long jump. */
9c2a0c05 759 {
b5cebd44 760 if (dump_file)
10f4615f 761 fprintf (dump_file, " nonlocal label is not const/pure\n");
9c2a0c05 762 local->pure_const_state = IPA_NEITHER;
9c2a0c05 763 }
f7d118a9 764 break;
75a70cf9 765 case GIMPLE_ASM:
1a91d914 766 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
b5cebd44 767 {
97cf41ec 768 if (dump_file)
10f4615f 769 fprintf (dump_file, " memory asm clobber is not const/pure\n");
97cf41ec 770 /* Abandon all hope, ye who enter here. */
771 local->pure_const_state = IPA_NEITHER;
04c849b3 772 local->can_free = true;
b5cebd44 773 }
1a91d914 774 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
b5cebd44 775 {
776 if (dump_file)
10f4615f 777 fprintf (dump_file, " volatile is not const/pure\n");
b5cebd44 778 /* Abandon all hope, ye who enter here. */
779 local->pure_const_state = IPA_NEITHER;
04c849b3 780 local->looping = true;
781 local->can_free = true;
b5cebd44 782 }
783 return;
f7d118a9 784 default:
785 break;
786 }
f7d118a9 787}
788
cb886925 789
f7d118a9 790/* This is the main routine for finding the reference patterns for
791 global variables within a function FN. */
792
b5cebd44 793static funct_state
794analyze_function (struct cgraph_node *fn, bool ipa)
f7d118a9 795{
02774f2d 796 tree decl = fn->decl;
b5cebd44 797 funct_state l;
798 basic_block this_block;
f7d118a9 799
b5cebd44 800 l = XCNEW (struct funct_state_d);
801 l->pure_const_state = IPA_CONST;
df9b545b 802 l->state_previously_known = IPA_NEITHER;
803 l->looping_previously_known = true;
b5cebd44 804 l->looping = false;
805 l->can_throw = false;
04c849b3 806 l->can_free = false;
91bf9d9a 807 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
02774f2d 808 flags_from_decl_or_type (fn->decl),
415d1b9a 809 fn->cannot_return_p ());
91bf9d9a 810
02774f2d 811 if (fn->thunk.thunk_p || fn->alias)
91bf9d9a 812 {
813 /* Thunk gets propagated through, so nothing interesting happens. */
814 gcc_assert (ipa);
cfd85d03 815 if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
816 l->pure_const_state = IPA_NEITHER;
91bf9d9a 817 return l;
818 }
f7d118a9 819
820 if (dump_file)
821 {
48e1416a 822 fprintf (dump_file, "\n\n local analysis of %s\n ",
f1c8b4d7 823 fn->name ());
f7d118a9 824 }
48e1416a 825
b5cebd44 826 push_cfun (DECL_STRUCT_FUNCTION (decl));
48e1416a 827
fc00614f 828 FOR_EACH_BB_FN (this_block, cfun)
f7d118a9 829 {
b5cebd44 830 gimple_stmt_iterator gsi;
831 struct walk_stmt_info wi;
f7d118a9 832
9af5ce0c 833 memset (&wi, 0, sizeof (wi));
b5cebd44 834 for (gsi = gsi_start_bb (this_block);
835 !gsi_end_p (gsi);
836 gsi_next (&gsi))
f7d118a9 837 {
b5cebd44 838 check_stmt (&gsi, l, ipa);
04c849b3 839 if (l->pure_const_state == IPA_NEITHER
840 && l->looping
841 && l->can_throw
842 && l->can_free)
b5cebd44 843 goto end;
f7d118a9 844 }
845 }
dcccac3e 846
847end:
b5cebd44 848 if (l->pure_const_state != IPA_NEITHER)
849 {
 850 /* Const functions cannot have back edges (an
 851 indication of a possible infinite loop side
 852 effect). */
853 if (mark_dfs_back_edges ())
c9263b6a 854 {
b1887855 855 /* Preheaders are needed for SCEV to work.
0a10fd82 856 Simple latches and recorded exits improve the chances that a loop
8ff30f9a 857 will be proven finite in testcases such as loop-15.c
 858 and loop-24.c. */
859 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
860 | LOOPS_HAVE_SIMPLE_LATCHES
b1887855 861 | LOOPS_HAVE_RECORDED_EXITS);
c9263b6a 862 if (dump_file && (dump_flags & TDF_DETAILS))
863 flow_loops_dump (dump_file, NULL, 0);
864 if (mark_irreducible_loops ())
865 {
866 if (dump_file)
867 fprintf (dump_file, " has irreducible loops\n");
868 l->looping = true;
869 }
48e1416a 870 else
c9263b6a 871 {
c9263b6a 872 struct loop *loop;
873 scev_initialize ();
f21d4d00 874 FOR_EACH_LOOP (loop, 0)
c9263b6a 875 if (!finite_loop_p (loop))
876 {
877 if (dump_file)
8ff30f9a 878 fprintf (dump_file, " can not prove finiteness of "
879 "loop %i\n", loop->num);
c9263b6a 880 l->looping =true;
f21d4d00 881 break;
c9263b6a 882 }
883 scev_finalize ();
884 }
885 loop_optimizer_finalize ();
886 }
b5cebd44 887 }
888
023a28e1 889 if (dump_file && (dump_flags & TDF_DETAILS))
890 fprintf (dump_file, " checking previously known:");
023a28e1 891
892 better_state (&l->pure_const_state, &l->looping,
893 l->state_previously_known,
894 l->looping_previously_known);
b5cebd44 895 if (TREE_NOTHROW (decl))
896 l->can_throw = false;
897
898 pop_cfun ();
dcccac3e 899 if (dump_file)
900 {
b5cebd44 901 if (l->looping)
902 fprintf (dump_file, "Function is locally looping.\n");
903 if (l->can_throw)
904 fprintf (dump_file, "Function is locally throwing.\n");
905 if (l->pure_const_state == IPA_CONST)
906 fprintf (dump_file, "Function is locally const.\n");
907 if (l->pure_const_state == IPA_PURE)
908 fprintf (dump_file, "Function is locally pure.\n");
04c849b3 909 if (l->can_free)
910 fprintf (dump_file, "Function can locally free.\n");
dcccac3e 911 }
b5cebd44 912 return l;
f7d118a9 913}
914
50828ed8 915/* Called when a new function is inserted into the callgraph late. */
916static void
917add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
918{
415d1b9a 919 if (node->get_availability () < AVAIL_INTERPOSABLE)
86844d6c 920 return;
50828ed8 921 /* There are some shared nodes, in particular the initializers on
922 static declarations. We do not need to scan them more than once
923 since all we would be interested in are the addressof
924 operations. */
d1f68cd8 925 if (node->get_availability () > AVAIL_INTERPOSABLE
926 && opt_for_fn (node->decl, flag_ipa_pure_const))
2c06958d 927 set_function_state (node, analyze_function (node, true));
50828ed8 928}
929
86844d6c 930/* Called when a new clone is inserted into the callgraph late. */
931
932static void
933duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
934 void *data ATTRIBUTE_UNUSED)
935{
1ee39ad1 936 if (has_function_state (src))
86844d6c 937 {
938 funct_state l = XNEW (struct funct_state_d);
1ee39ad1 939 gcc_assert (!has_function_state (dst));
86844d6c 940 memcpy (l, get_function_state (src), sizeof (*l));
941 set_function_state (dst, l);
942 }
943}
944
 945/* Called when a function is removed from the callgraph. */
946
947static void
948remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
949{
1ee39ad1 950 if (has_function_state (node))
86844d6c 951 {
db86e424 952 funct_state l = get_function_state (node);
953 if (l != &varying_state)
954 free (l);
86844d6c 955 set_function_state (node, NULL);
956 }
957}
958
f7d118a9 959\f
415309e2 960void
961pass_ipa_pure_const::
7bfefa9d 962register_hooks (void)
f7d118a9 963{
7bfefa9d 964 if (init_p)
965 return;
966
967 init_p = true;
f7d118a9 968
86844d6c 969 node_removal_hook_holder =
35ee1c66 970 symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
86844d6c 971 node_duplication_hook_holder =
35ee1c66 972 symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
50828ed8 973 function_insertion_hook_holder =
35ee1c66 974 symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
7bfefa9d 975}
976
977
978/* Analyze each function in the cgraph to see if it is locally PURE or
979 CONST. */
980
48e1416a 981static void
80880eb6 982pure_const_generate_summary (void)
7bfefa9d 983{
984 struct cgraph_node *node;
985
415309e2 986 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
987 pass->register_hooks ();
7bfefa9d 988
48e1416a 989 /* Process all of the functions.
f7d118a9 990
415d1b9a 991 We process AVAIL_INTERPOSABLE functions. We cannot use the results
ef378c27 992 by default, but the info can be used at LTO with -fwhole-program or
0a10fd82 993 when the function is cloned and the clone is AVAILABLE. */
ef378c27 994
7c455d87 995 FOR_EACH_DEFINED_FUNCTION (node)
d1f68cd8 996 if (node->get_availability () >= AVAIL_INTERPOSABLE
997 && opt_for_fn (node->decl, flag_ipa_pure_const))
b5cebd44 998 set_function_state (node, analyze_function (node, true));
cb886925 999}
1000
7bfefa9d 1001
1002/* Serialize the ipa info for lto. */
1003
1004static void
eab36a5a 1005pure_const_write_summary (void)
7bfefa9d 1006{
1007 struct cgraph_node *node;
1008 struct lto_simple_output_block *ob
1009 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1010 unsigned int count = 0;
eab36a5a 1011 lto_symtab_encoder_iterator lsei;
1012 lto_symtab_encoder_t encoder;
7bfefa9d 1013
eab36a5a 1014 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1015
1016 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1017 lsei_next_function_in_partition (&lsei))
7bfefa9d 1018 {
eab36a5a 1019 node = lsei_cgraph_node (lsei);
02774f2d 1020 if (node->definition && has_function_state (node))
7bfefa9d 1021 count++;
1022 }
48e1416a 1023
7f385784 1024 streamer_write_uhwi_stream (ob->main_stream, count);
48e1416a 1025
7bfefa9d 1026 /* Process all of the functions. */
eab36a5a 1027 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1028 lsei_next_function_in_partition (&lsei))
7bfefa9d 1029 {
eab36a5a 1030 node = lsei_cgraph_node (lsei);
02774f2d 1031 if (node->definition && has_function_state (node))
7bfefa9d 1032 {
30baba90 1033 struct bitpack_d bp;
7bfefa9d 1034 funct_state fs;
1035 int node_ref;
70225339 1036 lto_symtab_encoder_t encoder;
48e1416a 1037
7bfefa9d 1038 fs = get_function_state (node);
1039
70225339 1040 encoder = ob->decl_state->symtab_node_encoder;
02774f2d 1041 node_ref = lto_symtab_encoder_encode (encoder, node);
7f385784 1042 streamer_write_uhwi_stream (ob->main_stream, node_ref);
48e1416a 1043
7bfefa9d 1044 /* Note that flags will need to be read in the opposite
1045 order as we are pushing the bitflags into FLAGS. */
30baba90 1046 bp = bitpack_create (ob->main_stream);
1047 bp_pack_value (&bp, fs->pure_const_state, 2);
1048 bp_pack_value (&bp, fs->state_previously_known, 2);
1049 bp_pack_value (&bp, fs->looping_previously_known, 1);
1050 bp_pack_value (&bp, fs->looping, 1);
1051 bp_pack_value (&bp, fs->can_throw, 1);
04c849b3 1052 bp_pack_value (&bp, fs->can_free, 1);
7f385784 1053 streamer_write_bitpack (&bp);
7bfefa9d 1054 }
1055 }
1056
1057 lto_destroy_simple_output_block (ob);
1058}
1059
1060
1061/* Deserialize the ipa info for lto. */
1062
48e1416a 1063static void
7bfefa9d 1064pure_const_read_summary (void)
1065{
1066 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1067 struct lto_file_decl_data *file_data;
1068 unsigned int j = 0;
1069
415309e2 1070 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1071 pass->register_hooks ();
1072
7bfefa9d 1073 while ((file_data = file_data_vec[j++]))
1074 {
1075 const char *data;
1076 size_t len;
1077 struct lto_input_block *ib
48e1416a 1078 = lto_create_simple_input_block (file_data,
1079 LTO_section_ipa_pure_const,
7bfefa9d 1080 &data, &len);
1081 if (ib)
1082 {
1083 unsigned int i;
7f385784 1084 unsigned int count = streamer_read_uhwi (ib);
7bfefa9d 1085
1086 for (i = 0; i < count; i++)
1087 {
1088 unsigned int index;
1089 struct cgraph_node *node;
30baba90 1090 struct bitpack_d bp;
7bfefa9d 1091 funct_state fs;
70225339 1092 lto_symtab_encoder_t encoder;
7bfefa9d 1093
1094 fs = XCNEW (struct funct_state_d);
7f385784 1095 index = streamer_read_uhwi (ib);
70225339 1096 encoder = file_data->symtab_node_encoder;
415d1b9a 1097 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1098 index));
7bfefa9d 1099 set_function_state (node, fs);
1100
1101 /* Note that the flags must be read in the opposite
1102 order in which they were written (the bitflags were
1103 pushed into FLAGS). */
7f385784 1104 bp = streamer_read_bitpack (ib);
7bfefa9d 1105 fs->pure_const_state
30baba90 1106 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
7bfefa9d 1107 fs->state_previously_known
30baba90 1108 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1109 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1110 fs->looping = bp_unpack_value (&bp, 1);
1111 fs->can_throw = bp_unpack_value (&bp, 1);
04c849b3 1112 fs->can_free = bp_unpack_value (&bp, 1);
fc94a528 1113 if (dump_file)
1114 {
02774f2d 1115 int flags = flags_from_decl_or_type (node->decl);
fc94a528 1116 fprintf (dump_file, "Read info for %s/%i ",
f1c8b4d7 1117 node->name (),
02774f2d 1118 node->order);
fc94a528 1119 if (flags & ECF_CONST)
1120 fprintf (dump_file, " const");
1121 if (flags & ECF_PURE)
1122 fprintf (dump_file, " pure");
1123 if (flags & ECF_NOTHROW)
1124 fprintf (dump_file, " nothrow");
1125 fprintf (dump_file, "\n pure const state: %s\n",
1126 pure_const_names[fs->pure_const_state]);
1127 fprintf (dump_file, " previously known state: %s\n",
 1128 pure_const_names[fs->state_previously_known]);
1129 if (fs->looping)
1130 fprintf (dump_file," function is locally looping\n");
1131 if (fs->looping_previously_known)
1132 fprintf (dump_file," function is previously known looping\n");
1133 if (fs->can_throw)
1134 fprintf (dump_file," function is locally throwing\n");
04c849b3 1135 if (fs->can_free)
1136 fprintf (dump_file," function can locally free\n");
fc94a528 1137 }
7bfefa9d 1138 }
1139
48e1416a 1140 lto_destroy_simple_input_block (file_data,
1141 LTO_section_ipa_pure_const,
7bfefa9d 1142 ib, data, len);
1143 }
1144 }
1145}
1146
1147
17b28e52 1148static bool
1149ignore_edge (struct cgraph_edge *e)
1150{
1151 return (!e->can_throw_external);
1152}
1153
b2c2e188 1154/* Return true if NODE is a self-recursive function.
e9de52cc 1155 Indirectly recursive functions appear as non-trivial strongly
 1156 connected components, so we need to care about self recursion
 1157 only. */
a1e88032 1158
1159static bool
1160self_recursive_p (struct cgraph_node *node)
1161{
1162 struct cgraph_edge *e;
1163 for (e = node->callees; e; e = e->next_callee)
415d1b9a 1164 if (e->callee->function_symbol () == node)
a1e88032 1165 return true;
1166 return false;
1167}
1168
366970c6 1169/* Return true if N is a cdtor that is not const or pure. In this case we may
 1170 need to remove an unreachable function if it is marked const/pure. */
1171
1172static bool
1173cdtor_p (cgraph_node *n, void *)
1174{
1175 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1176 return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
1177 return false;
1178}
1179
c0240443 1180/* Produce transitive closure over the callgraph and compute pure/const
1181 attributes. */
023a28e1 1182
366970c6 1183static bool
c0240443 1184propagate_pure_const (void)
cb886925 1185{
1186 struct cgraph_node *node;
1187 struct cgraph_node *w;
1188 struct cgraph_node **order =
35ee1c66 1189 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
cb886925 1190 int order_pos;
1191 int i;
1192 struct ipa_dfs_info * w_info;
366970c6 1193 bool remove_p = false;
cb886925 1194
7771d558 1195 order_pos = ipa_reduced_postorder (order, true, false, NULL);
f7d118a9 1196 if (dump_file)
1197 {
415d1b9a 1198 cgraph_node::dump_cgraph (dump_file);
48669653 1199 ipa_print_order (dump_file, "reduced", order, order_pos);
f7d118a9 1200 }
1201
9d75589a 1202 /* Propagate the local information through the call graph to produce
f7d118a9 1203 the global information. All the nodes within a cycle will have
1204 the same info so we collapse cycles first. Then we can do the
1205 propagation in one pass from the leaves to the roots. */
1206 for (i = 0; i < order_pos; i++ )
1207 {
1208 enum pure_const_state_e pure_const_state = IPA_CONST;
9c2a0c05 1209 bool looping = false;
54157bf1 1210 int count = 0;
f7d118a9 1211 node = order[i];
1212
02774f2d 1213 if (node->alias)
8c1fce46 1214 continue;
1215
fc94a528 1216 if (dump_file && (dump_flags & TDF_DETAILS))
1217 fprintf (dump_file, "Starting cycle\n");
1218
f7d118a9 1219 /* Find the worst state for any node in the cycle. */
1220 w = node;
023a28e1 1221 while (w && pure_const_state != IPA_NEITHER)
f7d118a9 1222 {
b5cebd44 1223 struct cgraph_edge *e;
023a28e1 1224 struct cgraph_edge *ie;
1225 int i;
51ce5652 1226 struct ipa_ref *ref = NULL;
023a28e1 1227
f7d118a9 1228 funct_state w_l = get_function_state (w);
fc94a528 1229 if (dump_file && (dump_flags & TDF_DETAILS))
1230 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
f1c8b4d7 1231 w->name (),
02774f2d 1232 w->order,
fc94a528 1233 pure_const_names[w_l->pure_const_state],
1234 w_l->looping);
f7d118a9 1235
023a28e1 1236 /* First merge in function body properties. */
1237 worse_state (&pure_const_state, &looping,
1238 w_l->pure_const_state, w_l->looping);
1239 if (pure_const_state == IPA_NEITHER)
1240 break;
1241
 1242 /* For overwritable nodes we cannot assume anything. */
415d1b9a 1243 if (w->get_availability () == AVAIL_INTERPOSABLE)
ef378c27 1244 {
023a28e1 1245 worse_state (&pure_const_state, &looping,
1246 w_l->state_previously_known,
1247 w_l->looping_previously_known);
fc94a528 1248 if (dump_file && (dump_flags & TDF_DETAILS))
1249 {
1250 fprintf (dump_file,
1251 " Overwritable. state %s looping %i\n",
1252 pure_const_names[w_l->state_previously_known],
1253 w_l->looping_previously_known);
1254 }
023a28e1 1255 break;
ef378c27 1256 }
9c2a0c05 1257
b5cebd44 1258 count++;
1259
023a28e1 1260 /* We consider recursive cycles as possibly infinite.
1261 This might be relaxed since infinite recursion leads to stack
1262 overflow. */
b5cebd44 1263 if (count > 1)
1264 looping = true;
48e1416a 1265
023a28e1 1266 /* Now walk the edges and merge in callee properties. */
48e1416a 1267 for (e = w->callees; e; e = e->next_callee)
f7d118a9 1268 {
b2c2e188 1269 enum availability avail;
cfd85d03 1270 struct cgraph_node *y = e->callee->
1271 function_or_virtual_thunk_symbol (&avail);
fc94a528 1272 enum pure_const_state_e edge_state = IPA_CONST;
1273 bool edge_looping = false;
54157bf1 1274
fc94a528 1275 if (dump_file && (dump_flags & TDF_DETAILS))
1276 {
1277 fprintf (dump_file,
1278 " Call to %s/%i",
f1c8b4d7 1279 e->callee->name (),
02774f2d 1280 e->callee->order);
fc94a528 1281 }
415d1b9a 1282 if (avail > AVAIL_INTERPOSABLE)
f7d118a9 1283 {
b5cebd44 1284 funct_state y_l = get_function_state (y);
fc94a528 1285 if (dump_file && (dump_flags & TDF_DETAILS))
1286 {
1287 fprintf (dump_file,
1288 " state:%s looping:%i\n",
1289 pure_const_names[y_l->pure_const_state],
1290 y_l->looping);
1291 }
4c170941 1292 if (y_l->pure_const_state > IPA_PURE
35ee1c66 1293 && e->cannot_lead_to_return_p ())
fc94a528 1294 {
1295 if (dump_file && (dump_flags & TDF_DETAILS))
023a28e1 1296 fprintf (dump_file,
1297 " Ignoring side effects"
1298 " -> pure, looping\n");
fc94a528 1299 edge_state = IPA_PURE;
1300 edge_looping = true;
1301 }
1302 else
1303 {
1304 edge_state = y_l->pure_const_state;
1305 edge_looping = y_l->looping;
1306 }
f7d118a9 1307 }
a53e7471 1308 else if (special_builtin_state (&edge_state, &edge_looping,
02774f2d 1309 y->decl))
7dd42908 1310 ;
ef378c27 1311 else
023a28e1 1312 state_from_flags (&edge_state, &edge_looping,
02774f2d 1313 flags_from_decl_or_type (y->decl),
35ee1c66 1314 e->cannot_lead_to_return_p ());
023a28e1 1315
1316 /* Merge the results with what we already know. */
1317 better_state (&edge_state, &edge_looping,
1318 w_l->state_previously_known,
1319 w_l->looping_previously_known);
1320 worse_state (&pure_const_state, &looping,
1321 edge_state, edge_looping);
1322 if (pure_const_state == IPA_NEITHER)
1323 break;
1324 }
1325 if (pure_const_state == IPA_NEITHER)
1326 break;
ef378c27 1327
023a28e1 1328 /* Now process the indirect calls. */
451e322a 1329 for (ie = w->indirect_calls; ie; ie = ie->next_callee)
023a28e1 1330 {
1331 enum pure_const_state_e edge_state = IPA_CONST;
1332 bool edge_looping = false;
4c170941 1333
023a28e1 1334 if (dump_file && (dump_flags & TDF_DETAILS))
1335 fprintf (dump_file, " Indirect call");
1336 state_from_flags (&edge_state, &edge_looping,
1337 ie->indirect_info->ecf_flags,
35ee1c66 1338 ie->cannot_lead_to_return_p ());
023a28e1 1339 /* Merge the results with what we already know. */
1340 better_state (&edge_state, &edge_looping,
1341 w_l->state_previously_known,
1342 w_l->looping_previously_known);
1343 worse_state (&pure_const_state, &looping,
1344 edge_state, edge_looping);
fc94a528 1345 if (pure_const_state == IPA_NEITHER)
1346 break;
f7d118a9 1347 }
023a28e1 1348 if (pure_const_state == IPA_NEITHER)
1349 break;
1350
1351 /* And finally all loads and stores. */
51ce5652 1352 for (i = 0; w->iterate_reference (i, ref); i++)
023a28e1 1353 {
1354 enum pure_const_state_e ref_state = IPA_CONST;
1355 bool ref_looping = false;
1356 switch (ref->use)
1357 {
1358 case IPA_REF_LOAD:
1359 /* readonly reads are safe. */
51ce5652 1360 if (TREE_READONLY (ref->referred->decl))
023a28e1 1361 break;
1362 if (dump_file && (dump_flags & TDF_DETAILS))
1363 fprintf (dump_file, " nonreadonly global var read\n");
1364 ref_state = IPA_PURE;
1365 break;
1366 case IPA_REF_STORE:
51ce5652 1367 if (ref->cannot_lead_to_return ())
023a28e1 1368 break;
1369 ref_state = IPA_NEITHER;
1370 if (dump_file && (dump_flags & TDF_DETAILS))
1371 fprintf (dump_file, " global var write\n");
1372 break;
1373 case IPA_REF_ADDR:
058a1b7a 1374 case IPA_REF_CHKP:
023a28e1 1375 break;
5cb7c8cf 1376 default:
1377 gcc_unreachable ();
023a28e1 1378 }
1379 better_state (&ref_state, &ref_looping,
1380 w_l->state_previously_known,
1381 w_l->looping_previously_known);
1382 worse_state (&pure_const_state, &looping,
1383 ref_state, ref_looping);
1384 if (pure_const_state == IPA_NEITHER)
1385 break;
1386 }
02774f2d 1387 w_info = (struct ipa_dfs_info *) w->aux;
f7d118a9 1388 w = w_info->next_cycle;
1389 }
fc94a528 1390 if (dump_file && (dump_flags & TDF_DETAILS))
1391 fprintf (dump_file, "Result %s looping %i\n",
1392 pure_const_names [pure_const_state],
1393 looping);
f7d118a9 1394
04c849b3 1395 /* Find the worst state of can_free for any node in the cycle. */
1396 bool can_free = false;
1397 w = node;
1398 while (w && !can_free)
1399 {
1400 struct cgraph_edge *e;
1401 funct_state w_l = get_function_state (w);
1402
1403 if (w_l->can_free
1404 || w->get_availability () == AVAIL_INTERPOSABLE
1405 || w->indirect_calls)
1406 can_free = true;
1407
1408 for (e = w->callees; e && !can_free; e = e->next_callee)
1409 {
1410 enum availability avail;
cfd85d03 1411 struct cgraph_node *y = e->callee->
1412 function_or_virtual_thunk_symbol (&avail);
04c849b3 1413
1414 if (avail > AVAIL_INTERPOSABLE)
1415 can_free = get_function_state (y)->can_free;
1416 else
1417 can_free = true;
1418 }
1419 w_info = (struct ipa_dfs_info *) w->aux;
1420 w = w_info->next_cycle;
1421 }
1422
f7d118a9 1423 /* Copy back the region's pure_const_state which is shared by
1424 all nodes in the region. */
1425 w = node;
1426 while (w)
1427 {
1428 funct_state w_l = get_function_state (w);
b5cebd44 1429 enum pure_const_state_e this_state = pure_const_state;
1430 bool this_looping = looping;
f7d118a9 1431
04c849b3 1432 w_l->can_free = can_free;
1433 w->nonfreeing_fn = !can_free;
1434 if (!can_free && dump_file)
1435 fprintf (dump_file, "Function found not to call free: %s\n",
1436 w->name ());
1437
df9b545b 1438 if (w_l->state_previously_known != IPA_NEITHER
1439 && this_state > w_l->state_previously_known)
4c170941 1440 {
1441 this_state = w_l->state_previously_known;
1442 this_looping |= w_l->looping_previously_known;
1443 }
a1e88032 1444 if (!this_looping && self_recursive_p (w))
1445 this_looping = true;
df9b545b 1446 if (!w_l->looping_previously_known)
1447 this_looping = false;
cb886925 1448
b5cebd44 1449 /* All nodes within a cycle share the same info. */
1450 w_l->pure_const_state = this_state;
1451 w_l->looping = this_looping;
1452
ce2d198d 1453 /* Inline clones share declaration with their offline copies;
1454 do not modify their declarations since the offline copy may
1455 be different. */
1456 if (!w->global.inlined_to)
1457 switch (this_state)
1458 {
1459 case IPA_CONST:
1460 if (!TREE_READONLY (w->decl))
1461 {
1462 warn_function_const (w->decl, !this_looping);
1463 if (dump_file)
1464 fprintf (dump_file, "Function found to be %sconst: %s\n",
1465 this_looping ? "looping " : "",
1466 w->name ());
1467 }
366970c6 1468 remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
1469 NULL, true);
415d1b9a 1470 w->set_const_flag (true, this_looping);
ce2d198d 1471 break;
48e1416a 1472
ce2d198d 1473 case IPA_PURE:
1474 if (!DECL_PURE_P (w->decl))
1475 {
1476 warn_function_pure (w->decl, !this_looping);
1477 if (dump_file)
1478 fprintf (dump_file, "Function found to be %spure: %s\n",
1479 this_looping ? "looping " : "",
1480 w->name ());
1481 }
366970c6 1482 remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
1483 NULL, true);
415d1b9a 1484 w->set_pure_flag (true, this_looping);
ce2d198d 1485 break;
48e1416a 1486
ce2d198d 1487 default:
1488 break;
1489 }
02774f2d 1490 w_info = (struct ipa_dfs_info *) w->aux;
17b28e52 1491 w = w_info->next_cycle;
1492 }
1493 }
1494
7771d558 1495 ipa_free_postorder_info ();
c0240443 1496 free (order);
366970c6 1497 return remove_p;
c0240443 1498}
1499
1500/* Produce transitive closure over the callgraph and compute nothrow
1501 attributes. */
1502
1503static void
1504propagate_nothrow (void)
1505{
1506 struct cgraph_node *node;
1507 struct cgraph_node *w;
1508 struct cgraph_node **order =
35ee1c66 1509 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
c0240443 1510 int order_pos;
1511 int i;
1512 struct ipa_dfs_info * w_info;
1513
7771d558 1514 order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
17b28e52 1515 if (dump_file)
1516 {
415d1b9a 1517 cgraph_node::dump_cgraph (dump_file);
7771d558 1518 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
17b28e52 1519 }
c0240443 1520
9d75589a 1521 /* Propagate the local information through the call graph to produce
17b28e52 1522 the global information. All the nodes within a cycle will have
1523 the same info so we collapse cycles first. Then we can do the
1524 propagation in one pass from the leaves to the roots. */
1525 for (i = 0; i < order_pos; i++ )
1526 {
1527 bool can_throw = false;
1528 node = order[i];
1529
02774f2d 1530 if (node->alias)
8c1fce46 1531 continue;
1532
17b28e52 1533 /* Find the worst state for any node in the cycle. */
1534 w = node;
b2c90c54 1535 while (w && !can_throw)
17b28e52 1536 {
023a28e1 1537 struct cgraph_edge *e, *ie;
17b28e52 1538 funct_state w_l = get_function_state (w);
1539
ef378c27 1540 if (w_l->can_throw
415d1b9a 1541 || w->get_availability () == AVAIL_INTERPOSABLE)
17b28e52 1542 can_throw = true;
1543
b2c90c54 1544 for (e = w->callees; e && !can_throw; e = e->next_callee)
17b28e52 1545 {
b2c2e188 1546 enum availability avail;
cfd85d03 1547 struct cgraph_node *y = e->callee->
1548 function_or_virtual_thunk_symbol (&avail);
17b28e52 1549
415d1b9a 1550 if (avail > AVAIL_INTERPOSABLE)
17b28e52 1551 {
1552 funct_state y_l = get_function_state (y);
1553
02774f2d 1554 if (y_l->can_throw && !TREE_NOTHROW (w->decl)
17b28e52 1555 && e->can_throw_external)
1556 can_throw = true;
1557 }
02774f2d 1558 else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
ef378c27 1559 can_throw = true;
17b28e52 1560 }
b2c90c54 1561 for (ie = w->indirect_calls; ie && !can_throw; ie = ie->next_callee)
023a28e1 1562 if (ie->can_throw_external)
b2c90c54 1563 can_throw = true;
02774f2d 1564 w_info = (struct ipa_dfs_info *) w->aux;
17b28e52 1565 w = w_info->next_cycle;
1566 }
1567
 1568 /* Copy back the region's nothrow state, which is shared by
 1569 all nodes in the region. */
1570 w = node;
1571 while (w)
1572 {
ecfab407 1573 funct_state w_l = get_function_state (w);
02774f2d 1574 if (!can_throw && !TREE_NOTHROW (w->decl))
b5cebd44 1575 {
ce2d198d 1576 /* Inline clones share declaration with their offline copies;
1577 do not modify their declarations since the offline copy may
1578 be different. */
1579 if (!w->global.inlined_to)
1580 {
415d1b9a 1581 w->set_nothrow_flag (true);
ce2d198d 1582 if (dump_file)
1583 fprintf (dump_file, "Function found to be nothrow: %s\n",
1584 w->name ());
1585 }
f7d118a9 1586 }
02774f2d 1587 else if (can_throw && !TREE_NOTHROW (w->decl))
ecfab407 1588 w_l->can_throw = true;
02774f2d 1589 w_info = (struct ipa_dfs_info *) w->aux;
f7d118a9 1590 w = w_info->next_cycle;
1591 }
1592 }
1593
7771d558 1594 ipa_free_postorder_info ();
f7d118a9 1595 free (order);
c0240443 1596}
1597
1598
 1599/* Produce the global information by performing a transitive closure
1600 on the local information that was produced by generate_summary. */
1601
415309e2 1602unsigned int
1603pass_ipa_pure_const::
1604execute (function *)
c0240443 1605{
1606 struct cgraph_node *node;
366970c6 1607 bool remove_p;
c0240443 1608
35ee1c66 1609 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
1610 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
1611 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
c0240443 1612
1613 /* Nothrow makes more function to not lead to return and improve
1614 later analysis. */
1615 propagate_nothrow ();
366970c6 1616 remove_p = propagate_pure_const ();
c0240443 1617
1618 /* Cleanup. */
c6d207aa 1619 FOR_EACH_FUNCTION (node)
c0240443 1620 if (has_function_state (node))
1621 free (get_function_state (node));
f1f41a6c 1622 funct_state_vec.release ();
366970c6 1623 return remove_p ? TODO_remove_functions : 0;
f7d118a9 1624}
1625
1626static bool
1627gate_pure_const (void)
1628{
d1f68cd8 1629 return flag_ipa_pure_const || in_lto_p;
f7d118a9 1630}
1631
415309e2 1632pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
1633 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
1634 pure_const_generate_summary, /* generate_summary */
1635 pure_const_write_summary, /* write_summary */
1636 pure_const_read_summary, /* read_summary */
1637 NULL, /* write_optimization_summary */
1638 NULL, /* read_optimization_summary */
1639 NULL, /* stmt_fixup */
1640 0, /* function_transform_todo_flags_start */
1641 NULL, /* function_transform */
1642 NULL), /* variable_transform */
1643 init_p(false),
1644 function_insertion_hook_holder(NULL),
1645 node_duplication_hook_holder(NULL),
1646 node_removal_hook_holder(NULL)
f7d118a9 1647{
415309e2 1648}
cbe8bda8 1649
1650ipa_opt_pass_d *
1651make_pass_ipa_pure_const (gcc::context *ctxt)
1652{
1653 return new pass_ipa_pure_const (ctxt);
1654}
1655
2c06958d 1656/* Return true if function should be skipped for local pure const analysis. */
b5cebd44 1657
2c06958d 1658static bool
1659skip_function_for_local_pure_const (struct cgraph_node *node)
b5cebd44 1660{
b5cebd44 1661 /* Because we do not schedule pass_fixup_cfg over the whole program after early
 1662 optimizations, we must not promote functions that are called by already processed functions. */
1663
1664 if (function_called_by_processed_nodes_p ())
1665 {
1666 if (dump_file)
1667 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2c06958d 1668 return true;
b5cebd44 1669 }
415d1b9a 1670 if (node->get_availability () <= AVAIL_INTERPOSABLE)
b5cebd44 1671 {
1672 if (dump_file)
0a10fd82 1673 fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
2c06958d 1674 return true;
b5cebd44 1675 }
2c06958d 1676 return false;
1677}
1678
1679/* Simple local pass for pure const discovery reusing the analysis from
1680 ipa_pure_const. This pass is effective when executed together with
1681 other optimization passes in early optimization pass queue. */
b5cebd44 1682
65b0537f 1683namespace {
1684
1685const pass_data pass_data_local_pure_const =
1686{
1687 GIMPLE_PASS, /* type */
1688 "local-pure-const", /* name */
1689 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 1690 TV_IPA_PURE_CONST, /* tv_id */
1691 0, /* properties_required */
1692 0, /* properties_provided */
1693 0, /* properties_destroyed */
1694 0, /* todo_flags_start */
1695 0, /* todo_flags_finish */
1696};
1697
1698class pass_local_pure_const : public gimple_opt_pass
1699{
1700public:
1701 pass_local_pure_const (gcc::context *ctxt)
1702 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
1703 {}
1704
1705 /* opt_pass methods: */
1706 opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
1707 virtual bool gate (function *) { return gate_pure_const (); }
1708 virtual unsigned int execute (function *);
1709
1710}; // class pass_local_pure_const
1711
1712unsigned int
1713pass_local_pure_const::execute (function *fun)
2c06958d 1714{
1715 bool changed = false;
1716 funct_state l;
1717 bool skip;
1718 struct cgraph_node *node;
1719
415d1b9a 1720 node = cgraph_node::get (current_function_decl);
2c06958d 1721 skip = skip_function_for_local_pure_const (node);
1722 if (!warn_suggest_attribute_const
1723 && !warn_suggest_attribute_pure
1724 && skip)
1725 return 0;
e0933141 1726
ab3db708 1727 l = analyze_function (node, false);
1728
1729 /* Do NORETURN discovery. */
e0933141 1730 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
65b0537f 1731 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
e0933141 1732 {
65b0537f 1733 warn_function_noreturn (fun->decl);
e0933141 1734 if (dump_file)
65b0537f 1735 fprintf (dump_file, "Function found to be noreturn: %s\n",
1736 current_function_name ());
e0933141 1737
1738 /* Update declaration and reduce profile to executed once. */
1739 TREE_THIS_VOLATILE (current_function_decl) = 1;
1740 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
65b0537f 1741 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
e0933141 1742
1743 changed = true;
1744 }
ef378c27 1745
b5cebd44 1746 switch (l->pure_const_state)
1747 {
1748 case IPA_CONST:
1749 if (!TREE_READONLY (current_function_decl))
1750 {
2c06958d 1751 warn_function_const (current_function_decl, !l->looping);
1752 if (!skip)
1753 {
415d1b9a 1754 node->set_const_flag (true, l->looping);
2c06958d 1755 changed = true;
1756 }
b5cebd44 1757 if (dump_file)
1758 fprintf (dump_file, "Function found to be %sconst: %s\n",
1759 l->looping ? "looping " : "",
9631926a 1760 current_function_name ());
b5cebd44 1761 }
1762 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1763 && !l->looping)
1764 {
2c06958d 1765 if (!skip)
1766 {
415d1b9a 1767 node->set_const_flag (true, false);
2c06958d 1768 changed = true;
1769 }
b5cebd44 1770 if (dump_file)
1771 fprintf (dump_file, "Function found to be non-looping: %s\n",
9631926a 1772 current_function_name ());
b5cebd44 1773 }
1774 break;
1775
1776 case IPA_PURE:
2c06958d 1777 if (!DECL_PURE_P (current_function_decl))
b5cebd44 1778 {
2c06958d 1779 if (!skip)
1780 {
415d1b9a 1781 node->set_pure_flag (true, l->looping);
2c06958d 1782 changed = true;
1783 }
1784 warn_function_pure (current_function_decl, !l->looping);
b5cebd44 1785 if (dump_file)
1786 fprintf (dump_file, "Function found to be %spure: %s\n",
1787 l->looping ? "looping " : "",
9631926a 1788 current_function_name ());
b5cebd44 1789 }
1790 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1791 && !l->looping)
1792 {
2c06958d 1793 if (!skip)
1794 {
415d1b9a 1795 node->set_pure_flag (true, false);
2c06958d 1796 changed = true;
1797 }
b5cebd44 1798 if (dump_file)
1799 fprintf (dump_file, "Function found to be non-looping: %s\n",
9631926a 1800 current_function_name ());
b5cebd44 1801 }
1802 break;
1803
1804 default:
1805 break;
1806 }
1807 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1808 {
415d1b9a 1809 node->set_nothrow_flag (true);
b5cebd44 1810 changed = true;
1811 if (dump_file)
1812 fprintf (dump_file, "Function found to be nothrow: %s\n",
9631926a 1813 current_function_name ());
b5cebd44 1814 }
dd045aee 1815 free (l);
b5cebd44 1816 if (changed)
1817 return execute_fixup_cfg ();
1818 else
1819 return 0;
1820}
1821
cbe8bda8 1822} // anon namespace
1823
1824gimple_opt_pass *
1825make_pass_local_pure_const (gcc::context *ctxt)
1826{
1827 return new pass_local_pure_const (ctxt);
1828}
64641360 1829
1830/* Emit noreturn warnings. */
1831
64641360 1832namespace {
1833
1834const pass_data pass_data_warn_function_noreturn =
1835{
1836 GIMPLE_PASS, /* type */
1837 "*warn_function_noreturn", /* name */
1838 OPTGROUP_NONE, /* optinfo_flags */
64641360 1839 TV_NONE, /* tv_id */
1840 PROP_cfg, /* properties_required */
1841 0, /* properties_provided */
1842 0, /* properties_destroyed */
1843 0, /* todo_flags_start */
1844 0, /* todo_flags_finish */
1845};
1846
1847class pass_warn_function_noreturn : public gimple_opt_pass
1848{
1849public:
1850 pass_warn_function_noreturn (gcc::context *ctxt)
1851 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
1852 {}
1853
1854 /* opt_pass methods: */
31315c24 1855 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
65b0537f 1856 virtual unsigned int execute (function *fun)
1857 {
1858 if (!TREE_THIS_VOLATILE (current_function_decl)
1859 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
1860 warn_function_noreturn (current_function_decl);
1861 return 0;
1862 }
64641360 1863
1864}; // class pass_warn_function_noreturn
1865
1866} // anon namespace
1867
1868gimple_opt_pass *
1869make_pass_warn_function_noreturn (gcc::context *ctxt)
1870{
1871 return new pass_warn_function_noreturn (ctxt);
1872}