/* Provenance (non-source residue from the git web viewer, kept as a comment):
   git.ipfire.org mirror of thirdparty/gcc.git, blob gcc/ipa-pure-const.c,
   commit "Merge lto branch into trunk".  */
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
24
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "tree.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "tree-pass.h"
42 #include "langhooks.h"
43 #include "pointer-set.h"
44 #include "ggc.h"
45 #include "ipa-utils.h"
46 #include "gimple.h"
47 #include "cgraph.h"
48 #include "output.h"
49 #include "flags.h"
50 #include "timevar.h"
51 #include "diagnostic.h"
52 #include "langhooks.h"
53 #include "target.h"
54 #include "lto-streamer.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57
58 static struct pointer_set_t *visited_nodes;
59
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,    /* Top: function reads and writes no global memory.  */
  IPA_PURE,     /* Function may read, but never writes, global memory.  */
  IPA_NEITHER   /* Bottom: function is neither const nor pure.  */
};
69
/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  /* True unless the declaration proves the function non-looping
     (i.e. DECL_LOOPING_CONST_OR_PURE_P was clear on a const/pure decl).  */
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if the function may throw an exception visible to callers
     (set from stmt_can_throw_external during the local scan).  */
  bool can_throw;
};
90
91 typedef struct funct_state_d * funct_state;
92
93 /* The storage of the funct_state is abstracted because there is the
94 possibility that it may be desirable to move this to the cgraph
95 local info. */
96
97 /* Array, indexed by cgraph node uid, of function states. */
98
99 DEF_VEC_P (funct_state);
100 DEF_VEC_ALLOC_P (funct_state, heap);
101 static VEC (funct_state, heap) *funct_state_vec;
102
103 /* Holders of ipa cgraph hooks: */
104 static struct cgraph_node_hook_list *function_insertion_hook_holder;
105 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
106 static struct cgraph_node_hook_list *node_removal_hook_holder;
107
108 /* Init the function state. */
109
110 static void
111 finish_state (void)
112 {
113 free (funct_state_vec);
114 }
115
116
117 /* Return the function state from NODE. */
118
119 static inline funct_state
120 get_function_state (struct cgraph_node *node)
121 {
122 if (!funct_state_vec
123 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
124 return NULL;
125 return VEC_index (funct_state, funct_state_vec, node->uid);
126 }
127
128 /* Set the function state S for NODE. */
129
130 static inline void
131 set_function_state (struct cgraph_node *node, funct_state s)
132 {
133 if (!funct_state_vec
134 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
135 VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
136 VEC_replace (funct_state, funct_state_vec, node->uid, s);
137 }
138
139 /* Check to see if the use (or definition when CHECKING_WRITE is true)
140 variable T is legal in a function that is either pure or const. */
141
142 static inline void
143 check_decl (funct_state local,
144 tree t, bool checking_write)
145 {
146 /* Do not want to do anything with volatile except mark any
147 function that uses one to be not const or pure. */
148 if (TREE_THIS_VOLATILE (t))
149 {
150 local->pure_const_state = IPA_NEITHER;
151 if (dump_file)
152 fprintf (dump_file, " Volatile operand is not const/pure");
153 return;
154 }
155
156 /* Do not care about a local automatic that is not static. */
157 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
158 return;
159
160 /* If the variable has the "used" attribute, treat it as if it had a
161 been touched by the devil. */
162 if (lookup_attribute ("used", DECL_ATTRIBUTES (t)))
163 {
164 local->pure_const_state = IPA_NEITHER;
165 if (dump_file)
166 fprintf (dump_file, " Used static/global variable is not const/pure\n");
167 return;
168 }
169
170 /* Since we have dealt with the locals and params cases above, if we
171 are CHECKING_WRITE, this cannot be a pure or constant
172 function. */
173 if (checking_write)
174 {
175 local->pure_const_state = IPA_NEITHER;
176 if (dump_file)
177 fprintf (dump_file, " static/global memory write is not const/pure\n");
178 return;
179 }
180
181 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
182 {
183 /* Readonly reads are safe. */
184 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
185 return; /* Read of a constant, do not change the function state. */
186 else
187 {
188 if (dump_file)
189 fprintf (dump_file, " global memory read is not const\n");
190 /* Just a regular read. */
191 if (local->pure_const_state == IPA_CONST)
192 local->pure_const_state = IPA_PURE;
193 }
194 }
195 else
196 {
197 /* Compilation level statics can be read if they are readonly
198 variables. */
199 if (TREE_READONLY (t))
200 return;
201
202 if (dump_file)
203 fprintf (dump_file, " static memory read is not const\n");
204 /* Just a regular read. */
205 if (local->pure_const_state == IPA_CONST)
206 local->pure_const_state = IPA_PURE;
207 }
208 }
209
210
211 /* Check to see if the use (or definition when CHECKING_WRITE is true)
212 variable T is legal in a function that is either pure or const. */
213
214 static inline void
215 check_op (funct_state local, tree t, bool checking_write)
216 {
217 t = get_base_address (t);
218 if (t && TREE_THIS_VOLATILE (t))
219 {
220 local->pure_const_state = IPA_NEITHER;
221 if (dump_file)
222 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
223 return;
224 }
225 else if (t
226 && INDIRECT_REF_P (t)
227 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
228 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
229 {
230 if (dump_file)
231 fprintf (dump_file, " Indirect ref to local memory is OK\n");
232 return;
233 }
234 else if (checking_write)
235 {
236 local->pure_const_state = IPA_NEITHER;
237 if (dump_file)
238 fprintf (dump_file, " Indirect ref write is not const/pure\n");
239 return;
240 }
241 else
242 {
243 if (dump_file)
244 fprintf (dump_file, " Indirect ref read is not const\n");
245 if (local->pure_const_state == IPA_CONST)
246 local->pure_const_state = IPA_PURE;
247 }
248 }
249
250 /* Check the parameters of a function call to CALL_EXPR to see if
251 there are any references in the parameters that are not allowed for
252 pure or const functions. Also check to see if this is either an
253 indirect call, a call outside the compilation unit, or has special
254 attributes that may also effect the purity. The CALL_EXPR node for
255 the entire call expression. */
256
257 static void
258 check_call (funct_state local, gimple call, bool ipa)
259 {
260 int flags = gimple_call_flags (call);
261 tree callee_t = gimple_call_fndecl (call);
262 struct cgraph_node* callee;
263 enum availability avail = AVAIL_NOT_AVAILABLE;
264 bool possibly_throws = stmt_could_throw_p (call);
265 bool possibly_throws_externally = (possibly_throws
266 && stmt_can_throw_external (call));
267
268 if (possibly_throws)
269 {
270 unsigned int i;
271 for (i = 0; i < gimple_num_ops (call); i++)
272 if (gimple_op (call, i)
273 && tree_could_throw_p (gimple_op (call, i)))
274 {
275 if (possibly_throws && flag_non_call_exceptions)
276 {
277 if (dump_file)
278 fprintf (dump_file, " operand can throw; looping\n");
279 local->looping = true;
280 }
281 if (possibly_throws_externally)
282 {
283 if (dump_file)
284 fprintf (dump_file, " operand can throw externally\n");
285 local->can_throw = true;
286 }
287 }
288 }
289
290 /* The const and pure flags are set by a variety of places in the
291 compiler (including here). If someone has already set the flags
292 for the callee, (such as for some of the builtins) we will use
293 them, otherwise we will compute our own information.
294
295 Const and pure functions have less clobber effects than other
296 functions so we process these first. Otherwise if it is a call
297 outside the compilation unit or an indirect call we punt. This
298 leaves local calls which will be processed by following the call
299 graph. */
300 if (callee_t)
301 {
302 callee = cgraph_node(callee_t);
303 avail = cgraph_function_body_availability (callee);
304
305 /* When bad things happen to bad functions, they cannot be const
306 or pure. */
307 if (setjmp_call_p (callee_t))
308 {
309 if (dump_file)
310 fprintf (dump_file, " setjmp is not const/pure\n");
311 local->looping = true;
312 local->pure_const_state = IPA_NEITHER;
313 }
314
315 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
316 switch (DECL_FUNCTION_CODE (callee_t))
317 {
318 case BUILT_IN_LONGJMP:
319 case BUILT_IN_NONLOCAL_GOTO:
320 if (dump_file)
321 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
322 local->pure_const_state = IPA_NEITHER;
323 local->looping = true;
324 break;
325 default:
326 break;
327 }
328 }
329
330 /* When not in IPA mode, we can still handle self recursion. */
331 if (!ipa && callee_t == current_function_decl)
332 local->looping = true;
333 /* The callee is either unknown (indirect call) or there is just no
334 scannable code for it (external call) . We look to see if there
335 are any bits available for the callee (such as by declaration or
336 because it is builtin) and process solely on the basis of those
337 bits. */
338 else if (avail <= AVAIL_OVERWRITABLE || !ipa)
339 {
340 if (possibly_throws && flag_non_call_exceptions)
341 {
342 if (dump_file)
343 fprintf (dump_file, " can throw; looping\n");
344 local->looping = true;
345 }
346 if (possibly_throws_externally)
347 {
348 if (dump_file)
349 {
350 fprintf (dump_file, " can throw externally to lp %i\n",
351 lookup_stmt_eh_lp (call));
352 if (callee_t)
353 fprintf (dump_file, " callee:%s\n",
354 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
355 }
356 local->can_throw = true;
357 }
358 if (flags & ECF_CONST)
359 {
360 if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
361 local->looping = true;
362 }
363 else if (flags & ECF_PURE)
364 {
365 if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
366 local->looping = true;
367 if (dump_file)
368 fprintf (dump_file, " pure function call in not const\n");
369 if (local->pure_const_state == IPA_CONST)
370 local->pure_const_state = IPA_PURE;
371 }
372 else
373 {
374 if (dump_file)
375 fprintf (dump_file, " uknown function call is not const/pure\n");
376 local->pure_const_state = IPA_NEITHER;
377 local->looping = true;
378 }
379 }
380 /* Direct functions calls are handled by IPA propagation. */
381 }
382
383 /* Wrapper around check_decl for loads. */
384
385 static bool
386 check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
387 {
388 if (DECL_P (op))
389 check_decl ((funct_state)data, op, false);
390 else
391 check_op ((funct_state)data, op, false);
392 return false;
393 }
394
395 /* Wrapper around check_decl for stores. */
396
397 static bool
398 check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
399 {
400 if (DECL_P (op))
401 check_decl ((funct_state)data, op, true);
402 else
403 check_op ((funct_state)data, op, true);
404 return false;
405 }
406
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has, degrading LOCAL's lattice state accordingly.  IPA is
   passed through to check_call to select local vs. interprocedural
   handling of direct calls.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);
  unsigned int i = 0;

  /* Debug statements never affect generated code.  */
  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local, check_load, check_store);

  /* Throwing non-call statements: calls are handled inside check_call,
     which does the analogous bookkeeping itself.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (flag_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw externally");
	  local->can_throw = true;
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
	/* Target of long jump. */
	{
	  if (dump_file)
	    fprintf (dump_file, " nonlocal label is not const/pure");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      /* An asm clobbering "memory" may touch anything.  */
      for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
	{
	  tree op = gimple_asm_clobber_op (stmt, i);
	  if (simple_cst_equal(TREE_VALUE (op), memory_identifier_string) == 1)
	    {
	      if (dump_file)
		fprintf (dump_file, " memory asm clobber is not const/pure");
	      /* Abandon all hope, ye who enter here. */
	      local->pure_const_state = IPA_NEITHER;
	    }
	}
      /* Volatile asm also pins the statement: treat it as possibly
	 looping so it can never be removed as dead.  */
      if (gimple_asm_volatile_p (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " volatile is not const/pure");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	}
      return;
    default:
      break;
    }
}
482
483
484 /* This is the main routine for finding the reference patterns for
485 global variables within a function FN. */
486
487 static funct_state
488 analyze_function (struct cgraph_node *fn, bool ipa)
489 {
490 tree decl = fn->decl;
491 tree old_decl = current_function_decl;
492 funct_state l;
493 basic_block this_block;
494
495 if (cgraph_function_body_availability (fn) <= AVAIL_OVERWRITABLE)
496 {
497 if (dump_file)
498 fprintf (dump_file, "Function is not available or overwrittable; not analyzing.\n");
499 return NULL;
500 }
501
502 l = XCNEW (struct funct_state_d);
503 l->pure_const_state = IPA_CONST;
504 l->state_previously_known = IPA_NEITHER;
505 l->looping_previously_known = true;
506 l->looping = false;
507 l->can_throw = false;
508
509 if (dump_file)
510 {
511 fprintf (dump_file, "\n\n local analysis of %s\n ",
512 cgraph_node_name (fn));
513 }
514
515 push_cfun (DECL_STRUCT_FUNCTION (decl));
516 current_function_decl = decl;
517
518 FOR_EACH_BB (this_block)
519 {
520 gimple_stmt_iterator gsi;
521 struct walk_stmt_info wi;
522
523 memset (&wi, 0, sizeof(wi));
524 for (gsi = gsi_start_bb (this_block);
525 !gsi_end_p (gsi);
526 gsi_next (&gsi))
527 {
528 check_stmt (&gsi, l, ipa);
529 if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
530 goto end;
531 }
532 }
533
534 end:
535 if (l->pure_const_state != IPA_NEITHER)
536 {
537 /* Const functions cannot have back edges (an
538 indication of possible infinite loop side
539 effect. */
540 if (mark_dfs_back_edges ())
541 {
542 /* Preheaders are needed for SCEV to work.
543 Simple lateches and recorded exits improve chances that loop will
544 proved to be finite in testcases such as in loop-15.c and loop-24.c */
545 loop_optimizer_init (LOOPS_NORMAL
546 | LOOPS_HAVE_RECORDED_EXITS);
547 if (dump_file && (dump_flags & TDF_DETAILS))
548 flow_loops_dump (dump_file, NULL, 0);
549 if (mark_irreducible_loops ())
550 {
551 if (dump_file)
552 fprintf (dump_file, " has irreducible loops\n");
553 l->looping = true;
554 }
555 else
556 {
557 loop_iterator li;
558 struct loop *loop;
559 scev_initialize ();
560 FOR_EACH_LOOP (li, loop, 0)
561 if (!finite_loop_p (loop))
562 {
563 if (dump_file)
564 fprintf (dump_file, " can not prove finiteness of loop %i\n", loop->num);
565 l->looping =true;
566 break;
567 }
568 scev_finalize ();
569 }
570 loop_optimizer_finalize ();
571 }
572 }
573
574 if (TREE_READONLY (decl))
575 {
576 l->pure_const_state = IPA_CONST;
577 l->state_previously_known = IPA_CONST;
578 if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
579 l->looping = false, l->looping_previously_known = false;
580 }
581 if (DECL_PURE_P (decl))
582 {
583 if (l->pure_const_state != IPA_CONST)
584 l->pure_const_state = IPA_PURE;
585 l->state_previously_known = IPA_PURE;
586 if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
587 l->looping = false, l->looping_previously_known = false;
588 }
589 if (TREE_NOTHROW (decl))
590 l->can_throw = false;
591
592 pop_cfun ();
593 current_function_decl = old_decl;
594 if (dump_file)
595 {
596 if (l->looping)
597 fprintf (dump_file, "Function is locally looping.\n");
598 if (l->can_throw)
599 fprintf (dump_file, "Function is locally throwing.\n");
600 if (l->pure_const_state == IPA_CONST)
601 fprintf (dump_file, "Function is locally const.\n");
602 if (l->pure_const_state == IPA_PURE)
603 fprintf (dump_file, "Function is locally pure.\n");
604 }
605 return l;
606 }
607
608 /* Called when new function is inserted to callgraph late. */
609 static void
610 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
611 {
612 if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
613 return;
614 /* There are some shared nodes, in particular the initializers on
615 static declarations. We do not need to scan them more than once
616 since all we would be interested in are the addressof
617 operations. */
618 visited_nodes = pointer_set_create ();
619 set_function_state (node, analyze_function (node, true));
620 pointer_set_destroy (visited_nodes);
621 visited_nodes = NULL;
622 }
623
624 /* Called when new clone is inserted to callgraph late. */
625
626 static void
627 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
628 void *data ATTRIBUTE_UNUSED)
629 {
630 if (get_function_state (src))
631 {
632 funct_state l = XNEW (struct funct_state_d);
633 gcc_assert (!get_function_state (dst));
634 memcpy (l, get_function_state (src), sizeof (*l));
635 set_function_state (dst, l);
636 }
637 }
638
639 /* Called when new clone is inserted to callgraph late. */
640
641 static void
642 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
643 {
644 if (get_function_state (node))
645 {
646 free (get_function_state (node));
647 set_function_state (node, NULL);
648 }
649 }
650
651 \f
652 static void
653 register_hooks (void)
654 {
655 static bool init_p = false;
656
657 if (init_p)
658 return;
659
660 init_p = true;
661
662 node_removal_hook_holder =
663 cgraph_add_node_removal_hook (&remove_node_data, NULL);
664 node_duplication_hook_holder =
665 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
666 function_insertion_hook_holder =
667 cgraph_add_function_insertion_hook (&add_new_function, NULL);
668 }
669
670
671 /* Analyze each function in the cgraph to see if it is locally PURE or
672 CONST. */
673
674 static void
675 generate_summary (void)
676 {
677 struct cgraph_node *node;
678
679 register_hooks ();
680
681 /* There are some shared nodes, in particular the initializers on
682 static declarations. We do not need to scan them more than once
683 since all we would be interested in are the addressof
684 operations. */
685 visited_nodes = pointer_set_create ();
686
687 /* Process all of the functions.
688
689 We do NOT process any AVAIL_OVERWRITABLE functions, we cannot
690 guarantee that what we learn about the one we see will be true
691 for the one that overrides it.
692 */
693 for (node = cgraph_nodes; node; node = node->next)
694 if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
695 set_function_state (node, analyze_function (node, true));
696
697 pointer_set_destroy (visited_nodes);
698 visited_nodes = NULL;
699 }
700
701
/* Serialize the ipa info for lto.  For each analyzed node in SET with
   a recorded funct_state, emit its cgraph-encoder reference followed
   by a bitpack of the five state fields.  pure_const_read_summary
   must unpack the fields in exactly the reverse bitpack order.  */

static void
pure_const_write_summary (cgraph_node_set set)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  cgraph_node_set_iterator csi;

  /* First pass: count the records so the reader knows how many to
     expect before any payload is written.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && get_function_state (node) != NULL)
	count++;
    }

  lto_output_uleb128_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && get_function_state (node) != NULL)
	{
	  struct bitpack_d *bp;
	  funct_state fs;
	  int node_ref;
	  lto_cgraph_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->cgraph_node_encoder;
	  node_ref = lto_cgraph_encoder_encode (encoder, node);
	  lto_output_uleb128_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create ();
	  bp_pack_value (bp, fs->pure_const_state, 2);
	  bp_pack_value (bp, fs->state_previously_known, 2);
	  bp_pack_value (bp, fs->looping_previously_known, 1);
	  bp_pack_value (bp, fs->looping, 1);
	  bp_pack_value (bp, fs->can_throw, 1);
	  lto_output_bitpack (ob->main_stream, bp);
	  bitpack_delete (bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
754
755
/* Deserialize the ipa info for lto.  Reads, from every file's
   LTO_section_ipa_pure_const section, the record count and then per
   record a node reference plus the bitpacked funct_state fields, in
   the order matching pure_const_write_summary.  Each decoded state is
   attached to its cgraph node via set_function_state.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  register_hooks ();
  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      /* A file may simply have no section of this kind.  */
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = lto_input_uleb128 (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d *bp;
	      funct_state fs;
	      lto_cgraph_encoder_t encoder;

	      fs = XCNEW (struct funct_state_d);
	      index = lto_input_uleb128 (ib);
	      encoder = file_data->cgraph_node_encoder;
	      node = lto_cgraph_encoder_deref (encoder, index);
	      set_function_state (node, fs);

	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = lto_input_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (bp, 2);
	      fs->looping_previously_known = bp_unpack_value (bp, 1);
	      fs->looping = bp_unpack_value (bp, 1);
	      fs->can_throw = bp_unpack_value (bp, 1);
	      bitpack_delete (bp);
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
813
814
815 static bool
816 ignore_edge (struct cgraph_edge *e)
817 {
818 return (!e->can_throw_external);
819 }
820
821 /* Return true if NODE is self recursive function. */
822
823 static bool
824 self_recursive_p (struct cgraph_node *node)
825 {
826 struct cgraph_edge *e;
827 for (e = node->callees; e; e = e->next_callee)
828 if (e->callee == node)
829 return true;
830 return false;
831 }
832
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.
   Note that there is no function_transform pass since this only
   updates the function_decl.  Runs two propagations over the reduced
   (cycle-collapsed) callgraph order: first pure/const + looping, then
   nothrow.  */

static unsigned int
propagate (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  /* Summaries are final from here on; drop the hooks that kept them
     in sync with callgraph changes.  */
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  order_pos = ipa_utils_reduced_inorder (order, true, false, NULL);
  if (dump_file)
    {
      dump_cgraph (dump_file);
      ipa_utils_print_order(dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information thru the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);
	  if (pure_const_state < w_l->pure_const_state)
	    pure_const_state = w_l->pure_const_state;

	  if (w_l->looping)
	    looping = true;

	  /* IPA_NEITHER is the lattice bottom; nothing more to learn.  */
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* A cycle of more than one node can recurse through itself,
	     so treat it as potentially looping.  */
	  if (count > 1)
	    looping = true;

	  for (e = w->callees; e; e = e->next_callee)
	    {
	      struct cgraph_node *y = e->callee;

	      if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (pure_const_state < y_l->pure_const_state)
		    pure_const_state = y_l->pure_const_state;
		  if (pure_const_state == IPA_NEITHER)
		    break;
		  if (y_l->looping)
		    looping = true;
		}
	    }
	  /* Advance to the next member of this collapsed cycle.  */
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  /* Never contradict what the user declared on this node.  */
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    this_state = w_l->state_previously_known;
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  switch (this_state)
	    {
	    case IPA_CONST:
	      if (!TREE_READONLY (w->decl) && dump_file)
		fprintf (dump_file, "Function found to be %sconst: %s\n",
			 this_looping ? "looping " : "",
			 cgraph_node_name (w));
	      TREE_READONLY (w->decl) = 1;
	      DECL_LOOPING_CONST_OR_PURE_P (w->decl) = this_looping;
	      break;

	    case IPA_PURE:
	      if (!DECL_PURE_P (w->decl) && dump_file)
		fprintf (dump_file, "Function found to be %spure: %s\n",
			 this_looping ? "looping " : "",
			 cgraph_node_name (w));
	      DECL_PURE_P (w->decl) = 1;
	      DECL_LOOPING_CONST_OR_PURE_P (w->decl) = this_looping;
	      break;

	    default:
	      break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  /* Cleanup. */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
	{
	  w_info = (struct ipa_dfs_info *) node->aux;
	  free (node->aux);
	  node->aux = NULL;
	}
    }
  /* Second propagation: nothrow.  Edges that cannot throw externally
     are ignored when building this order.  */
  order_pos = ipa_utils_reduced_inorder (order, true, false, ignore_edge);
  if (dump_file)
    {
      dump_cgraph (dump_file);
      ipa_utils_print_order(dump_file, "reduced for nothrow", order, order_pos);
    }
  /* Propagate the local information thru the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  if (w_l->can_throw)
	    can_throw = true;

	  if (can_throw)
	    break;

	  for (e = w->callees; e; e = e->next_callee)
	    {
	      struct cgraph_node *y = e->callee;

	      if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
		{
		  funct_state y_l = get_function_state (y);

		  if (can_throw)
		    break;
		  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
		      && e->can_throw_external)
		    can_throw = true;
		}
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      struct cgraph_edge *e;
	      TREE_NOTHROW (w->decl) = true;
	      /* Caller edges can no longer throw through this callee.  */
	      for (e = w->callers; e; e = e->next_caller)
		e->can_throw_external = false;
	      if (dump_file)
		fprintf (dump_file, "Function found to be nothrow: %s\n",
			 cgraph_node_name (w));
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  /* Cleanup. */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
	{
	  w_info = (struct ipa_dfs_info *) node->aux;
	  free (node->aux);
	  node->aux = NULL;
	}
      if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
	free (get_function_state (node));
    }

  free (order);
  /* NOTE(review): VEC_free is believed to null funct_state_vec, making
     the following finish_state call (which frees it again) a harmless
     no-op — but it is redundant; confirm against vec.h and drop one of
     the two.  */
  VEC_free (funct_state, heap, funct_state_vec);
  finish_state ();
  return 0;
}
1058
1059 static bool
1060 gate_pure_const (void)
1061 {
1062 return (flag_ipa_pure_const
1063 /* Don't bother doing anything if the program has errors. */
1064 && !(errorcount || sorrycount));
1065 }
1066
/* Pass descriptor for the interprocedural pure/const discovery pass.
   The summary hooks let the analysis be serialized for LTO; there is
   no function_transform hook because propagate updates the decls
   directly.  */

struct ipa_opt_pass_d pass_ipa_pure_const =
{
 {
  IPA_PASS,
  "pure-const",				/* name */
  gate_pure_const,			/* gate */
  propagate,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PURE_CONST,			/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 },
 generate_summary,			/* generate_summary */
 pure_const_write_summary,		/* write_summary */
 pure_const_read_summary,		/* read_summary */
 NULL,					/* function_read_summary */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};
1092
1093 /* Simple local pass for pure const discovery reusing the analysis from
1094 ipa_pure_const. This pass is effective when executed together with
1095 other optimization passes in early optimization pass queue. */
1096
1097 static unsigned int
1098 local_pure_const (void)
1099 {
1100 bool changed = false;
1101 funct_state l;
1102
1103 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1104 we must not promote functions that are called by already processed functions. */
1105
1106 if (function_called_by_processed_nodes_p ())
1107 {
1108 if (dump_file)
1109 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1110 return 0;
1111 }
1112
1113 l = analyze_function (cgraph_node (current_function_decl), false);
1114 if (!l)
1115 {
1116 if (dump_file)
1117 fprintf (dump_file, "Function has wrong visibility; ignoring\n");
1118 return 0;
1119 }
1120
1121 switch (l->pure_const_state)
1122 {
1123 case IPA_CONST:
1124 if (!TREE_READONLY (current_function_decl))
1125 {
1126 TREE_READONLY (current_function_decl) = 1;
1127 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = l->looping;
1128 changed = true;
1129 if (dump_file)
1130 fprintf (dump_file, "Function found to be %sconst: %s\n",
1131 l->looping ? "looping " : "",
1132 lang_hooks.decl_printable_name (current_function_decl,
1133 2));
1134 }
1135 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1136 && !l->looping)
1137 {
1138 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = false;
1139 changed = true;
1140 if (dump_file)
1141 fprintf (dump_file, "Function found to be non-looping: %s\n",
1142 lang_hooks.decl_printable_name (current_function_decl,
1143 2));
1144 }
1145 break;
1146
1147 case IPA_PURE:
1148 if (!TREE_READONLY (current_function_decl))
1149 {
1150 DECL_PURE_P (current_function_decl) = 1;
1151 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = l->looping;
1152 changed = true;
1153 if (dump_file)
1154 fprintf (dump_file, "Function found to be %spure: %s\n",
1155 l->looping ? "looping " : "",
1156 lang_hooks.decl_printable_name (current_function_decl,
1157 2));
1158 }
1159 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1160 && !l->looping)
1161 {
1162 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = false;
1163 changed = true;
1164 if (dump_file)
1165 fprintf (dump_file, "Function found to be non-looping: %s\n",
1166 lang_hooks.decl_printable_name (current_function_decl,
1167 2));
1168 }
1169 break;
1170
1171 default:
1172 break;
1173 }
1174 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1175 {
1176 struct cgraph_edge *e;
1177
1178 TREE_NOTHROW (current_function_decl) = true;
1179 for (e = cgraph_node (current_function_decl)->callers;
1180 e; e = e->next_caller)
1181 e->can_throw_external = false;
1182 changed = true;
1183 if (dump_file)
1184 fprintf (dump_file, "Function found to be nothrow: %s\n",
1185 lang_hooks.decl_printable_name (current_function_decl,
1186 2));
1187 }
1188 if (l)
1189 free (l);
1190 if (changed)
1191 return execute_fixup_cfg ();
1192 else
1193 return 0;
1194 }
1195
/* Pass descriptor for the per-function (early) pure/const discovery
   pass; shares its gate and analysis with the IPA variant above.  */

struct gimple_opt_pass pass_local_pure_const =
{
 {
  GIMPLE_PASS,
  "local-pure-const",			/* name */
  gate_pure_const,			/* gate */
  local_pure_const,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PURE_CONST,			/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};