]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ipa-pure-const.c
Remove trailing white spaces.
[thirdparty/gcc.git] / gcc / ipa-pure-const.c
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
24
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "tree.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "tree-pass.h"
42 #include "langhooks.h"
43 #include "pointer-set.h"
44 #include "ggc.h"
45 #include "ipa-utils.h"
46 #include "gimple.h"
47 #include "cgraph.h"
48 #include "output.h"
49 #include "flags.h"
50 #include "timevar.h"
51 #include "diagnostic.h"
52 #include "langhooks.h"
53 #include "target.h"
54 #include "lto-streamer.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57
/* Shared pointer set used so that nodes reachable from several places
   (e.g. initializers of static declarations) are scanned only once.  */
static struct pointer_set_t *visited_nodes;

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* Result of the analysis; see the lattice above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  /* Looping status promised by the declaration flags.  */
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if a statement in the body may raise an exception that
     escapes the function.  */
  bool can_throw;
};

typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

DEF_VEC_P (funct_state);
DEF_VEC_ALLOC_P (funct_state, heap);
static VEC (funct_state, heap) *funct_state_vec;

/* Holders of ipa cgraph hooks, kept so the hooks can be removed when
   propagation starts: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
107
/* Free the function state vector.  NOTE(review): despite its original
   "Init" comment this tears state down; also funct_state_vec is a VEC,
   so plain free here relies on the vector being a single allocation
   (or already released and cleared by VEC_free) -- confirm against
   vec.h.  */

static void
finish_state (void)
{
  free (funct_state_vec);
}
115
116
117 /* Return the function state from NODE. */
118
119 static inline funct_state
120 get_function_state (struct cgraph_node *node)
121 {
122 if (!funct_state_vec
123 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
124 return NULL;
125 return VEC_index (funct_state, funct_state_vec, node->uid);
126 }
127
128 /* Set the function state S for NODE. */
129
130 static inline void
131 set_function_state (struct cgraph_node *node, funct_state s)
132 {
133 if (!funct_state_vec
134 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
135 VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
136 VEC_replace (funct_state, funct_state_vec, node->uid, s);
137 }
138
139 /* Check to see if the use (or definition when CHECKING_WRITE is true)
140 variable T is legal in a function that is either pure or const. */
141
142 static inline void
143 check_decl (funct_state local,
144 tree t, bool checking_write)
145 {
146 /* Do not want to do anything with volatile except mark any
147 function that uses one to be not const or pure. */
148 if (TREE_THIS_VOLATILE (t))
149 {
150 local->pure_const_state = IPA_NEITHER;
151 if (dump_file)
152 fprintf (dump_file, " Volatile operand is not const/pure");
153 return;
154 }
155
156 /* Do not care about a local automatic that is not static. */
157 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
158 return;
159
160 /* If the variable has the "used" attribute, treat it as if it had a
161 been touched by the devil. */
162 if (lookup_attribute ("used", DECL_ATTRIBUTES (t)))
163 {
164 local->pure_const_state = IPA_NEITHER;
165 if (dump_file)
166 fprintf (dump_file, " Used static/global variable is not const/pure\n");
167 return;
168 }
169
170 /* Since we have dealt with the locals and params cases above, if we
171 are CHECKING_WRITE, this cannot be a pure or constant
172 function. */
173 if (checking_write)
174 {
175 local->pure_const_state = IPA_NEITHER;
176 if (dump_file)
177 fprintf (dump_file, " static/global memory write is not const/pure\n");
178 return;
179 }
180
181 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
182 {
183 /* Readonly reads are safe. */
184 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
185 return; /* Read of a constant, do not change the function state. */
186 else
187 {
188 if (dump_file)
189 fprintf (dump_file, " global memory read is not const\n");
190 /* Just a regular read. */
191 if (local->pure_const_state == IPA_CONST)
192 local->pure_const_state = IPA_PURE;
193 }
194 }
195 else
196 {
197 /* Compilation level statics can be read if they are readonly
198 variables. */
199 if (TREE_READONLY (t))
200 return;
201
202 if (dump_file)
203 fprintf (dump_file, " static memory read is not const\n");
204 /* Just a regular read. */
205 if (local->pure_const_state == IPA_CONST)
206 local->pure_const_state = IPA_PURE;
207 }
208 }
209
210
211 /* Check to see if the use (or definition when CHECKING_WRITE is true)
212 variable T is legal in a function that is either pure or const. */
213
214 static inline void
215 check_op (funct_state local, tree t, bool checking_write)
216 {
217 t = get_base_address (t);
218 if (t && TREE_THIS_VOLATILE (t))
219 {
220 local->pure_const_state = IPA_NEITHER;
221 if (dump_file)
222 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
223 return;
224 }
225 else if (t
226 && INDIRECT_REF_P (t)
227 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
228 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
229 {
230 if (dump_file)
231 fprintf (dump_file, " Indirect ref to local memory is OK\n");
232 return;
233 }
234 else if (checking_write)
235 {
236 local->pure_const_state = IPA_NEITHER;
237 if (dump_file)
238 fprintf (dump_file, " Indirect ref write is not const/pure\n");
239 return;
240 }
241 else
242 {
243 if (dump_file)
244 fprintf (dump_file, " Indirect ref read is not const\n");
245 if (local->pure_const_state == IPA_CONST)
246 local->pure_const_state = IPA_PURE;
247 }
248 }
249
250 /* Check the parameters of a function call to CALL_EXPR to see if
251 there are any references in the parameters that are not allowed for
252 pure or const functions. Also check to see if this is either an
253 indirect call, a call outside the compilation unit, or has special
254 attributes that may also effect the purity. The CALL_EXPR node for
255 the entire call expression. */
256
257 static void
258 check_call (funct_state local, gimple call, bool ipa)
259 {
260 int flags = gimple_call_flags (call);
261 tree callee_t = gimple_call_fndecl (call);
262 struct cgraph_node* callee;
263 enum availability avail = AVAIL_NOT_AVAILABLE;
264 bool possibly_throws = stmt_could_throw_p (call);
265 bool possibly_throws_externally = (possibly_throws
266 && stmt_can_throw_external (call));
267
268 if (possibly_throws)
269 {
270 unsigned int i;
271 for (i = 0; i < gimple_num_ops (call); i++)
272 if (gimple_op (call, i)
273 && tree_could_throw_p (gimple_op (call, i)))
274 {
275 if (possibly_throws && flag_non_call_exceptions)
276 {
277 if (dump_file)
278 fprintf (dump_file, " operand can throw; looping\n");
279 local->looping = true;
280 }
281 if (possibly_throws_externally)
282 {
283 if (dump_file)
284 fprintf (dump_file, " operand can throw externally\n");
285 local->can_throw = true;
286 }
287 }
288 }
289
290 /* The const and pure flags are set by a variety of places in the
291 compiler (including here). If someone has already set the flags
292 for the callee, (such as for some of the builtins) we will use
293 them, otherwise we will compute our own information.
294
295 Const and pure functions have less clobber effects than other
296 functions so we process these first. Otherwise if it is a call
297 outside the compilation unit or an indirect call we punt. This
298 leaves local calls which will be processed by following the call
299 graph. */
300 if (callee_t)
301 {
302 callee = cgraph_node(callee_t);
303 avail = cgraph_function_body_availability (callee);
304
305 /* When bad things happen to bad functions, they cannot be const
306 or pure. */
307 if (setjmp_call_p (callee_t))
308 {
309 if (dump_file)
310 fprintf (dump_file, " setjmp is not const/pure\n");
311 local->looping = true;
312 local->pure_const_state = IPA_NEITHER;
313 }
314
315 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
316 switch (DECL_FUNCTION_CODE (callee_t))
317 {
318 case BUILT_IN_LONGJMP:
319 case BUILT_IN_NONLOCAL_GOTO:
320 if (dump_file)
321 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
322 local->pure_const_state = IPA_NEITHER;
323 local->looping = true;
324 break;
325 default:
326 break;
327 }
328 }
329
330 /* When not in IPA mode, we can still handle self recursion. */
331 if (!ipa && callee_t == current_function_decl)
332 local->looping = true;
333 /* Either calle is unknown or we are doing local analysis.
334 Look to see if there are any bits available for the callee (such as by
335 declaration or because it is builtin) and process solely on the basis of
336 those bits. */
337 else if (!ipa || !callee_t)
338 {
339 if (possibly_throws && flag_non_call_exceptions)
340 {
341 if (dump_file)
342 fprintf (dump_file, " can throw; looping\n");
343 local->looping = true;
344 }
345 if (possibly_throws_externally)
346 {
347 if (dump_file)
348 {
349 fprintf (dump_file, " can throw externally to lp %i\n",
350 lookup_stmt_eh_lp (call));
351 if (callee_t)
352 fprintf (dump_file, " callee:%s\n",
353 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
354 }
355 local->can_throw = true;
356 }
357 if (flags & ECF_CONST)
358 {
359 if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
360 local->looping = true;
361 }
362 else if (flags & ECF_PURE)
363 {
364 if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
365 local->looping = true;
366 if (dump_file)
367 fprintf (dump_file, " pure function call in not const\n");
368 if (local->pure_const_state == IPA_CONST)
369 local->pure_const_state = IPA_PURE;
370 }
371 else
372 {
373 if (dump_file)
374 fprintf (dump_file, " uknown function call is not const/pure\n");
375 local->pure_const_state = IPA_NEITHER;
376 local->looping = true;
377 }
378 }
379 /* Direct functions calls are handled by IPA propagation. */
380 }
381
382 /* Wrapper around check_decl for loads. */
383
384 static bool
385 check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
386 {
387 if (DECL_P (op))
388 check_decl ((funct_state)data, op, false);
389 else
390 check_op ((funct_state)data, op, false);
391 return false;
392 }
393
394 /* Wrapper around check_decl for stores. */
395
396 static bool
397 check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
398 {
399 if (DECL_P (op))
400 check_decl ((funct_state)data, op, true);
401 else
402 check_op ((funct_state)data, op, true);
403 return false;
404 }
405
/* Scan the statement at GSIP and record in LOCAL any side effects
   relevant to pure/const discovery.  IPA is true when gathering
   summaries for interprocedural propagation (passed through to
   check_call).  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);
  unsigned int i = 0;

  /* Debug statements carry no semantics.  */
  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local, check_load, check_store);

  /* Calls handle their own throw accounting in check_call below.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (flag_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw externally");
	  local->can_throw = true;
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, " nonlocal label is not const/pure");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      /* An asm that clobbers "memory" may touch anything.  */
      for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
	{
	  tree op = gimple_asm_clobber_op (stmt, i);
	  if (simple_cst_equal(TREE_VALUE (op), memory_identifier_string) == 1)
	    {
	      if (dump_file)
		fprintf (dump_file, " memory asm clobber is not const/pure");
	      /* Abandon all hope, ye who enter here. */
	      local->pure_const_state = IPA_NEITHER;
	    }
	}
      if (gimple_asm_volatile_p (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " volatile is not const/pure");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	}
      return;
    default:
      break;
    }
}
481
482
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  Returns a freshly allocated
   funct_state owned by the caller.  IPA is true when the result will
   feed interprocedural propagation.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  tree old_decl = current_function_decl;
  funct_state l;
  basic_block this_block;

  /* Start optimistic (const, non-looping, nothrow); the scan below
     only ever downgrades.  */
  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       cgraph_node_name (fn));
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));
  current_function_decl = decl;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof(wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  /* Stop scanning once everything is as bad as it can get.  */
	  if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect.  */
      if (mark_dfs_back_edges ())
        {
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loop will
	     be proved to be finite in testcases such as in loop-15.c and loop-24.c */
	  loop_optimizer_init (LOOPS_NORMAL
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
	        fprintf (dump_file, " has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      loop_iterator li;
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (li, loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, " can not prove finiteness of loop %i\n", loop->num);
		    l->looping =true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  /* Flags already present on the declaration override whatever the
     scan concluded.  */
  if (TREE_READONLY (decl))
    {
      l->pure_const_state = IPA_CONST;
      l->state_previously_known = IPA_CONST;
      if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
        l->looping = false, l->looping_previously_known = false;
    }
  if (DECL_PURE_P (decl))
    {
      if (l->pure_const_state != IPA_CONST)
        l->pure_const_state = IPA_PURE;
      l->state_previously_known = IPA_PURE;
      if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
	l->looping = false, l->looping_previously_known = false;
    }
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  current_function_decl = old_decl;
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
    }
  return l;
}
599
600 /* Called when new function is inserted to callgraph late. */
601 static void
602 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
603 {
604 if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
605 return;
606 /* There are some shared nodes, in particular the initializers on
607 static declarations. We do not need to scan them more than once
608 since all we would be interested in are the addressof
609 operations. */
610 visited_nodes = pointer_set_create ();
611 set_function_state (node, analyze_function (node, true));
612 pointer_set_destroy (visited_nodes);
613 visited_nodes = NULL;
614 }
615
616 /* Called when new clone is inserted to callgraph late. */
617
618 static void
619 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
620 void *data ATTRIBUTE_UNUSED)
621 {
622 if (get_function_state (src))
623 {
624 funct_state l = XNEW (struct funct_state_d);
625 gcc_assert (!get_function_state (dst));
626 memcpy (l, get_function_state (src), sizeof (*l));
627 set_function_state (dst, l);
628 }
629 }
630
631 /* Called when new clone is inserted to callgraph late. */
632
633 static void
634 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
635 {
636 if (get_function_state (node))
637 {
638 free (get_function_state (node));
639 set_function_state (node, NULL);
640 }
641 }
642
643 \f
644 static void
645 register_hooks (void)
646 {
647 static bool init_p = false;
648
649 if (init_p)
650 return;
651
652 init_p = true;
653
654 node_removal_hook_holder =
655 cgraph_add_node_removal_hook (&remove_node_data, NULL);
656 node_duplication_hook_holder =
657 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
658 function_insertion_hook_holder =
659 cgraph_add_function_insertion_hook (&add_new_function, NULL);
660 }
661
662
663 /* Analyze each function in the cgraph to see if it is locally PURE or
664 CONST. */
665
666 static void
667 generate_summary (void)
668 {
669 struct cgraph_node *node;
670
671 register_hooks ();
672
673 /* There are some shared nodes, in particular the initializers on
674 static declarations. We do not need to scan them more than once
675 since all we would be interested in are the addressof
676 operations. */
677 visited_nodes = pointer_set_create ();
678
679 /* Process all of the functions.
680
681 We process AVAIL_OVERWRITABLE functions. We can not use the results
682 by default, but the info can be used at LTO with -fwhole-program or
683 when function got clonned and the clone is AVAILABLE. */
684
685 for (node = cgraph_nodes; node; node = node->next)
686 if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
687 set_function_state (node, analyze_function (node, true));
688
689 pointer_set_destroy (visited_nodes);
690 visited_nodes = NULL;
691 }
692
693
/* Serialize the ipa info for lto.  Emits a record count followed by,
   for each analyzed node in SET, its encoder reference and a bitpack
   of the five state fields.  */

static void
pure_const_write_summary (cgraph_node_set set)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  cgraph_node_set_iterator csi;

  /* First pass: count records so the reader knows how many follow.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && get_function_state (node) != NULL)
	count++;
    }

  lto_output_uleb128_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && get_function_state (node) != NULL)
	{
	  struct bitpack_d *bp;
	  funct_state fs;
	  int node_ref;
	  lto_cgraph_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->cgraph_node_encoder;
	  node_ref = lto_cgraph_encoder_encode (encoder, node);
	  lto_output_uleb128_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create ();
	  bp_pack_value (bp, fs->pure_const_state, 2);
	  bp_pack_value (bp, fs->state_previously_known, 2);
	  bp_pack_value (bp, fs->looping_previously_known, 1);
	  bp_pack_value (bp, fs->looping, 1);
	  bp_pack_value (bp, fs->can_throw, 1);
	  lto_output_bitpack (ob->main_stream, bp);
	  bitpack_delete (bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
746
747
/* Deserialize the ipa info for lto.  For each input file, reads the
   records written by pure_const_write_summary and attaches a freshly
   allocated funct_state to each referenced node.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  register_hooks ();
  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      /* The section is optional; a file may contribute nothing.  */
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = lto_input_uleb128 (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d *bp;
	      funct_state fs;
	      lto_cgraph_encoder_t encoder;

	      fs = XCNEW (struct funct_state_d);
	      index = lto_input_uleb128 (ib);
	      encoder = file_data->cgraph_node_encoder;
	      node = lto_cgraph_encoder_deref (encoder, index);
	      set_function_state (node, fs);

	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = lto_input_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (bp, 2);
	      fs->looping_previously_known = bp_unpack_value (bp, 1);
	      fs->looping = bp_unpack_value (bp, 1);
	      fs->can_throw = bp_unpack_value (bp, 1);
	      bitpack_delete (bp);
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
805
806
807 static bool
808 ignore_edge (struct cgraph_edge *e)
809 {
810 return (!e->can_throw_external);
811 }
812
813 /* Return true if NODE is self recursive function. */
814
815 static bool
816 self_recursive_p (struct cgraph_node *node)
817 {
818 struct cgraph_edge *e;
819 for (e = node->callees; e; e = e->next_callee)
820 if (e->callee == node)
821 return true;
822 return false;
823 }
824
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.
   Note that there is no function_transform pass since this only
   updates the function_decl.  Two phases: pure/const propagation,
   then nothrow propagation over an order that ignores non-throwing
   edges.  */

static unsigned int
propagate (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  /* Summaries are complete; the maintenance hooks are no longer
     needed.  */
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  order_pos = ipa_utils_reduced_inorder (order, true, false, NULL);
  if (dump_file)
    {
      dump_cgraph (dump_file);
      ipa_utils_print_order(dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information thru the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);
	  if (pure_const_state < w_l->pure_const_state)
	    pure_const_state = w_l->pure_const_state;

	  if (w_l->looping)
	    looping = true;
	  if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
	    {
	      /* An overwritable body cannot be trusted; fall back to
		 what the declaration promised.  */
	      looping |= w_l->looping_previously_known;
	      if (pure_const_state < w_l->state_previously_known)
	        pure_const_state = w_l->state_previously_known;
	    }

	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* More than one node in the cycle means mutual recursion,
	     hence possible looping.  */
	  if (count > 1)
	    looping = true;

	  for (e = w->callees; e; e = e->next_callee)
	    {
	      struct cgraph_node *y = e->callee;

	      if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (pure_const_state < y_l->pure_const_state)
		    pure_const_state = y_l->pure_const_state;
		  if (pure_const_state == IPA_NEITHER)
		    break;
		  if (y_l->looping)
		    looping = true;
		}
	      else
		{
		  /* No usable body: rely on flags derivable from the
		     callee's declaration or type.  */
		  int flags = flags_from_decl_or_type (y->decl);

		  if (flags & ECF_LOOPING_CONST_OR_PURE)
		    looping = true;
		  if (flags & ECF_CONST)
		    ;
		  else if ((flags & ECF_PURE) && pure_const_state == IPA_CONST)
		    pure_const_state = IPA_PURE;
		  else
		    pure_const_state = IPA_NEITHER, looping = true;

		}
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    this_state = w_l->state_previously_known;
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  switch (this_state)
	    {
	    case IPA_CONST:
	      if (!TREE_READONLY (w->decl) && dump_file)
		fprintf (dump_file, "Function found to be %sconst: %s\n",
			 this_looping ? "looping " : "",
			 cgraph_node_name (w));
	      TREE_READONLY (w->decl) = 1;
	      DECL_LOOPING_CONST_OR_PURE_P (w->decl) = this_looping;
	      break;

	    case IPA_PURE:
	      if (!DECL_PURE_P (w->decl) && dump_file)
		fprintf (dump_file, "Function found to be %spure: %s\n",
			 this_looping ? "looping " : "",
			 cgraph_node_name (w));
	      DECL_PURE_P (w->decl) = 1;
	      DECL_LOOPING_CONST_OR_PURE_P (w->decl) = this_looping;
	      break;

	    default:
	      break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  /* Cleanup. */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
	{
	  w_info = (struct ipa_dfs_info *) node->aux;
	  free (node->aux);
	  node->aux = NULL;
	}
    }

  /* Second phase: nothrow propagation, using an order that skips
     edges which cannot throw externally.  */
  order_pos = ipa_utils_reduced_inorder (order, true, false, ignore_edge);
  if (dump_file)
    {
      dump_cgraph (dump_file);
      ipa_utils_print_order(dump_file, "reduced for nothrow", order, order_pos);
    }
  /* Propagate the local information thru the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  if (w_l->can_throw
	      || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
	    can_throw = true;

	  if (can_throw)
	    break;

	  for (e = w->callees; e; e = e->next_callee)
	    {
	      struct cgraph_node *y = e->callee;

	      if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
		{
		  funct_state y_l = get_function_state (y);

		  if (can_throw)
		    break;
		  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
		      && e->can_throw_external)
		    can_throw = true;
		}
	      else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
	        can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's nothrow state, which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      struct cgraph_edge *e;
	      TREE_NOTHROW (w->decl) = true;
	      for (e = w->callers; e; e = e->next_caller)
	        e->can_throw_external = false;
	      if (dump_file)
		fprintf (dump_file, "Function found to be nothrow: %s\n",
			 cgraph_node_name (w));
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  /* Cleanup. */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
	{
	  w_info = (struct ipa_dfs_info *) node->aux;
	  free (node->aux);
	  node->aux = NULL;
	}
      if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
	free (get_function_state (node));
    }

  free (order);
  /* NOTE(review): VEC_free releases funct_state_vec and finish_state
     then frees the same pointer; this is only safe if VEC_free clears
     the variable to NULL -- confirm against vec.h.  */
  VEC_free (funct_state, heap, funct_state_vec);
  finish_state ();
  return 0;
}
1073
1074 static bool
1075 gate_pure_const (void)
1076 {
1077 return (flag_ipa_pure_const
1078 /* Don't bother doing anything if the program has errors. */
1079 && !(errorcount || sorrycount));
1080 }
1081
/* The interprocedural pure/const pass descriptor.  generate_summary
   computes the per-function local information, the write/read hooks
   stream it for LTO, and propagate (the execute hook) performs the
   transitive closure over the callgraph.  */

struct ipa_opt_pass_d pass_ipa_pure_const =
{
 {
  IPA_PASS,
  "pure-const",		                /* name */
  gate_pure_const,			/* gate */
  propagate,			        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PURE_CONST,		        /* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 generate_summary,		        /* generate_summary */
 pure_const_write_summary,		/* write_summary */
 pure_const_read_summary,		/* read_summary */
 NULL,					/* function_read_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};
1108
1109 /* Simple local pass for pure const discovery reusing the analysis from
1110 ipa_pure_const. This pass is effective when executed together with
1111 other optimization passes in early optimization pass queue. */
1112
1113 static unsigned int
1114 local_pure_const (void)
1115 {
1116 bool changed = false;
1117 funct_state l;
1118
1119 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1120 we must not promote functions that are called by already processed functions. */
1121
1122 if (function_called_by_processed_nodes_p ())
1123 {
1124 if (dump_file)
1125 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1126 return 0;
1127 }
1128 if (cgraph_function_body_availability (cgraph_node (current_function_decl))
1129 <= AVAIL_OVERWRITABLE)
1130 {
1131 if (dump_file)
1132 fprintf (dump_file, "Function has wrong visibility; ignoring\n");
1133 return 0;
1134 }
1135
1136 l = analyze_function (cgraph_node (current_function_decl), false);
1137
1138 switch (l->pure_const_state)
1139 {
1140 case IPA_CONST:
1141 if (!TREE_READONLY (current_function_decl))
1142 {
1143 TREE_READONLY (current_function_decl) = 1;
1144 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = l->looping;
1145 changed = true;
1146 if (dump_file)
1147 fprintf (dump_file, "Function found to be %sconst: %s\n",
1148 l->looping ? "looping " : "",
1149 lang_hooks.decl_printable_name (current_function_decl,
1150 2));
1151 }
1152 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1153 && !l->looping)
1154 {
1155 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = false;
1156 changed = true;
1157 if (dump_file)
1158 fprintf (dump_file, "Function found to be non-looping: %s\n",
1159 lang_hooks.decl_printable_name (current_function_decl,
1160 2));
1161 }
1162 break;
1163
1164 case IPA_PURE:
1165 if (!TREE_READONLY (current_function_decl))
1166 {
1167 DECL_PURE_P (current_function_decl) = 1;
1168 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = l->looping;
1169 changed = true;
1170 if (dump_file)
1171 fprintf (dump_file, "Function found to be %spure: %s\n",
1172 l->looping ? "looping " : "",
1173 lang_hooks.decl_printable_name (current_function_decl,
1174 2));
1175 }
1176 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1177 && !l->looping)
1178 {
1179 DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = false;
1180 changed = true;
1181 if (dump_file)
1182 fprintf (dump_file, "Function found to be non-looping: %s\n",
1183 lang_hooks.decl_printable_name (current_function_decl,
1184 2));
1185 }
1186 break;
1187
1188 default:
1189 break;
1190 }
1191 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1192 {
1193 struct cgraph_edge *e;
1194
1195 TREE_NOTHROW (current_function_decl) = true;
1196 for (e = cgraph_node (current_function_decl)->callers;
1197 e; e = e->next_caller)
1198 e->can_throw_external = false;
1199 changed = true;
1200 if (dump_file)
1201 fprintf (dump_file, "Function found to be nothrow: %s\n",
1202 lang_hooks.decl_printable_name (current_function_decl,
1203 2));
1204 }
1205 if (l)
1206 free (l);
1207 if (changed)
1208 return execute_fixup_cfg ();
1209 else
1210 return 0;
1211 }
1212
/* Descriptor for the local (per-function) pure/const discovery pass;
   shares its gate with the IPA pass and runs local_pure_const on the
   current function.  */

struct gimple_opt_pass pass_local_pure_const =
{
 {
  GIMPLE_PASS,
  "local-pure-const",	                /* name */
  gate_pure_const,			/* gate */
  local_pure_const,		        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PURE_CONST,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};