1/* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "output.h"
32#include "errors.h"
33#include "flags.h"
34#include "function.h"
35#include "expr.h"
36#include "ggc.h"
37#include "langhooks.h"
38#include "diagnostic.h"
39#include "tree-flow.h"
40#include "timevar.h"
41#include "tree-dump.h"
42#include "tree-pass.h"
43#include "toplev.h"
44#include "except.h"
45#include "cfgloop.h"
46#include "cfglayout.h"
47#include "hashtab.h"
48
49/* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
51
52/* Local declarations. */
53
54/* Initial capacity for the basic block array. */
55static const int initial_cfg_capacity = 20;
56
57/* Mapping of labels to their associated blocks. This can greatly speed up
58 building of the CFG in code with lots of gotos. */
59static GTY(()) varray_type label_to_block_map;
60
61/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
62 which use a particular edge. The CASE_LABEL_EXPRs are chained together
63 via their TREE_CHAIN field, which we clear after we're done with the
64 hash table to prevent problems with duplication of SWITCH_EXPRs.
65
66 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
67 update the case vector in response to edge redirections.
68
69 Right now this table is set up and torn down at key points in the
70 compilation process. It would be nice if we could make the table
71 more persistent. The key is getting notification of changes to
72 the CFG (particularly edge removal, creation and redirection). */
73
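/* An illustrative sketch of the intended lifecycle (the details live in
   cleanup_tree_cfg and get_cases_for_edge below):

     start_recording_case_labels ();
     ... redirect edges, querying get_cases_for_edge (e, switch_stmt) ...
     end_recording_case_labels ();

   The table is populated lazily the first time an edge of a given
   SWITCH_EXPR is queried.  */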
74struct edge_to_cases_elt
75{
76 /* The edge itself. Necessary for hashing and equality tests. */
77 edge e;
78
79 /* The case labels associated with this edge. We link these up via
80 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
81 when we destroy the hash table. This prevents problems when copying
82 SWITCH_EXPRs. */
83 tree case_labels;
84};
85
86static htab_t edge_to_cases;
87
88/* CFG statistics. */
89struct cfg_stats_d
90{
91 long num_merged_labels;
92};
93
94static struct cfg_stats_d cfg_stats;
95
96/* Nonzero if we found a computed goto while building basic blocks. */
97static bool found_computed_goto;
98
99/* Basic blocks and flowgraphs. */
100static basic_block create_bb (void *, void *, basic_block);
101static void create_block_annotation (basic_block);
102static void free_blocks_annotations (void);
103static void clear_blocks_annotations (void);
104static void make_blocks (tree);
105static void factor_computed_gotos (void);
106
107/* Edges. */
108static void make_edges (void);
109static void make_ctrl_stmt_edges (basic_block);
110static void make_exit_edges (basic_block);
111static void make_cond_expr_edges (basic_block);
112static void make_switch_expr_edges (basic_block);
113static void make_goto_expr_edges (basic_block);
114static edge tree_redirect_edge_and_branch (edge, basic_block);
115static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
116static void split_critical_edges (void);
117
118/* Various helpers. */
119static inline bool stmt_starts_bb_p (tree, tree);
120static int tree_verify_flow_info (void);
121static void tree_make_forwarder_block (edge);
122static bool thread_jumps (void);
123static bool tree_forwarder_block_p (basic_block);
124static void tree_cfg2vcg (FILE *);
125
126/* Flowgraph optimization and cleanup. */
127static void tree_merge_blocks (basic_block, basic_block);
128static bool tree_can_merge_blocks_p (basic_block, basic_block);
129static void remove_bb (basic_block);
130static bool cleanup_control_flow (void);
131static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
132static edge find_taken_edge_cond_expr (basic_block, tree);
133static edge find_taken_edge_switch_expr (basic_block, tree);
134static tree find_case_label_for_value (tree, tree);
135static bool phi_alternatives_equal (basic_block, edge, edge);
136
137
138/*---------------------------------------------------------------------------
139 Create basic blocks
140---------------------------------------------------------------------------*/
141
142/* Entry point to the CFG builder for trees. TP points to the list of
143 statements to be added to the flowgraph. */
144
145static void
146build_tree_cfg (tree *tp)
147{
148 /* Register specific tree functions. */
149 tree_register_cfg_hooks ();
150
151 /* Initialize rbi_pool. */
152 alloc_rbi_pool ();
153
154 /* Initialize the basic block array. */
155 init_flow ();
156 profile_status = PROFILE_ABSENT;
157 n_basic_blocks = 0;
158 last_basic_block = 0;
159 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
160 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
161
162 /* Build a mapping of labels to their associated blocks. */
163 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
164 "label to block map");
165
166 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
167 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
168
169 found_computed_goto = 0;
170 make_blocks (*tp);
171
172 /* Computed gotos are hell to deal with, especially if there are
173 lots of them with a large number of destinations. So we factor
174 them to a common computed goto location before we build the
175 edge list. After we convert back to normal form, we will un-factor
176 the computed gotos since factoring introduces an unwanted jump. */
177 if (found_computed_goto)
178 factor_computed_gotos ();
179
180 /* Make sure there is always at least one block, even if it's empty. */
181 if (n_basic_blocks == 0)
182 create_empty_bb (ENTRY_BLOCK_PTR);
183
184 create_block_annotation (ENTRY_BLOCK_PTR);
185 create_block_annotation (EXIT_BLOCK_PTR);
186
187 /* Adjust the size of the array. */
188 VARRAY_GROW (basic_block_info, n_basic_blocks);
189
190 /* To speed up statement iterator walks, we first purge dead labels. */
191 cleanup_dead_labels ();
192
193 /* Group case nodes to reduce the number of edges.
194 We do this after cleaning up dead labels because otherwise we miss
195 a lot of obvious case merging opportunities. */
196 group_case_labels ();
197
198 /* Create the edges of the flowgraph. */
199 make_edges ();
200
201 /* Debugging dumps. */
202
203 /* Write the flowgraph to a VCG file. */
204 {
205 int local_dump_flags;
206 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
207 if (dump_file)
208 {
209 tree_cfg2vcg (dump_file);
210 dump_end (TDI_vcg, dump_file);
211 }
212 }
213
214 /* Dump a textual representation of the flowgraph. */
215 if (dump_file)
216 dump_tree_cfg (dump_file, dump_flags);
217}
218
219static void
220execute_build_cfg (void)
221{
222 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
223}
224
225struct tree_opt_pass pass_build_cfg =
226{
227 "cfg", /* name */
228 NULL, /* gate */
229 execute_build_cfg, /* execute */
230 NULL, /* sub */
231 NULL, /* next */
232 0, /* static_pass_number */
233 TV_TREE_CFG, /* tv_id */
234 PROP_gimple_leh, /* properties_required */
235 PROP_cfg, /* properties_provided */
236 0, /* properties_destroyed */
237 0, /* todo_flags_start */
238 TODO_verify_stmts, /* todo_flags_finish */
239 0 /* letter */
240};
241
242/* Search the CFG for any computed gotos. If found, factor them to a
243 common computed goto site. Also record the location of that site so
244 that we can un-factor the gotos after we have converted back to
245 normal form. */
246
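/* Roughly, the transformation looks like this (illustrative only):

     goto *p_1;   ...   goto *p_N;

   becomes

     gotovar = p_1; goto <factored>;   ...   gotovar = p_N; goto <factored>;
     <factored>: goto *gotovar;

   so only the single factored block needs edges to every possible
   destination label.  */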
247static void
248factor_computed_gotos (void)
249{
250 basic_block bb;
251 tree factored_label_decl = NULL;
252 tree var = NULL;
253 tree factored_computed_goto_label = NULL;
254 tree factored_computed_goto = NULL;
255
256 /* We know there are one or more computed gotos in this function.
257 Examine the last statement in each basic block to see if the block
258 ends with a computed goto. */
259
260 FOR_EACH_BB (bb)
261 {
262 block_stmt_iterator bsi = bsi_last (bb);
263 tree last;
264
265 if (bsi_end_p (bsi))
266 continue;
267 last = bsi_stmt (bsi);
268
269 /* Ignore the computed goto we create when we factor the original
270 computed gotos. */
271 if (last == factored_computed_goto)
272 continue;
273
274 /* If the last statement is a computed goto, factor it. */
275 if (computed_goto_p (last))
276 {
277 tree assignment;
278
279 /* The first time we find a computed goto we need to create
280 the factored goto block and the variable each original
281 computed goto will use for its goto destination. */
282 if (! factored_computed_goto)
283 {
284 basic_block new_bb = create_empty_bb (bb);
285 block_stmt_iterator new_bsi = bsi_start (new_bb);
286
287 /* Create the destination of the factored goto. Each original
288 computed goto will put its desired destination into this
289 variable and jump to the label we create immediately
290 below. */
291 var = create_tmp_var (ptr_type_node, "gotovar");
292
293 /* Build a label for the new block which will contain the
294 factored computed goto. */
295 factored_label_decl = create_artificial_label ();
296 factored_computed_goto_label
297 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
298 bsi_insert_after (&new_bsi, factored_computed_goto_label,
299 BSI_NEW_STMT);
300
301 /* Build our new computed goto. */
302 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
303 bsi_insert_after (&new_bsi, factored_computed_goto,
304 BSI_NEW_STMT);
305 }
306
307 /* Copy the original computed goto's destination into VAR. */
308 assignment = build (MODIFY_EXPR, ptr_type_node,
309 var, GOTO_DESTINATION (last));
310 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
311
312 /* And re-vector the computed goto to the new destination. */
313 GOTO_DESTINATION (last) = factored_label_decl;
314 }
315 }
316}
317
318
319/* Create annotations for a single basic block. */
320
321static void
322create_block_annotation (basic_block bb)
323{
324 /* Verify that the tree_annotations field is clear. */
325 gcc_assert (!bb->tree_annotations);
326 bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
327}
328
329
330/* Free the annotations for all the basic blocks. */
331
332static void free_blocks_annotations (void)
333{
334 clear_blocks_annotations ();
335}
336
337
338/* Clear the annotations for all the basic blocks. */
339
340static void
341clear_blocks_annotations (void)
342{
343 basic_block bb;
344
345 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
346 bb->tree_annotations = NULL;
347}
348
349
350/* Build a flowgraph for the statement_list STMT_LIST. */
351
352static void
353make_blocks (tree stmt_list)
354{
355 tree_stmt_iterator i = tsi_start (stmt_list);
356 tree stmt = NULL;
357 bool start_new_block = true;
358 bool first_stmt_of_list = true;
359 basic_block bb = ENTRY_BLOCK_PTR;
360
361 while (!tsi_end_p (i))
362 {
363 tree prev_stmt;
364
365 prev_stmt = stmt;
366 stmt = tsi_stmt (i);
367
368 /* If the statement starts a new basic block or if we have determined
369 in a previous pass that we need to create a new block for STMT, do
370 so now. */
371 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
372 {
373 if (!first_stmt_of_list)
374 stmt_list = tsi_split_statement_list_before (&i);
375 bb = create_basic_block (stmt_list, NULL, bb);
376 start_new_block = false;
377 }
378
379 /* Now add STMT to BB and create the subgraphs for special statement
380 codes. */
381 set_bb_for_stmt (stmt, bb);
382
383 if (computed_goto_p (stmt))
384 found_computed_goto = true;
385
386 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
387 next iteration. */
388 if (stmt_ends_bb_p (stmt))
389 start_new_block = true;
390
391 tsi_next (&i);
392 first_stmt_of_list = false;
393 }
394}
395
396
397/* Create and return a new empty basic block after bb AFTER. */
398
399static basic_block
400create_bb (void *h, void *e, basic_block after)
401{
402 basic_block bb;
403
404 gcc_assert (!e);
405
406 /* Create and initialize a new basic block. Since alloc_block uses
407 ggc_alloc_cleared to allocate a basic block, we do not have to
408 clear the newly allocated basic block here. */
409 bb = alloc_block ();
410
411 bb->index = last_basic_block;
412 bb->flags = BB_NEW;
413 bb->stmt_list = h ? h : alloc_stmt_list ();
414
415 /* Add the new block to the linked list of blocks. */
416 link_block (bb, after);
417
418 /* Grow the basic block array if needed. */
419 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
420 {
421 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
422 VARRAY_GROW (basic_block_info, new_size);
423 }
424
425 /* Add the newly created block to the array. */
426 BASIC_BLOCK (last_basic_block) = bb;
427
428 create_block_annotation (bb);
429
430 n_basic_blocks++;
431 last_basic_block++;
432
433 initialize_bb_rbi (bb);
434 return bb;
435}
436
437
438/*---------------------------------------------------------------------------
439 Edge creation
440---------------------------------------------------------------------------*/
441
442/* Join all the blocks in the flowgraph. */
443
444static void
445make_edges (void)
446{
447 basic_block bb;
448
449 /* Create an edge from entry to the first block with executable
450 statements in it. */
451 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
452
453 /* Traverse basic block array placing edges. */
454 FOR_EACH_BB (bb)
455 {
456 tree first = first_stmt (bb);
457 tree last = last_stmt (bb);
458
459 if (first)
460 {
461 /* Edges for statements that always alter flow control. */
462 if (is_ctrl_stmt (last))
463 make_ctrl_stmt_edges (bb);
464
465 /* Edges for statements that sometimes alter flow control. */
466 if (is_ctrl_altering_stmt (last))
467 make_exit_edges (bb);
468 }
469
470 /* Finally, if no edges were created above, this is a regular
471 basic block that only needs a fallthru edge. */
472 if (EDGE_COUNT (bb->succs) == 0)
473 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
474 }
475
476 /* We do not care about fake edges, so remove any that the CFG
477 builder inserted for completeness. */
478 remove_fake_exit_edges ();
479
480 /* Clean up the graph and warn for unreachable code. */
481 cleanup_tree_cfg ();
482}
483
484
485/* Create edges for control statement at basic block BB. */
486
487static void
488make_ctrl_stmt_edges (basic_block bb)
489{
490 tree last = last_stmt (bb);
491
492 gcc_assert (last);
493 switch (TREE_CODE (last))
494 {
495 case GOTO_EXPR:
496 make_goto_expr_edges (bb);
497 break;
498
499 case RETURN_EXPR:
500 make_edge (bb, EXIT_BLOCK_PTR, 0);
501 break;
502
503 case COND_EXPR:
504 make_cond_expr_edges (bb);
505 break;
506
507 case SWITCH_EXPR:
508 make_switch_expr_edges (bb);
509 break;
510
511 case RESX_EXPR:
512 make_eh_edges (last);
513 /* Yet another NORETURN hack. */
514 if (EDGE_COUNT (bb->succs) == 0)
515 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
516 break;
517
518 default:
519 gcc_unreachable ();
520 }
521}
522
523
524/* Create exit edges for statements in block BB that alter the flow of
525 control. Statements that alter the control flow are 'goto', 'return'
526 and calls to non-returning functions. */
527
528static void
529make_exit_edges (basic_block bb)
530{
531 tree last = last_stmt (bb), op;
532
533 gcc_assert (last);
534 switch (TREE_CODE (last))
535 {
536 case CALL_EXPR:
537 /* If this function receives a nonlocal goto, then we need to
538 make edges from this call site to all the nonlocal goto
539 handlers. */
540 if (TREE_SIDE_EFFECTS (last)
541 && current_function_has_nonlocal_label)
542 make_goto_expr_edges (bb);
543
544 /* If this statement has reachable exception handlers, then
545 create abnormal edges to them. */
546 make_eh_edges (last);
547
548 /* Some calls are known not to return. For such calls we create
549 a fake edge.
550
551 We really need to revamp how we build edges so that it's not
552 such a bloody pain to avoid creating edges for this case since
553 all we do is remove these edges when we're done building the
554 CFG. */
555 if (call_expr_flags (last) & ECF_NORETURN)
556 {
557 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
558 return;
559 }
560
561 /* Don't forget the fall-thru edge. */
562 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
563 break;
564
565 case MODIFY_EXPR:
566 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
567 may have an abnormal edge. Search the RHS for this case and
568 create any required edges. */
569 op = get_call_expr_in (last);
570 if (op && TREE_SIDE_EFFECTS (op)
571 && current_function_has_nonlocal_label)
572 make_goto_expr_edges (bb);
573
574 make_eh_edges (last);
575 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
576 break;
577
578 default:
579 gcc_unreachable ();
580 }
581}
582
583
584/* Create the edges for a COND_EXPR starting at block BB.
585 At this point, both clauses must contain only simple gotos. */
586
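/* After lowering, a COND_EXPR here has the rough shape (illustrative):

     if (cond) goto <then_label>; else goto <else_label>;

   so the true and false edges can be read directly from the
   GOTO_DESTINATIONs of its two arms.  */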
587static void
588make_cond_expr_edges (basic_block bb)
589{
590 tree entry = last_stmt (bb);
591 basic_block then_bb, else_bb;
592 tree then_label, else_label;
593
594 gcc_assert (entry);
595 gcc_assert (TREE_CODE (entry) == COND_EXPR);
596
597 /* Entry basic blocks for each component. */
598 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
599 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
600 then_bb = label_to_block (then_label);
601 else_bb = label_to_block (else_label);
602
603 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
604 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
605}
606
607/* Hashing routine for EDGE_TO_CASES. */
608
609static hashval_t
610edge_to_cases_hash (const void *p)
611{
612 edge e = ((struct edge_to_cases_elt *)p)->e;
613
614 /* Hash on the edge itself (which is a pointer). */
615 return htab_hash_pointer (e);
616}
617
618/* Equality routine for EDGE_TO_CASES, edges are unique, so testing
619 for equality is just a pointer comparison. */
620
621static int
622edge_to_cases_eq (const void *p1, const void *p2)
623{
624 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
625 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
626
627 return e1 == e2;
628}
629
630/* Called for each element in the hash table (P) as we delete the
631 edge to cases hash table.
632
633 Clear all the TREE_CHAINs to prevent problems with copying of
634 SWITCH_EXPRs and structure sharing rules, then free the hash table
635 element. */
636
637static void
638edge_to_cases_cleanup (void *p)
639{
640 struct edge_to_cases_elt *elt = p;
641 tree t, next;
642
643 for (t = elt->case_labels; t; t = next)
644 {
645 next = TREE_CHAIN (t);
646 TREE_CHAIN (t) = NULL;
647 }
648 free (p);
649}
650
651/* Start recording information mapping edges to case labels. */
652
653static void
654start_recording_case_labels (void)
655{
656 gcc_assert (edge_to_cases == NULL);
657
658 edge_to_cases = htab_create (37,
659 edge_to_cases_hash,
660 edge_to_cases_eq,
661 edge_to_cases_cleanup);
662}
663
664/* Return nonzero if we are recording information for case labels. */
665
666static bool
667recording_case_labels_p (void)
668{
669 return (edge_to_cases != NULL);
670}
671
672/* Stop recording information mapping edges to case labels and
673 remove any information we have recorded. */
674static void
675end_recording_case_labels (void)
676{
677 htab_delete (edge_to_cases);
678 edge_to_cases = NULL;
679}
680
681/* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
682
683static void
684record_switch_edge (edge e, tree case_label)
685{
686 struct edge_to_cases_elt *elt;
687 void **slot;
688
689 /* Build a hash table element so we can see if E is already
690 in the table. */
691 elt = xmalloc (sizeof (struct edge_to_cases_elt));
692 elt->e = e;
693 elt->case_labels = case_label;
694
695 slot = htab_find_slot (edge_to_cases, elt, INSERT);
696
697 if (*slot == NULL)
698 {
699 /* E was not in the hash table. Install E into the hash table. */
700 *slot = (void *)elt;
701 }
702 else
703 {
704 /* E was already in the hash table. Free ELT as we do not need it
705 anymore. */
706 free (elt);
707
708 /* Get the entry stored in the hash table. */
709 elt = (struct edge_to_cases_elt *) *slot;
710
711 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
712 TREE_CHAIN (case_label) = elt->case_labels;
713 elt->case_labels = case_label;
714 }
715}
716
717/* If we are inside a {start,end}_recording_cases block, then return
718 a chain of CASE_LABEL_EXPRs from T which reference E.
719
720 Otherwise return NULL. */
721
722static tree
723get_cases_for_edge (edge e, tree t)
724{
725 struct edge_to_cases_elt elt, *elt_p;
726 void **slot;
727 size_t i, n;
728 tree vec;
729
730 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
731 chains available. Return NULL so the caller can detect this case. */
732 if (!recording_case_labels_p ())
733 return NULL;
734
735restart:
736 elt.e = e;
737 elt.case_labels = NULL;
738 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
739
740 if (slot)
741 {
742 elt_p = (struct edge_to_cases_elt *)*slot;
743 return elt_p->case_labels;
744 }
745
746 /* If we did not find E in the hash table, then this must be the first
747 time we have been queried for information about E & T. Add all the
748 elements from T to the hash table then perform the query again. */
749
750 vec = SWITCH_LABELS (t);
751 n = TREE_VEC_LENGTH (vec);
752 for (i = 0; i < n; i++)
753 {
754 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
755 basic_block label_bb = label_to_block (lab);
756 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
757 }
758 goto restart;
759}
760
761/* Create the edges for a SWITCH_EXPR starting at block BB.
762 At this point, the switch body has been lowered and the
763 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
764
765static void
766make_switch_expr_edges (basic_block bb)
767{
768 tree entry = last_stmt (bb);
769 size_t i, n;
770 tree vec;
771
772 vec = SWITCH_LABELS (entry);
773 n = TREE_VEC_LENGTH (vec);
774
775 for (i = 0; i < n; ++i)
776 {
777 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
778 basic_block label_bb = label_to_block (lab);
779 make_edge (bb, label_bb, 0);
780 }
781}
782
783
784/* Return the basic block holding label DEST. */
785
786basic_block
787label_to_block (tree dest)
788{
789 int uid = LABEL_DECL_UID (dest);
790
791 /* We would die hard when faced with an undefined label. Emit a label to
792 the very first basic block. This will hopefully make even the dataflow
793 and undefined variable warnings come out right. */
794 if ((errorcount || sorrycount) && uid < 0)
795 {
796 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
797 tree stmt;
798
799 stmt = build1 (LABEL_EXPR, void_type_node, dest);
800 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
801 uid = LABEL_DECL_UID (dest);
802 }
803 return VARRAY_BB (label_to_block_map, uid);
804}
805
806
807/* Create edges for a goto statement at block BB. */
808
809static void
810make_goto_expr_edges (basic_block bb)
811{
812 tree goto_t, dest;
813 basic_block target_bb;
814 int for_call;
815 block_stmt_iterator last = bsi_last (bb);
816
817 goto_t = bsi_stmt (last);
818
819 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
820 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
821 from a nonlocal goto. */
822 if (TREE_CODE (goto_t) != GOTO_EXPR)
823 {
824 dest = error_mark_node;
825 for_call = 1;
826 }
827 else
828 {
829 dest = GOTO_DESTINATION (goto_t);
830 for_call = 0;
831
832 /* A GOTO to a local label creates normal edges. */
833 if (simple_goto_p (goto_t))
834 {
835 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
836#ifdef USE_MAPPED_LOCATION
837 e->goto_locus = EXPR_LOCATION (goto_t);
838#else
839 e->goto_locus = EXPR_LOCUS (goto_t);
840#endif
841 bsi_remove (&last);
842 return;
843 }
844
845 /* Nothing more to do for nonlocal gotos. */
846 if (TREE_CODE (dest) == LABEL_DECL)
847 return;
848
849 /* Computed gotos remain. */
850 }
851
852 /* Look for the block starting with the destination label. In the
853 case of a computed goto, make an edge to any label block we find
854 in the CFG. */
855 FOR_EACH_BB (target_bb)
856 {
857 block_stmt_iterator bsi;
858
859 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
860 {
861 tree target = bsi_stmt (bsi);
862
863 if (TREE_CODE (target) != LABEL_EXPR)
864 break;
865
866 if (
867 /* Computed GOTOs. Make an edge to every label block that has
868 been marked as a potential target for a computed goto. */
869 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
870 /* Nonlocal GOTO target. Make an edge to every label block
871 that has been marked as a potential target for a nonlocal
872 goto. */
873 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
874 {
875 make_edge (bb, target_bb, EDGE_ABNORMAL);
876 break;
877 }
878 }
879 }
880
881 /* Degenerate case of computed goto with no labels. */
882 if (!for_call && EDGE_COUNT (bb->succs) == 0)
883 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
884}
885
886
887/*---------------------------------------------------------------------------
888 Flowgraph analysis
889---------------------------------------------------------------------------*/
890
891/* Remove unreachable blocks and other miscellaneous clean up work. */
892
893bool
894cleanup_tree_cfg (void)
895{
896 bool retval = false;
897
898 timevar_push (TV_TREE_CLEANUP_CFG);
899
900 retval = cleanup_control_flow ();
901 retval |= delete_unreachable_blocks ();
902
903 /* thread_jumps can redirect edges out of SWITCH_EXPRs, which can get
904 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
905 mappings around the call to thread_jumps. */
906 start_recording_case_labels ();
907 retval |= thread_jumps ();
908 end_recording_case_labels ();
909
910#ifdef ENABLE_CHECKING
911 if (retval)
912 {
913 gcc_assert (!cleanup_control_flow ());
914 gcc_assert (!delete_unreachable_blocks ());
915 gcc_assert (!thread_jumps ());
916 }
917#endif
918
919 /* Merging the blocks creates no new opportunities for the other
920 optimizations, so do it here. */
921 retval |= merge_seq_blocks ();
922
923 compact_blocks ();
924
925#ifdef ENABLE_CHECKING
926 verify_flow_info ();
927#endif
928 timevar_pop (TV_TREE_CLEANUP_CFG);
929 return retval;
930}
931
932
933/* Cleanup useless labels in basic blocks. This is something we wish
934 to do early because it allows us to group case labels before creating
935 the edges for the CFG, and it speeds up block statement iterators in
936 all passes later on.
937 We only run this pass once; running it more than once is probably not
938 profitable. */
939
940/* A map from basic block index to the leading label of that block. */
941static tree *label_for_bb;
942
943/* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
944static void
945update_eh_label (struct eh_region *region)
946{
947 tree old_label = get_eh_region_tree_label (region);
948 if (old_label)
949 {
950 tree new_label;
951 basic_block bb = label_to_block (old_label);
952
953 /* ??? After optimizing, there may be EH regions with labels
954 that have already been removed from the function body, so
955 there is no basic block for them. */
956 if (! bb)
957 return;
958
959 new_label = label_for_bb[bb->index];
960 set_eh_region_tree_label (region, new_label);
961 }
962}
963
964/* Given LABEL return the first label in the same basic block. */
965static tree
966main_block_label (tree label)
967{
968 basic_block bb = label_to_block (label);
969
970 /* label_to_block possibly inserted an undefined label into the chain. */
971 if (!label_for_bb[bb->index])
972 label_for_bb[bb->index] = label;
973 return label_for_bb[bb->index];
974}
975
976/* Cleanup redundant labels. This is a three-step process:
977 1) Find the leading label for each block.
978 2) Redirect all references to labels to the leading labels.
979 3) Cleanup all useless labels. */
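/* For illustration: if a block starts with

     L1: L2: L3: ...

   and L1 is chosen as the leading label, every goto, COND_EXPR arm,
   switch case and EH region label naming L2 or L3 is redirected to L1,
   after which the artificial labels L2 and L3 can be deleted.  */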
980
981void
982cleanup_dead_labels (void)
983{
984 basic_block bb;
985 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
986
987 /* Find a suitable label for each block. We use the first user-defined
988 label if there is one, or otherwise just the first label we see. */
989 FOR_EACH_BB (bb)
990 {
991 block_stmt_iterator i;
992
993 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
994 {
995 tree label, stmt = bsi_stmt (i);
996
997 if (TREE_CODE (stmt) != LABEL_EXPR)
998 break;
999
1000 label = LABEL_EXPR_LABEL (stmt);
1001
1002 /* If we have not yet seen a label for the current block,
1003 remember this one and see if there are more labels. */
1004 if (! label_for_bb[bb->index])
1005 {
1006 label_for_bb[bb->index] = label;
1007 continue;
1008 }
1009
1010 /* If we did see a label for the current block already, but it
1011 is an artificially created label, replace it if the current
1012 label is a user defined label. */
1013 if (! DECL_ARTIFICIAL (label)
1014 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
1015 {
1016 label_for_bb[bb->index] = label;
1017 break;
1018 }
1019 }
1020 }
1021
1022 /* Now redirect all jumps/branches to the selected label.
1023 First do so for each block ending in a control statement. */
1024 FOR_EACH_BB (bb)
1025 {
1026 tree stmt = last_stmt (bb);
1027 if (!stmt)
1028 continue;
1029
1030 switch (TREE_CODE (stmt))
1031 {
1032 case COND_EXPR:
1033 {
1034 tree true_branch, false_branch;
1035
1036 true_branch = COND_EXPR_THEN (stmt);
1037 false_branch = COND_EXPR_ELSE (stmt);
1038
1039 GOTO_DESTINATION (true_branch)
1040 = main_block_label (GOTO_DESTINATION (true_branch));
1041 GOTO_DESTINATION (false_branch)
1042 = main_block_label (GOTO_DESTINATION (false_branch));
1043
1044 break;
1045 }
1046
1047 case SWITCH_EXPR:
1048 {
1049 size_t i;
1050 tree vec = SWITCH_LABELS (stmt);
1051 size_t n = TREE_VEC_LENGTH (vec);
1052
1053 /* Replace all destination labels. */
1054 for (i = 0; i < n; ++i)
1055 {
1056 tree elt = TREE_VEC_ELT (vec, i);
1057 tree label = main_block_label (CASE_LABEL (elt));
1058 CASE_LABEL (elt) = label;
1059 }
1060 break;
1061 }
1062
1063 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1064 remove them until after we've created the CFG edges. */
1065 case GOTO_EXPR:
1066 if (! computed_goto_p (stmt))
1067 {
1068 GOTO_DESTINATION (stmt)
1069 = main_block_label (GOTO_DESTINATION (stmt));
1070 break;
1071 }
1072
1073 default:
1074 break;
1075 }
1076 }
1077
1078 for_each_eh_region (update_eh_label);
1079
1080 /* Finally, purge dead labels. All user-defined labels and labels that
1081 can be the target of non-local gotos are preserved. */
1082 FOR_EACH_BB (bb)
1083 {
1084 block_stmt_iterator i;
1085 tree label_for_this_bb = label_for_bb[bb->index];
1086
1087 if (! label_for_this_bb)
1088 continue;
1089
1090 for (i = bsi_start (bb); !bsi_end_p (i); )
1091 {
1092 tree label, stmt = bsi_stmt (i);
1093
1094 if (TREE_CODE (stmt) != LABEL_EXPR)
1095 break;
1096
1097 label = LABEL_EXPR_LABEL (stmt);
1098
1099 if (label == label_for_this_bb
1100 || ! DECL_ARTIFICIAL (label)
1101 || DECL_NONLOCAL (label))
1102 bsi_next (&i);
1103 else
1104 bsi_remove (&i);
1105 }
1106 }
1107
1108 free (label_for_bb);
1109}
1110
1111/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1112 and scan the sorted vector of cases. Combine the ones jumping to the
1113 same label.
1114 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1115
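/* Illustrative sketch of the effect on a lowered switch:

     switch (x) { case 1: goto L; case 2: goto L; case 3: goto L;
                  default: goto D; }

   becomes

     switch (x) { case 1 ... 3: goto L; default: goto D; }

   and cases that share the default label are dropped entirely.  */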
1116void
1117group_case_labels (void)
1118{
1119 basic_block bb;
1120
1121 FOR_EACH_BB (bb)
1122 {
1123 tree stmt = last_stmt (bb);
1124 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1125 {
1126 tree labels = SWITCH_LABELS (stmt);
1127 int old_size = TREE_VEC_LENGTH (labels);
1128 int i, j, new_size = old_size;
1129 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1130 tree default_label;
1131
1132 /* The default label is always the last case in a switch
1133 statement after gimplification. */
1134 default_label = CASE_LABEL (default_case);
1135
1136 /* Look for possible opportunities to merge cases.
1137 Ignore the last element of the label vector because it
1138 must be the default case. */
1139 i = 0;
1140 while (i < old_size - 1)
1141 {
1142 tree base_case, base_label, base_high, type;
1143 base_case = TREE_VEC_ELT (labels, i);
1144
1145 gcc_assert (base_case);
1146 base_label = CASE_LABEL (base_case);
1147
1148 /* Discard cases that have the same destination as the
1149 default case. */
1150 if (base_label == default_label)
1151 {
1152 TREE_VEC_ELT (labels, i) = NULL_TREE;
1153 i++;
1154 new_size--;
1155 continue;
1156 }
1157
1158 type = TREE_TYPE (CASE_LOW (base_case));
1159 base_high = CASE_HIGH (base_case) ?
1160 CASE_HIGH (base_case) : CASE_LOW (base_case);
1161 i++;
1162 /* Try to merge case labels. Break out when we reach the end
1163 of the label vector or when we cannot merge the next case
1164 label with the current one. */
1165 while (i < old_size - 1)
1166 {
1167 tree merge_case = TREE_VEC_ELT (labels, i);
1168 tree merge_label = CASE_LABEL (merge_case);
1169 tree t = int_const_binop (PLUS_EXPR, base_high,
1170 integer_one_node, 1);
1171
1172 /* Merge the cases if they jump to the same place,
1173 and their ranges are consecutive. */
1174 if (merge_label == base_label
1175 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1176 {
1177 base_high = CASE_HIGH (merge_case) ?
1178 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1179 CASE_HIGH (base_case) = base_high;
1180 TREE_VEC_ELT (labels, i) = NULL_TREE;
1181 new_size--;
1182 i++;
1183 }
1184 else
1185 break;
1186 }
1187 }
1188
1189 /* Compress the case labels in the label vector, and adjust the
1190 length of the vector. */
1191 for (i = 0, j = 0; i < new_size; i++)
1192 {
1193 while (! TREE_VEC_ELT (labels, j))
1194 j++;
1195 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1196 }
1197 TREE_VEC_LENGTH (labels) = new_size;
1198 }
1199 }
1200}
1201
1202/* Checks whether we can merge block B into block A. */
1203
1204static bool
1205tree_can_merge_blocks_p (basic_block a, basic_block b)
1206{
1207 tree stmt;
1208 block_stmt_iterator bsi;
1209
1210 if (EDGE_COUNT (a->succs) != 1)
1211 return false;
1212
1213 if (EDGE_SUCC (a, 0)->flags & EDGE_ABNORMAL)
1214 return false;
1215
1216 if (EDGE_SUCC (a, 0)->dest != b)
1217 return false;
1218
1219 if (b == EXIT_BLOCK_PTR)
1220 return false;
1221
1222 if (EDGE_COUNT (b->preds) > 1)
1223 return false;
1224
1225 /* If A ends by a statement causing exceptions or something similar, we
1226 cannot merge the blocks. */
1227 stmt = last_stmt (a);
1228 if (stmt && stmt_ends_bb_p (stmt))
1229 return false;
1230
1231 /* Do not allow a block with only a non-local label to be merged. */
1232 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1233 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1234 return false;
1235
1236 /* There may be no phi nodes at the start of b. Most of these degenerate
1237 phi nodes should be cleaned up by kill_redundant_phi_nodes. */
1238 if (phi_nodes (b))
1239 return false;
1240
1241 /* Do not remove user labels. */
1242 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1243 {
1244 stmt = bsi_stmt (bsi);
1245 if (TREE_CODE (stmt) != LABEL_EXPR)
1246 break;
1247 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1248 return false;
1249 }
1250
1251 return true;
1252}
1253
1254
1255/* Merge block B into block A. */
1256
1257static void
1258tree_merge_blocks (basic_block a, basic_block b)
1259{
1260 block_stmt_iterator bsi;
1261 tree_stmt_iterator last;
1262
1263 if (dump_file)
1264 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1265
1266 /* Ensure that B follows A. */
1267 move_block_after (b, a);
1268
1269 gcc_assert (EDGE_SUCC (a, 0)->flags & EDGE_FALLTHRU);
1270 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1271
1272 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1273 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1274 {
1275 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1276 bsi_remove (&bsi);
1277 else
1278 {
1279 set_bb_for_stmt (bsi_stmt (bsi), a);
1280 bsi_next (&bsi);
1281 }
1282 }
1283
1284 /* Merge the chains. */
1285 last = tsi_last (a->stmt_list);
1286 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1287 b->stmt_list = NULL;
1288}
1289
1290
1291/* Walk the function tree removing unnecessary statements.
1292
1293 * Empty statement nodes are removed
1294
1295 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1296
1297 * Unnecessary COND_EXPRs are removed
1298
1299 * Some unnecessary BIND_EXPRs are removed
1300
1301 Clearly more work could be done. The trick is doing the analysis
1302 and removal fast enough to be a net improvement in compile times.
1303
1304 Note that when we remove a control structure such as a COND_EXPR
1305 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1306 to ensure we eliminate all the useless code. */
1307
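/* One concrete example of the simplifications performed here (a sketch):
   once the predicate of a conditional is known, e.g.

     if (1) { a = 1; } else { a = 2; }

   remove_useless_stmts_cond replaces the whole COND_EXPR with its THEN
   clause, leaving just  a = 1;  and sets data->repeat so the pass runs
   again over the simplified tree.  */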
1308struct rus_data
1309{
1310 tree *last_goto;
1311 bool repeat;
1312 bool may_throw;
1313 bool may_branch;
1314 bool has_label;
1315};
1316
1317static void remove_useless_stmts_1 (tree *, struct rus_data *);
1318
1319static bool
1320remove_useless_stmts_warn_notreached (tree stmt)
1321{
1322 if (EXPR_HAS_LOCATION (stmt))
1323 {
1324 location_t loc = EXPR_LOCATION (stmt);
1325 warning ("%Hwill never be executed", &loc);
1326 return true;
1327 }
1328
1329 switch (TREE_CODE (stmt))
1330 {
1331 case STATEMENT_LIST:
1332 {
1333 tree_stmt_iterator i;
1334 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1335 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1336 return true;
1337 }
1338 break;
1339
1340 case COND_EXPR:
1341 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1342 return true;
1343 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1344 return true;
1345 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1346 return true;
1347 break;
1348
1349 case TRY_FINALLY_EXPR:
1350 case TRY_CATCH_EXPR:
1351 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1352 return true;
1353 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1354 return true;
1355 break;
1356
1357 case CATCH_EXPR:
1358 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1359 case EH_FILTER_EXPR:
1360 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1361 case BIND_EXPR:
1362 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1363
1364 default:
1365 /* Not a live container. */
1366 break;
1367 }
1368
1369 return false;
1370}
1371
1372static void
1373remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1374{
1375 tree then_clause, else_clause, cond;
1376 bool save_has_label, then_has_label, else_has_label;
1377
1378 save_has_label = data->has_label;
1379 data->has_label = false;
1380 data->last_goto = NULL;
1381
1382 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1383
1384 then_has_label = data->has_label;
1385 data->has_label = false;
1386 data->last_goto = NULL;
1387
1388 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1389
1390 else_has_label = data->has_label;
1391 data->has_label = save_has_label | then_has_label | else_has_label;
1392
1393 then_clause = COND_EXPR_THEN (*stmt_p);
1394 else_clause = COND_EXPR_ELSE (*stmt_p);
1395 cond = COND_EXPR_COND (*stmt_p);
1396
1397 /* If neither arm does anything at all, we can remove the whole IF. */
1398 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1399 {
1400 *stmt_p = build_empty_stmt ();
1401 data->repeat = true;
1402 }
1403
1404 /* If there are no reachable statements in an arm, then we can
1405 zap the entire conditional. */
1406 else if (integer_nonzerop (cond) && !else_has_label)
1407 {
1408 if (warn_notreached)
1409 remove_useless_stmts_warn_notreached (else_clause);
1410 *stmt_p = then_clause;
1411 data->repeat = true;
1412 }
1413 else if (integer_zerop (cond) && !then_has_label)
1414 {
1415 if (warn_notreached)
1416 remove_useless_stmts_warn_notreached (then_clause);
1417 *stmt_p = else_clause;
1418 data->repeat = true;
1419 }
1420
1421 /* Check a couple of simple things on then/else with single stmts. */
1422 else
1423 {
1424 tree then_stmt = expr_only (then_clause);
1425 tree else_stmt = expr_only (else_clause);
1426
1427 /* Notice branches to a common destination. */
1428 if (then_stmt && else_stmt
1429 && TREE_CODE (then_stmt) == GOTO_EXPR
1430 && TREE_CODE (else_stmt) == GOTO_EXPR
1431 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1432 {
1433 *stmt_p = then_stmt;
1434 data->repeat = true;
1435 }
1436
1437 /* If the THEN/ELSE clause merely assigns a value to a variable or
1438 parameter which is already known to contain that value, then
1439 remove the useless THEN/ELSE clause. */
1440 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1441 {
1442 if (else_stmt
1443 && TREE_CODE (else_stmt) == MODIFY_EXPR
1444 && TREE_OPERAND (else_stmt, 0) == cond
1445 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1446 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1447 }
1448 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1449 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1450 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1451 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1452 {
1453 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1454 ? then_stmt : else_stmt);
1455 tree *location = (TREE_CODE (cond) == EQ_EXPR
1456 ? &COND_EXPR_THEN (*stmt_p)
1457 : &COND_EXPR_ELSE (*stmt_p));
1458
1459 if (stmt
1460 && TREE_CODE (stmt) == MODIFY_EXPR
1461 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1462 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1463 *location = alloc_stmt_list ();
1464 }
1465 }
1466
1467 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1468 would be re-introduced during lowering. */
1469 data->last_goto = NULL;
1470}
1471
1472
1473static void
1474remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1475{
1476 bool save_may_branch, save_may_throw;
1477 bool this_may_branch, this_may_throw;
1478
1479 /* Collect may_branch and may_throw information for the body only. */
1480 save_may_branch = data->may_branch;
1481 save_may_throw = data->may_throw;
1482 data->may_branch = false;
1483 data->may_throw = false;
1484 data->last_goto = NULL;
1485
1486 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1487
1488 this_may_branch = data->may_branch;
1489 this_may_throw = data->may_throw;
1490 data->may_branch |= save_may_branch;
1491 data->may_throw |= save_may_throw;
1492 data->last_goto = NULL;
1493
1494 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1495
1496 /* If the body is empty, then we can emit the FINALLY block without
1497 the enclosing TRY_FINALLY_EXPR. */
1498 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1499 {
1500 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1501 data->repeat = true;
1502 }
1503
1504 /* If the handler is empty, then we can emit the TRY block without
1505 the enclosing TRY_FINALLY_EXPR. */
1506 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1507 {
1508 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1509 data->repeat = true;
1510 }
1511
1512 /* If the body neither throws, nor branches, then we can safely
1513 string the TRY and FINALLY blocks together. */
1514 else if (!this_may_branch && !this_may_throw)
1515 {
1516 tree stmt = *stmt_p;
1517 *stmt_p = TREE_OPERAND (stmt, 0);
1518 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1519 data->repeat = true;
1520 }
1521}
1522
1523
1524static void
1525remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1526{
1527 bool save_may_throw, this_may_throw;
1528 tree_stmt_iterator i;
1529 tree stmt;
1530
1531 /* Collect may_throw information for the body only. */
1532 save_may_throw = data->may_throw;
1533 data->may_throw = false;
1534 data->last_goto = NULL;
1535
1536 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1537
1538 this_may_throw = data->may_throw;
1539 data->may_throw = save_may_throw;
1540
1541 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1542 if (!this_may_throw)
1543 {
1544 if (warn_notreached)
1545 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1546 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1547 data->repeat = true;
1548 return;
1549 }
1550
1551 /* Process the catch clause specially. We may be able to tell that
1552 no exceptions propagate past this point. */
1553
1554 this_may_throw = true;
1555 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1556 stmt = tsi_stmt (i);
1557 data->last_goto = NULL;
1558
1559 switch (TREE_CODE (stmt))
1560 {
1561 case CATCH_EXPR:
1562 for (; !tsi_end_p (i); tsi_next (&i))
1563 {
1564 stmt = tsi_stmt (i);
1565 /* If we catch all exceptions, then the body does not
1566 propagate exceptions past this point. */
1567 if (CATCH_TYPES (stmt) == NULL)
1568 this_may_throw = false;
1569 data->last_goto = NULL;
1570 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1571 }
1572 break;
1573
1574 case EH_FILTER_EXPR:
1575 if (EH_FILTER_MUST_NOT_THROW (stmt))
1576 this_may_throw = false;
1577 else if (EH_FILTER_TYPES (stmt) == NULL)
1578 this_may_throw = false;
1579 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1580 break;
1581
1582 default:
1583 /* Otherwise this is a cleanup. */
1584 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1585
1586 /* If the cleanup is empty, then we can emit the TRY block without
1587 the enclosing TRY_CATCH_EXPR. */
1588 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1589 {
1590 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1591 data->repeat = true;
1592 }
1593 break;
1594 }
1595 data->may_throw |= this_may_throw;
1596}
1597
1598
1599static void
1600remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1601{
1602 tree block;
1603
1604 /* First remove anything underneath the BIND_EXPR. */
1605 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1606
1607 /* If the BIND_EXPR has no variables, then we can pull everything
1608 up one level and remove the BIND_EXPR, unless this is the toplevel
1609 BIND_EXPR for the current function or an inlined function.
1610
1611 When this situation occurs we will want to apply this
1612 optimization again. */
1613 block = BIND_EXPR_BLOCK (*stmt_p);
1614 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1615 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1616 && (! block
1617 || ! BLOCK_ABSTRACT_ORIGIN (block)
1618 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1619 != FUNCTION_DECL)))
1620 {
1621 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1622 data->repeat = true;
1623 }
1624}
1625
1626
1627static void
1628remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1629{
1630 tree dest = GOTO_DESTINATION (*stmt_p);
1631
1632 data->may_branch = true;
1633 data->last_goto = NULL;
1634
1635 /* Record the last goto expr, so that we can delete it if unnecessary. */
1636 if (TREE_CODE (dest) == LABEL_DECL)
1637 data->last_goto = stmt_p;
1638}
1639
1640
1641static void
1642remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1643{
1644 tree label = LABEL_EXPR_LABEL (*stmt_p);
1645
1646 data->has_label = true;
1647
1648 /* We do want to jump across non-local label receiver code. */
1649 if (DECL_NONLOCAL (label))
1650 data->last_goto = NULL;
1651
1652 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1653 {
1654 *data->last_goto = build_empty_stmt ();
1655 data->repeat = true;
1656 }
1657
1658 /* ??? Add something here to delete unused labels. */
1659}
1660
1661
1662/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1663 decl. This allows us to eliminate redundant or useless
1664 calls to "const" functions.
1665
1666 The gimplifier already does the same operation, but we may notice
1667 functions being const or pure only after their calls have been
1668 gimplified, so we need to update the flag. */
1669
1670static void
1671update_call_expr_flags (tree call)
1672{
1673 tree decl = get_callee_fndecl (call);
1674 if (!decl)
1675 return;
1676 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1677 TREE_SIDE_EFFECTS (call) = 0;
1678 if (TREE_NOTHROW (decl))
1679 TREE_NOTHROW (call) = 1;
1680}
1681
1682
1683/* T is CALL_EXPR. Set current_function_calls_* flags. */
1684
1685void
1686notice_special_calls (tree t)
1687{
1688 int flags = call_expr_flags (t);
1689
1690 if (flags & ECF_MAY_BE_ALLOCA)
1691 current_function_calls_alloca = true;
1692 if (flags & ECF_RETURNS_TWICE)
1693 current_function_calls_setjmp = true;
1694}
1695
1696
1697/* Clear flags set by notice_special_calls. Used by dead code removal
1698 to update the flags. */
1699
1700void
1701clear_special_calls (void)
1702{
1703 current_function_calls_alloca = false;
1704 current_function_calls_setjmp = false;
1705}
1706
1707
1708static void
1709remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1710{
1711 tree t = *tp, op;
1712
1713 switch (TREE_CODE (t))
1714 {
1715 case COND_EXPR:
1716 remove_useless_stmts_cond (tp, data);
1717 break;
1718
1719 case TRY_FINALLY_EXPR:
1720 remove_useless_stmts_tf (tp, data);
1721 break;
1722
1723 case TRY_CATCH_EXPR:
1724 remove_useless_stmts_tc (tp, data);
1725 break;
1726
1727 case BIND_EXPR:
1728 remove_useless_stmts_bind (tp, data);
1729 break;
1730
1731 case GOTO_EXPR:
1732 remove_useless_stmts_goto (tp, data);
1733 break;
1734
1735 case LABEL_EXPR:
1736 remove_useless_stmts_label (tp, data);
1737 break;
1738
1739 case RETURN_EXPR:
1740 fold_stmt (tp);
1741 data->last_goto = NULL;
1742 data->may_branch = true;
1743 break;
1744
1745 case CALL_EXPR:
1746 fold_stmt (tp);
1747 data->last_goto = NULL;
1748 notice_special_calls (t);
1749 update_call_expr_flags (t);
1750 if (tree_could_throw_p (t))
1751 data->may_throw = true;
1752 break;
1753
1754 case MODIFY_EXPR:
1755 data->last_goto = NULL;
1756 fold_stmt (tp);
1757 op = get_call_expr_in (t);
1758 if (op)
1759 {
1760 update_call_expr_flags (op);
1761 notice_special_calls (op);
1762 }
1763 if (tree_could_throw_p (t))
1764 data->may_throw = true;
1765 break;
1766
1767 case STATEMENT_LIST:
1768 {
1769 tree_stmt_iterator i = tsi_start (t);
1770 while (!tsi_end_p (i))
1771 {
1772 t = tsi_stmt (i);
1773 if (IS_EMPTY_STMT (t))
1774 {
1775 tsi_delink (&i);
1776 continue;
1777 }
1778
1779 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1780
1781 t = tsi_stmt (i);
1782 if (TREE_CODE (t) == STATEMENT_LIST)
1783 {
1784 tsi_link_before (&i, t, TSI_SAME_STMT);
1785 tsi_delink (&i);
1786 }
1787 else
1788 tsi_next (&i);
1789 }
1790 }
1791 break;
1792 case ASM_EXPR:
1793 fold_stmt (tp);
1794 data->last_goto = NULL;
1795 break;
1796
1797 default:
1798 data->last_goto = NULL;
1799 break;
1800 }
1801}
1802
1803static void
1804remove_useless_stmts (void)
1805{
1806 struct rus_data data;
1807
1808 clear_special_calls ();
1809
1810 do
1811 {
1812 memset (&data, 0, sizeof (data));
1813 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1814 }
1815 while (data.repeat);
1816}
1817
1818
1819struct tree_opt_pass pass_remove_useless_stmts =
1820{
1821 "useless", /* name */
1822 NULL, /* gate */
1823 remove_useless_stmts, /* execute */
1824 NULL, /* sub */
1825 NULL, /* next */
1826 0, /* static_pass_number */
1827 0, /* tv_id */
1828 PROP_gimple_any, /* properties_required */
1829 0, /* properties_provided */
1830 0, /* properties_destroyed */
1831 0, /* todo_flags_start */
1832 TODO_dump_func, /* todo_flags_finish */
1833 0 /* letter */
1834};
1835
1836
1837/* Remove obviously useless statements in basic block BB. */
1838
1839static void
1840cfg_remove_useless_stmts_bb (basic_block bb)
1841{
1842 block_stmt_iterator bsi;
1843 tree stmt = NULL_TREE;
1844 tree cond, var = NULL_TREE, val = NULL_TREE;
1845 struct var_ann_d *ann;
1846
1847 /* Check whether we come here from a condition, and if so, get the
1848 condition. */
1849 if (EDGE_COUNT (bb->preds) != 1
1850 || !(EDGE_PRED (bb, 0)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
1851 return;
1852
1853 cond = COND_EXPR_COND (last_stmt (EDGE_PRED (bb, 0)->src));
1854
1855 if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1856 {
1857 var = cond;
1858 val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
1859 ? boolean_false_node : boolean_true_node);
1860 }
1861 else if (TREE_CODE (cond) == TRUTH_NOT_EXPR
1862 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1863 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL))
1864 {
1865 var = TREE_OPERAND (cond, 0);
1866 val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
1867 ? boolean_true_node : boolean_false_node);
1868 }
1869 else
1870 {
1871 if (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE)
1872 cond = invert_truthvalue (cond);
1873 if (TREE_CODE (cond) == EQ_EXPR
1874 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1875 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1876 && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL
1877 || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL
1878 || TREE_CONSTANT (TREE_OPERAND (cond, 1))))
1879 {
1880 var = TREE_OPERAND (cond, 0);
1881 val = TREE_OPERAND (cond, 1);
1882 }
1883 else
1884 return;
1885 }
1886
1887 /* Only work for normal local variables. */
1888 ann = var_ann (var);
1889 if (!ann
1890 || ann->may_aliases
1891 || TREE_ADDRESSABLE (var))
1892 return;
1893
1894 if (! TREE_CONSTANT (val))
1895 {
1896 ann = var_ann (val);
1897 if (!ann
1898 || ann->may_aliases
1899 || TREE_ADDRESSABLE (val))
1900 return;
1901 }
1902
1903 /* Ignore floating point variables, since comparisons on them behave
1904 oddly. */
1905 if (FLOAT_TYPE_P (TREE_TYPE (var)))
1906 return;
1907
1908 for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
1909 {
1910 stmt = bsi_stmt (bsi);
1911
1912 /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
1913 which is already known to contain that value, then remove the useless
1914 THEN/ELSE clause. */
1915 if (TREE_CODE (stmt) == MODIFY_EXPR
1916 && TREE_OPERAND (stmt, 0) == var
1917 && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0))
1918 {
1919 bsi_remove (&bsi);
1920 continue;
1921 }
1922
1923 /* Invalidate the var if we encounter something that could modify it.
1924 Likewise for the value it was previously set to. Note that we only
1925 consider values that are either a VAR_DECL or PARM_DECL so we
1926 can test for conflict very simply. */
6de9cd9a 1927 if (TREE_CODE (stmt) == ASM_EXPR
6de9cd9a 1928 || (TREE_CODE (stmt) == MODIFY_EXPR
1929 && (TREE_OPERAND (stmt, 0) == var
1930 || TREE_OPERAND (stmt, 0) == val)))
1931 return;
1932
1933 bsi_next (&bsi);
1934 }
1935}
1936
1937
1938/* A CFG-aware version of remove_useless_stmts. */
1939
1940void
1941cfg_remove_useless_stmts (void)
1942{
1943 basic_block bb;
1944
1945#ifdef ENABLE_CHECKING
1946 verify_flow_info ();
1947#endif
1948
1949 FOR_EACH_BB (bb)
1950 {
1951 cfg_remove_useless_stmts_bb (bb);
1952 }
1953}
1954
1955
1956/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1957
1958static void
1959remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1960{
1961 tree phi;
1962
1963 /* Since this block is no longer reachable, we can just delete all
1964 of its PHI nodes. */
1965 phi = phi_nodes (bb);
1966 while (phi)
1967 {
17192884 1968 tree next = PHI_CHAIN (phi);
1969 remove_phi_node (phi, NULL_TREE, bb);
1970 phi = next;
1971 }
1972
1973 /* Remove edges to BB's successors. */
628f6a4e 1974 while (EDGE_COUNT (bb->succs) > 0)
d0d2cc21 1975 remove_edge (EDGE_SUCC (bb, 0));
1976}
1977
1978
1979/* Remove statements of basic block BB. */
1980
1981static void
1982remove_bb (basic_block bb)
1983{
1984 block_stmt_iterator i;
9506ac2b 1985 source_locus loc = 0;
1986
1987 if (dump_file)
1988 {
1989 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1990 if (dump_flags & TDF_DETAILS)
1991 {
1992 dump_bb (bb, dump_file, 0);
1993 fprintf (dump_file, "\n");
1994 }
1995 }
1996
1997 /* Remove all the instructions in the block. */
77568960 1998 for (i = bsi_start (bb); !bsi_end_p (i);)
1999 {
2000 tree stmt = bsi_stmt (i);
2001 if (TREE_CODE (stmt) == LABEL_EXPR
2002 && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
2003 {
2004 basic_block new_bb = bb->prev_bb;
2005 block_stmt_iterator new_bsi = bsi_after_labels (new_bb);
2006
2007 bsi_remove (&i);
2008 bsi_insert_after (&new_bsi, stmt, BSI_NEW_STMT);
2009 }
2010 else
2011 {
2012 release_defs (stmt);
6de9cd9a 2013
2014 set_bb_for_stmt (stmt, NULL);
2015 bsi_remove (&i);
2016 }
2017
2018 /* Don't warn for removed gotos. Gotos are often removed due to
2019 jump threading, thus resulting in bogus warnings. Not great,
2020 since this way we lose warnings for gotos in the original
2021 program that are indeed unreachable. */
2022 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2023#ifdef USE_MAPPED_LOCATION
2024 loc = EXPR_LOCATION (stmt);
2025#else
6de9cd9a 2026 loc = EXPR_LOCUS (stmt);
9506ac2b 2027#endif
2028 }
2029
2030 /* If requested, give a warning that the first statement in the
2031 block is unreachable. The loop above walks forward but only
2032 records the first location it finds, so LOC refers to the first
2033 statement in the block. */
2034 if (warn_notreached && loc)
2035#ifdef USE_MAPPED_LOCATION
2036 warning ("%Hwill never be executed", &loc);
2037#else
6de9cd9a 2038 warning ("%Hwill never be executed", loc);
9506ac2b 2039#endif
2040
2041 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2042}
2043
2044/* Try to remove superfluous control structures. */
2045
2046static bool
2047cleanup_control_flow (void)
2048{
2049 basic_block bb;
2050 block_stmt_iterator bsi;
2051 bool retval = false;
2052 tree stmt;
2053
2054 FOR_EACH_BB (bb)
2055 {
2056 bsi = bsi_last (bb);
2057
2058 if (bsi_end_p (bsi))
2059 continue;
2060
2061 stmt = bsi_stmt (bsi);
2062 if (TREE_CODE (stmt) == COND_EXPR
2063 || TREE_CODE (stmt) == SWITCH_EXPR)
2064 retval |= cleanup_control_expr_graph (bb, bsi);
2065 }
2066 return retval;
2067}
2068
2069
2070/* Disconnect an unreachable block in the control expression starting
2071 at block BB. */
2072
2073static bool
2074cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
2075{
2076 edge taken_edge;
2077 bool retval = false;
2078 tree expr = bsi_stmt (bsi), val;
2079
628f6a4e 2080 if (EDGE_COUNT (bb->succs) > 1)
6de9cd9a 2081 {
2082 edge e;
2083 edge_iterator ei;
2084
2085 switch (TREE_CODE (expr))
2086 {
2087 case COND_EXPR:
2088 val = COND_EXPR_COND (expr);
2089 break;
2090
2091 case SWITCH_EXPR:
2092 val = SWITCH_COND (expr);
2093 if (TREE_CODE (val) != INTEGER_CST)
2094 return false;
2095 break;
2096
2097 default:
1e128c5f 2098 gcc_unreachable ();
2099 }
2100
2101 taken_edge = find_taken_edge (bb, val);
2102 if (!taken_edge)
2103 return false;
2104
2105 /* Remove all the edges except the one that is always executed. */
628f6a4e 2106 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6de9cd9a 2107 {
2108 if (e != taken_edge)
2109 {
2110 taken_edge->probability += e->probability;
2111 taken_edge->count += e->count;
d0d2cc21 2112 remove_edge (e);
2113 retval = true;
2114 }
2115 else
2116 ei_next (&ei);
2117 }
2118 if (taken_edge->probability > REG_BR_PROB_BASE)
2119 taken_edge->probability = REG_BR_PROB_BASE;
2120 }
2121 else
628f6a4e 2122 taken_edge = EDGE_SUCC (bb, 0);
2123
2124 bsi_remove (&bsi);
2125 taken_edge->flags = EDGE_FALLTHRU;
2126
2127 /* We removed some paths from the cfg. */
fce22de5 2128 free_dominance_info (CDI_DOMINATORS);
2129
2130 return retval;
2131}
2132
2133
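Editorial note: the loop above folds the probability and count of every pruned edge into the one edge known to be taken, then clamps the result. Below is a minimal standalone sketch of that bookkeeping under stated assumptions; TOY_PROB_BASE stands in for GCC's REG_BR_PROB_BASE fixed-point scale, and the struct is illustrative rather than GCC's edge type.

#define TOY_PROB_BASE 10000	/* stand-in for REG_BR_PROB_BASE */

struct toy_edge_info
{
  int probability;		/* fixed-point share of TOY_PROB_BASE */
  long long count;		/* profile execution count */
};

/* Fold a removed edge's share into the surviving (taken) edge.  */
static void
absorb_removed_edge (struct toy_edge_info *taken,
                     const struct toy_edge_info *removed)
{
  taken->probability += removed->probability;
  taken->count += removed->count;

  /* Earlier rounding can push the sum past the base; clamp so the
     surviving edge never claims more than "always taken".  */
  if (taken->probability > TOY_PROB_BASE)
    taken->probability = TOY_PROB_BASE;
}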
2134/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2135 predicate VAL, return the edge that will be taken out of the block.
2136 If VAL does not match a unique edge, NULL is returned. */
2137
2138edge
2139find_taken_edge (basic_block bb, tree val)
2140{
2141 tree stmt;
2142
2143 stmt = last_stmt (bb);
2144
2145 gcc_assert (stmt);
2146 gcc_assert (is_ctrl_stmt (stmt));
65f4323d 2147 gcc_assert (val);
6de9cd9a 2148
255cd731 2149 /* If VAL is a predicate of the form N RELOP N, where N is an
6a97296a 2150 SSA_NAME, we can usually determine its truth value. */
65f4323d 2151 if (COMPARISON_CLASS_P (val))
6a97296a 2152 val = fold (val);
255cd731 2153
2154 /* If VAL is not a constant, we can't determine which edge might
2155 be taken. */
65f4323d 2156 if (!really_constant_p (val))
2157 return NULL;
2158
2159 if (TREE_CODE (stmt) == COND_EXPR)
2160 return find_taken_edge_cond_expr (bb, val);
2161
2162 if (TREE_CODE (stmt) == SWITCH_EXPR)
2163 return find_taken_edge_switch_expr (bb, val);
2164
35920270 2165 gcc_unreachable ();
2166}
2167
2168
2169/* Given a constant value VAL and the entry block BB to a COND_EXPR
2170 statement, determine which of the two edges will be taken out of the
2171 block. Return NULL if either edge may be taken. */
2172
2173static edge
2174find_taken_edge_cond_expr (basic_block bb, tree val)
2175{
2176 edge true_edge, false_edge;
2177
2178 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2179
2180 /* Otherwise, try to determine which branch of the if() will be taken.
2181 If VAL is a constant but it can't be reduced to a 0 or a 1, then
2182 we don't really know which edge will be taken at runtime. This
2183 may happen when comparing addresses (e.g., if (&var1 == 4)). */
2184 if (integer_nonzerop (val))
2185 return true_edge;
2186 else if (integer_zerop (val))
2187 return false_edge;
2188 else
2189 return NULL;
2190}
2191
2192
2193/* Given a constant value VAL and the entry block BB to a SWITCH_EXPR
2194 statement, determine which edge will be taken out of the block. Return
2195 NULL if any edge may be taken. */
2196
2197static edge
2198find_taken_edge_switch_expr (basic_block bb, tree val)
2199{
2200 tree switch_expr, taken_case;
2201 basic_block dest_bb;
2202 edge e;
2203
2204 if (TREE_CODE (val) != INTEGER_CST)
2205 return NULL;
2206
2207 switch_expr = last_stmt (bb);
2208 taken_case = find_case_label_for_value (switch_expr, val);
2209 dest_bb = label_to_block (CASE_LABEL (taken_case));
2210
2211 e = find_edge (bb, dest_bb);
1e128c5f 2212 gcc_assert (e);
2213 return e;
2214}
2215
2216
2217/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2218 We can make optimal use here of the fact that the case labels are
2219 sorted: We can do a binary search for a case matching VAL. */
2220
2221static tree
2222find_case_label_for_value (tree switch_expr, tree val)
2223{
2224 tree vec = SWITCH_LABELS (switch_expr);
2225 size_t low, high, n = TREE_VEC_LENGTH (vec);
2226 tree default_case = TREE_VEC_ELT (vec, n - 1);
6de9cd9a 2227
f667741c 2228 for (low = -1, high = n - 1; high - low > 1; )
6de9cd9a 2229 {
f667741c 2230 size_t i = (high + low) / 2;
6de9cd9a 2231 tree t = TREE_VEC_ELT (vec, i);
2232 int cmp;
2233
2234 /* Cache the result of comparing CASE_LOW and val. */
2235 cmp = tree_int_cst_compare (CASE_LOW (t), val);
6de9cd9a 2236
2237 if (cmp > 0)
2238 high = i;
2239 else
2240 low = i;
2241
2242 if (CASE_HIGH (t) == NULL)
6de9cd9a 2243 {
2244 /* A single-valued case label. */
2245 if (cmp == 0)
2246 return t;
2247 }
2248 else
2249 {
2250 /* A case range. We can only handle integer ranges. */
f667741c 2251 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2252 return t;
2253 }
2254 }
2255
2256 return default_case;
2257}
2258
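Editorial note: the binary search above is easier to follow with the tree accessors stripped away. The sketch below applies the same strategy to a plain array of sorted, non-overlapping ranges whose last slot is the catch-all default (mirroring the SWITCH_LABELS layout described above); all names here are illustrative and not part of GCC.

struct toy_case_range
{
  int low, high;		/* high == low for a single-valued label */
};

/* Return the index of the range containing VAL, or N - 1 (the default
   slot, whose bounds are never examined) when no range matches.  */
static int
find_toy_case_index (const struct toy_case_range *vec, int n, int val)
{
  int low = -1, high = n - 1;

  while (high - low > 1)
    {
      int i = (high + low) / 2;
      int cmp = (vec[i].low > val) - (vec[i].low < val);

      if (cmp > 0)
        high = i;
      else
        low = i;

      /* Same containment test as above: LOW <= VAL <= HIGH.  */
      if (cmp <= 0 && val <= vec[i].high)
        return i;
    }

  return n - 1;			/* No match: take the default case.  */
}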
2259
2260/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2261 those alternatives are equal in each of the PHI nodes, then return
2262 true, else return false. */
2263
2264static bool
2265phi_alternatives_equal (basic_block dest, edge e1, edge e2)
2266{
2267 tree phi, val1, val2;
2268 int n1, n2;
2269
17192884 2270 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
2271 {
2272 n1 = phi_arg_from_edge (phi, e1);
2273 n2 = phi_arg_from_edge (phi, e2);
2274
2275 gcc_assert (n1 >= 0);
2276 gcc_assert (n2 >= 0);
2277
2278 val1 = PHI_ARG_DEF (phi, n1);
2279 val2 = PHI_ARG_DEF (phi, n2);
2280
cdef8bc6 2281 if (!operand_equal_for_phi_arg_p (val1, val2))
2282 return false;
2283 }
2284
2285 return true;
2286}
2287
2288
2289/*---------------------------------------------------------------------------
2290 Debugging functions
2291---------------------------------------------------------------------------*/
2292
2293/* Dump tree-specific information of block BB to file OUTF. */
2294
2295void
2296tree_dump_bb (basic_block bb, FILE *outf, int indent)
2297{
2298 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2299}
2300
2301
2302/* Dump a basic block on stderr. */
2303
2304void
2305debug_tree_bb (basic_block bb)
2306{
2307 dump_bb (bb, stderr, 0);
2308}
2309
2310
2311/* Dump basic block with index N on stderr. */
2312
2313basic_block
2314debug_tree_bb_n (int n)
2315{
2316 debug_tree_bb (BASIC_BLOCK (n));
2317 return BASIC_BLOCK (n);
2318}
2319
2320
2321/* Dump the CFG on stderr.
2322
2323 FLAGS are the same used by the tree dumping functions
2324 (see TDF_* in tree.h). */
2325
2326void
2327debug_tree_cfg (int flags)
2328{
2329 dump_tree_cfg (stderr, flags);
2330}
2331
2332
2333/* Dump the program showing basic block boundaries on the given FILE.
2334
2335 FLAGS are the same used by the tree dumping functions (see TDF_* in
2336 tree.h). */
2337
2338void
2339dump_tree_cfg (FILE *file, int flags)
2340{
2341 if (flags & TDF_DETAILS)
2342 {
2343 const char *funcname
673fda6b 2344 = lang_hooks.decl_printable_name (current_function_decl, 2);
2345
2346 fputc ('\n', file);
2347 fprintf (file, ";; Function %s\n\n", funcname);
2348 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2349 n_basic_blocks, n_edges, last_basic_block);
2350
2351 brief_dump_cfg (file);
2352 fprintf (file, "\n");
2353 }
2354
2355 if (flags & TDF_STATS)
2356 dump_cfg_stats (file);
2357
2358 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2359}
2360
2361
2362/* Dump CFG statistics on FILE. */
2363
2364void
2365dump_cfg_stats (FILE *file)
2366{
2367 static long max_num_merged_labels = 0;
2368 unsigned long size, total = 0;
f7fda749 2369 int n_edges;
2370 basic_block bb;
2371 const char * const fmt_str = "%-30s%-13s%12s\n";
f7fda749 2372 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2373 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2374 const char *funcname
673fda6b 2375 = lang_hooks.decl_printable_name (current_function_decl, 2);
2376
2377
2378 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2379
2380 fprintf (file, "---------------------------------------------------------\n");
2381 fprintf (file, fmt_str, "", " Number of ", "Memory");
2382 fprintf (file, fmt_str, "", " instances ", "used ");
2383 fprintf (file, "---------------------------------------------------------\n");
2384
2385 size = n_basic_blocks * sizeof (struct basic_block_def);
2386 total += size;
2387 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2388 SCALE (size), LABEL (size));
2389
2390 n_edges = 0;
2391 FOR_EACH_BB (bb)
628f6a4e 2392 n_edges += EDGE_COUNT (bb->succs);
2393 size = n_edges * sizeof (struct edge_def);
2394 total += size;
2395 fprintf (file, fmt_str_1, "Edges", n_edges, SCALE (size), LABEL (size));
2396
2397 size = n_basic_blocks * sizeof (struct bb_ann_d);
2398 total += size;
2399 fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
2400 SCALE (size), LABEL (size));
2401
2402 fprintf (file, "---------------------------------------------------------\n");
2403 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2404 LABEL (total));
2405 fprintf (file, "---------------------------------------------------------\n");
2406 fprintf (file, "\n");
2407
2408 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2409 max_num_merged_labels = cfg_stats.num_merged_labels;
2410
2411 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2412 cfg_stats.num_merged_labels, max_num_merged_labels);
2413
2414 fprintf (file, "\n");
2415}
2416
2417
2418/* Dump CFG statistics on stderr. Keep extern so that it's always
2419 linked in the final executable. */
2420
2421void
2422debug_cfg_stats (void)
2423{
2424 dump_cfg_stats (stderr);
2425}
2426
2427
2428/* Dump the flowgraph to a .vcg FILE. */
2429
2430static void
2431tree_cfg2vcg (FILE *file)
2432{
2433 edge e;
628f6a4e 2434 edge_iterator ei;
2435 basic_block bb;
2436 const char *funcname
673fda6b 2437 = lang_hooks.decl_printable_name (current_function_decl, 2);
2438
2439 /* Write the file header. */
2440 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2441 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2442 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2443
2444 /* Write blocks and edges. */
628f6a4e 2445 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2446 {
2447 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2448 e->dest->index);
2449
2450 if (e->flags & EDGE_FAKE)
2451 fprintf (file, " linestyle: dotted priority: 10");
2452 else
2453 fprintf (file, " linestyle: solid priority: 100");
2454
2455 fprintf (file, " }\n");
2456 }
2457 fputc ('\n', file);
2458
2459 FOR_EACH_BB (bb)
2460 {
2461 enum tree_code head_code, end_code;
2462 const char *head_name, *end_name;
2463 int head_line = 0;
2464 int end_line = 0;
2465 tree first = first_stmt (bb);
2466 tree last = last_stmt (bb);
2467
2468 if (first)
2469 {
2470 head_code = TREE_CODE (first);
2471 head_name = tree_code_name[head_code];
2472 head_line = get_lineno (first);
2473 }
2474 else
2475 head_name = "no-statement";
2476
2477 if (last)
2478 {
2479 end_code = TREE_CODE (last);
2480 end_name = tree_code_name[end_code];
2481 end_line = get_lineno (last);
2482 }
2483 else
2484 end_name = "no-statement";
2485
2486 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2487 bb->index, bb->index, head_name, head_line, end_name,
2488 end_line);
2489
628f6a4e 2490 FOR_EACH_EDGE (e, ei, bb->succs)
2491 {
2492 if (e->dest == EXIT_BLOCK_PTR)
2493 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2494 else
2495 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2496
2497 if (e->flags & EDGE_FAKE)
2498 fprintf (file, " priority: 10 linestyle: dotted");
2499 else
2500 fprintf (file, " priority: 100 linestyle: solid");
2501
2502 fprintf (file, " }\n");
2503 }
2504
2505 if (bb->next_bb != EXIT_BLOCK_PTR)
2506 fputc ('\n', file);
2507 }
2508
2509 fputs ("}\n\n", file);
2510}
2511
2512
2513
2514/*---------------------------------------------------------------------------
2515 Miscellaneous helpers
2516---------------------------------------------------------------------------*/
2517
2518/* Return true if T represents a stmt that always transfers control. */
2519
2520bool
2521is_ctrl_stmt (tree t)
2522{
2523 return (TREE_CODE (t) == COND_EXPR
2524 || TREE_CODE (t) == SWITCH_EXPR
2525 || TREE_CODE (t) == GOTO_EXPR
2526 || TREE_CODE (t) == RETURN_EXPR
2527 || TREE_CODE (t) == RESX_EXPR);
2528}
2529
2530
2531/* Return true if T is a statement that may alter the flow of control
2532 (e.g., a call to a non-returning function). */
2533
2534bool
2535is_ctrl_altering_stmt (tree t)
2536{
cd709752 2537 tree call;
6de9cd9a 2538
1e128c5f 2539 gcc_assert (t);
2540 call = get_call_expr_in (t);
2541 if (call)
6de9cd9a 2542 {
2543 /* A non-pure/const CALL_EXPR alters flow control if the current
2544 function has nonlocal labels. */
cd709752 2545 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2546 return true;
2547
2548 /* A CALL_EXPR also alters control flow if it does not return. */
6e14af16 2549 if (call_expr_flags (call) & ECF_NORETURN)
6de9cd9a 2550 return true;
2551 }
2552
2553 /* If a statement can throw, it alters control flow. */
2554 return tree_can_throw_internal (t);
2555}
2556
2557
2558/* Return true if T is a computed goto. */
2559
2560bool
2561computed_goto_p (tree t)
2562{
2563 return (TREE_CODE (t) == GOTO_EXPR
2564 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2565}
2566
2567
2568/* Checks whether EXPR is a simple local goto. */
2569
2570bool
2571simple_goto_p (tree expr)
2572{
2573 return (TREE_CODE (expr) == GOTO_EXPR
2574 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2575}
2576
2577
2578/* Return true if T should start a new basic block. PREV_T is the
2579 statement preceding T. It is used when T is a label or a case label.
2580 Labels should only start a new basic block if their previous statement
2581 wasn't a label. Otherwise, a sequence of labels would generate
2582 unnecessary basic blocks that only contain a single label. */
2583
2584static inline bool
2585stmt_starts_bb_p (tree t, tree prev_t)
2586{
2587 enum tree_code code;
2588
2589 if (t == NULL_TREE)
2590 return false;
2591
2592 /* LABEL_EXPRs start a new basic block only if the preceding
2593 statement wasn't a label of the same type. This prevents the
2594 creation of consecutive blocks that have nothing but a single
2595 label. */
2596 code = TREE_CODE (t);
2597 if (code == LABEL_EXPR)
2598 {
2599 /* Nonlocal and computed GOTO targets always start a new block. */
2600 if (code == LABEL_EXPR
2601 && (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2602 || FORCED_LABEL (LABEL_EXPR_LABEL (t))))
2603 return true;
2604
2605 if (prev_t && TREE_CODE (prev_t) == code)
2606 {
2607 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2608 return true;
2609
2610 cfg_stats.num_merged_labels++;
2611 return false;
2612 }
2613 else
2614 return true;
2615 }
2616
2617 return false;
2618}
2619
2620
2621/* Return true if T should end a basic block. */
2622
2623bool
2624stmt_ends_bb_p (tree t)
2625{
2626 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2627}
2628
2629
2630/* Add gotos that used to be represented implicitly in the CFG. */
2631
2632void
2633disband_implicit_edges (void)
2634{
2635 basic_block bb;
2636 block_stmt_iterator last;
2637 edge e;
628f6a4e 2638 edge_iterator ei;
eb4e1c01 2639 tree stmt, label;
2640
2641 FOR_EACH_BB (bb)
2642 {
2643 last = bsi_last (bb);
2644 stmt = last_stmt (bb);
2645
2646 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2647 {
2648 /* Remove superfluous gotos from COND_EXPR branches. This was
2649 moved here from cfg_remove_useless_stmts because it violates
2650 the invariants of the tree/CFG correspondence and thus fits
2651 better here, where we do it anyway. */
2652 e = find_edge (bb, bb->next_bb);
2653 if (e)
6de9cd9a 2654 {
2655 if (e->flags & EDGE_TRUE_VALUE)
2656 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2657 else if (e->flags & EDGE_FALSE_VALUE)
2658 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2659 else
1e128c5f 2660 gcc_unreachable ();
2661 e->flags |= EDGE_FALLTHRU;
2662 }
2663
2664 continue;
2665 }
2666
2667 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2668 {
2669 /* Remove the RETURN_EXPR if we may fall through to the exit
2670 instead. */
2671 gcc_assert (EDGE_COUNT (bb->succs) == 1);
2672 gcc_assert (EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR);
2673
2674 if (bb->next_bb == EXIT_BLOCK_PTR
2675 && !TREE_OPERAND (stmt, 0))
2676 {
2677 bsi_remove (&last);
628f6a4e 2678 EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
2679 }
2680 continue;
2681 }
2682
2683 /* There can be no fallthru edge if the last statement is a control
2684 one. */
2685 if (stmt && is_ctrl_stmt (stmt))
2686 continue;
2687
2688 /* Find a fallthru edge and emit the goto if necessary. */
628f6a4e 2689 FOR_EACH_EDGE (e, ei, bb->succs)
2690 if (e->flags & EDGE_FALLTHRU)
2691 break;
2692
62b857ea 2693 if (!e || e->dest == bb->next_bb)
2694 continue;
2695
1e128c5f 2696 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2697 label = tree_block_label (e->dest);
2698
62b857ea 2699 stmt = build1 (GOTO_EXPR, void_type_node, label);
2700#ifdef USE_MAPPED_LOCATION
2701 SET_EXPR_LOCATION (stmt, e->goto_locus);
2702#else
62b857ea 2703 SET_EXPR_LOCUS (stmt, e->goto_locus);
9506ac2b 2704#endif
62b857ea 2705 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2706 e->flags &= ~EDGE_FALLTHRU;
2707 }
2708}
2709
242229bb 2710/* Remove block annotations and other data structures. */
2711
2712void
242229bb 2713delete_tree_cfg_annotations (void)
6de9cd9a 2714{
242229bb 2715 basic_block bb;
2716 if (n_basic_blocks > 0)
2717 free_blocks_annotations ();
2718
2719 label_to_block_map = NULL;
2720 free_rbi_pool ();
2721 FOR_EACH_BB (bb)
2722 bb->rbi = NULL;
2723}
2724
2725
2726/* Return the first statement in basic block BB. */
2727
2728tree
2729first_stmt (basic_block bb)
2730{
2731 block_stmt_iterator i = bsi_start (bb);
2732 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2733}
2734
2735
2736/* Return the last statement in basic block BB. */
2737
2738tree
2739last_stmt (basic_block bb)
2740{
2741 block_stmt_iterator b = bsi_last (bb);
2742 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2743}
2744
2745
2746/* Return a pointer to the last statement in block BB. */
2747
2748tree *
2749last_stmt_ptr (basic_block bb)
2750{
2751 block_stmt_iterator last = bsi_last (bb);
2752 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2753}
2754
2755
2756/* Return the last statement of an otherwise empty block. Return NULL
2757 if the block is totally empty, or if it contains more than one
2758 statement. */
2759
2760tree
2761last_and_only_stmt (basic_block bb)
2762{
2763 block_stmt_iterator i = bsi_last (bb);
2764 tree last, prev;
2765
2766 if (bsi_end_p (i))
2767 return NULL_TREE;
2768
2769 last = bsi_stmt (i);
2770 bsi_prev (&i);
2771 if (bsi_end_p (i))
2772 return last;
2773
2774 /* Empty statements should no longer appear in the instruction stream.
2775 Everything that might have appeared before should be deleted by
2776 remove_useless_stmts, and the optimizers should just bsi_remove
2777 instead of smashing with build_empty_stmt.
2778
2779 Thus the only thing that should appear here in a block containing
2780 one executable statement is a label. */
2781 prev = bsi_stmt (i);
2782 if (TREE_CODE (prev) == LABEL_EXPR)
2783 return last;
2784 else
2785 return NULL_TREE;
2786}
2787
2788
2789/* Mark BB as the basic block holding statement T. */
2790
2791void
2792set_bb_for_stmt (tree t, basic_block bb)
2793{
2794 if (TREE_CODE (t) == PHI_NODE)
2795 PHI_BB (t) = bb;
2796 else if (TREE_CODE (t) == STATEMENT_LIST)
2797 {
2798 tree_stmt_iterator i;
2799 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2800 set_bb_for_stmt (tsi_stmt (i), bb);
2801 }
2802 else
2803 {
2804 stmt_ann_t ann = get_stmt_ann (t);
2805 ann->bb = bb;
2806
2807 /* If the statement is a label, add the label to block-to-labels map
2808 so that we can speed up edge creation for GOTO_EXPRs. */
2809 if (TREE_CODE (t) == LABEL_EXPR)
2810 {
2811 int uid;
2812
2813 t = LABEL_EXPR_LABEL (t);
2814 uid = LABEL_DECL_UID (t);
2815 if (uid == -1)
2816 {
2817 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2818 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2819 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2820 }
2821 else
2822 /* We're moving an existing label. Make sure that we've
2823 removed it from the old block. */
2824 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2825 VARRAY_BB (label_to_block_map, uid) = bb;
2826 }
2827 }
2828}
2829
2830/* Finds iterator for STMT. */
2831
2832extern block_stmt_iterator
1a1804c2 2833bsi_for_stmt (tree stmt)
2834{
2835 block_stmt_iterator bsi;
2836
2837 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2838 if (bsi_stmt (bsi) == stmt)
2839 return bsi;
2840
1e128c5f 2841 gcc_unreachable ();
8b11a64c 2842}
2843
2844/* Insert statement (or statement list) T before the statement
2845 pointed-to by iterator I. M specifies how to update iterator I
2846 after insertion (see enum bsi_iterator_update). */
2847
2848void
2849bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2850{
2851 set_bb_for_stmt (t, i->bb);
6de9cd9a 2852 tsi_link_before (&i->tsi, t, m);
68b9f53b 2853 modify_stmt (t);
2854}
2855
2856
2857/* Insert statement (or statement list) T after the statement
2858 pointed-to by iterator I. M specifies how to update iterator I
2859 after insertion (see enum bsi_iterator_update). */
2860
2861void
2862bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2863{
2864 set_bb_for_stmt (t, i->bb);
6de9cd9a 2865 tsi_link_after (&i->tsi, t, m);
68b9f53b 2866 modify_stmt (t);
2867}
2868
2869
2870/* Remove the statement pointed to by iterator I. The iterator is updated
2871 to the next statement. */
2872
2873void
2874bsi_remove (block_stmt_iterator *i)
2875{
2876 tree t = bsi_stmt (*i);
2877 set_bb_for_stmt (t, NULL);
2878 tsi_delink (&i->tsi);
2879}
2880
2881
2882/* Move the statement at FROM so it comes right after the statement at TO. */
2883
2884void
2885bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2886{
2887 tree stmt = bsi_stmt (*from);
2888 bsi_remove (from);
2889 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2890}
2891
2892
2893/* Move the statement at FROM so it comes right before the statement at TO. */
2894
2895void
2896bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2897{
2898 tree stmt = bsi_stmt (*from);
2899 bsi_remove (from);
2900 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2901}
2902
2903
2904/* Move the statement at FROM to the end of basic block BB. */
2905
2906void
2907bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2908{
2909 block_stmt_iterator last = bsi_last (bb);
2910
2911 /* Have to check bsi_end_p because it could be an empty block. */
2912 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2913 bsi_move_before (from, &last);
2914 else
2915 bsi_move_after (from, &last);
2916}
2917
2918
2919/* Replace the contents of the statement pointed to by iterator BSI
2920 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2921 information of the original statement is preserved. */
2922
2923void
2924bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2925{
2926 int eh_region;
2927 tree orig_stmt = bsi_stmt (*bsi);
2928
2929 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2930 set_bb_for_stmt (stmt, bsi->bb);
2931
2932 /* Preserve EH region information from the original statement, if
2933 requested by the caller. */
2934 if (preserve_eh_info)
2935 {
2936 eh_region = lookup_stmt_eh_region (orig_stmt);
2937 if (eh_region >= 0)
2938 add_stmt_to_eh_region (stmt, eh_region);
2939 }
2940
2941 *bsi_stmt_ptr (*bsi) = stmt;
2942 modify_stmt (stmt);
2943}
2944
2945
2946/* Insert the statement pointed-to by BSI into edge E. Every attempt
2947 is made to place the statement in an existing basic block, but
2948 sometimes that isn't possible. When it isn't possible, the edge is
2949 split and the statement is added to the new block.
2950
2951 In all cases, the returned *BSI points to the correct location. The
2952 return value is true if insertion should be done after the location,
2953 or false if it should be done before the location. If new basic block
2954 has to be created, it is stored in *NEW_BB. */
2955
2956static bool
2957tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2958 basic_block *new_bb)
2959{
2960 basic_block dest, src;
2961 tree tmp;
2962
2963 dest = e->dest;
2964 restart:
2965
2966 /* If the destination has one predecessor which has no PHI nodes,
2967 insert there. Except for the exit block.
2968
2969 The requirement for no PHI nodes could be relaxed. Basically we
2970 would have to examine the PHIs to prove that none of them used
e28d0cfb 2971 the value set by the statement we want to insert on E. That
6de9cd9a 2972 hardly seems worth the effort. */
628f6a4e 2973 if (EDGE_COUNT (dest->preds) == 1
2974 && ! phi_nodes (dest)
2975 && dest != EXIT_BLOCK_PTR)
2976 {
2977 *bsi = bsi_start (dest);
2978 if (bsi_end_p (*bsi))
2979 return true;
2980
2981 /* Make sure we insert after any leading labels. */
2982 tmp = bsi_stmt (*bsi);
2983 while (TREE_CODE (tmp) == LABEL_EXPR)
2984 {
2985 bsi_next (bsi);
2986 if (bsi_end_p (*bsi))
2987 break;
2988 tmp = bsi_stmt (*bsi);
2989 }
2990
2991 if (bsi_end_p (*bsi))
2992 {
2993 *bsi = bsi_last (dest);
2994 return true;
2995 }
2996 else
2997 return false;
2998 }
2999
3000 /* If the source has one successor, the edge is not abnormal and
3001 the last statement does not end a basic block, insert there.
3002 Except for the entry block. */
3003 src = e->src;
3004 if ((e->flags & EDGE_ABNORMAL) == 0
628f6a4e 3005 && EDGE_COUNT (src->succs) == 1
3006 && src != ENTRY_BLOCK_PTR)
3007 {
3008 *bsi = bsi_last (src);
3009 if (bsi_end_p (*bsi))
3010 return true;
3011
3012 tmp = bsi_stmt (*bsi);
3013 if (!stmt_ends_bb_p (tmp))
3014 return true;
3015
3016 /* Insert code just before returning the value. We may need to decompose
3017 the return in the case it contains non-trivial operand. */
3018 if (TREE_CODE (tmp) == RETURN_EXPR)
3019 {
3020 tree op = TREE_OPERAND (tmp, 0);
3021 if (!is_gimple_val (op))
3022 {
1e128c5f 3023 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3024 bsi_insert_before (bsi, op, BSI_NEW_STMT);
3025 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3026 }
3027 bsi_prev (bsi);
3028 return true;
3029 }
3030 }
3031
3032 /* Otherwise, create a new basic block, and split this edge. */
3033 dest = split_edge (e);
3034 if (new_bb)
3035 *new_bb = dest;
628f6a4e 3036 e = EDGE_PRED (dest, 0);
3037 goto restart;
3038}
3039
3040
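Editorial note: the placement policy implemented above reduces to three cases. The sketch below restates it over a toy CFG type so the decision order is explicit; the types and names are illustrative only, not GCC's API, and the real routine additionally excludes the entry and exit blocks, skips leading labels, and decomposes RETURN_EXPRs as shown above.

enum toy_insert_loc
{
  TOY_INSERT_IN_DEST,		/* insert at the start of E->dest */
  TOY_INSERT_IN_SRC,		/* insert at the end of E->src */
  TOY_INSERT_ON_NEW_BLOCK	/* split the edge and use the new block */
};

struct toy_block { int n_preds, n_succs, has_phis, ends_in_control; };
struct toy_edge  { struct toy_block *src, *dest; int abnormal; };

static enum toy_insert_loc
choose_toy_insert_loc (const struct toy_edge *e)
{
  /* The destination wins when it has a single predecessor and no PHI
     nodes: nothing else can reach it, so the statement is safe there.  */
  if (e->dest->n_preds == 1 && !e->dest->has_phis)
    return TOY_INSERT_IN_DEST;

  /* Otherwise use the end of the source when this is its only, normal
     outgoing edge and the block does not end in a control statement.  */
  if (!e->abnormal && e->src->n_succs == 1 && !e->src->ends_in_control)
    return TOY_INSERT_IN_SRC;

  /* Otherwise the edge must be split and a fresh block created.  */
  return TOY_INSERT_ON_NEW_BLOCK;
}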
3041/* This routine will commit all pending edge insertions, creating any new
8e731e4e 3042 basic blocks which are necessary. */
3043
3044void
8e731e4e 3045bsi_commit_edge_inserts (void)
3046{
3047 basic_block bb;
3048 edge e;
628f6a4e 3049 edge_iterator ei;
6de9cd9a 3050
edfaf675 3051 bsi_commit_one_edge_insert (EDGE_SUCC (ENTRY_BLOCK_PTR, 0), NULL);
3052
3053 FOR_EACH_BB (bb)
628f6a4e 3054 FOR_EACH_EDGE (e, ei, bb->succs)
edfaf675 3055 bsi_commit_one_edge_insert (e, NULL);
3056}
3057
3058
3059/* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3060 to this block, otherwise set it to NULL. */
6de9cd9a 3061
3062void
3063bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
6de9cd9a 3064{
3065 if (new_bb)
3066 *new_bb = NULL;
3067 if (PENDING_STMT (e))
3068 {
3069 block_stmt_iterator bsi;
3070 tree stmt = PENDING_STMT (e);
3071
3072 PENDING_STMT (e) = NULL_TREE;
3073
edfaf675 3074 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3075 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3076 else
3077 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3078 }
3079}
3080
3081
3082/* Add STMT to the pending list of edge E. No actual insertion is
3083 made until a call to bsi_commit_edge_inserts () is made. */
3084
3085void
3086bsi_insert_on_edge (edge e, tree stmt)
3087{
3088 append_to_statement_list (stmt, &PENDING_STMT (e));
3089}
3090
3091/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If new block has to
3092 be created, it is returned. */
3093
3094basic_block
3095bsi_insert_on_edge_immediate (edge e, tree stmt)
3096{
3097 block_stmt_iterator bsi;
3098 basic_block new_bb = NULL;
3099
1e128c5f 3100 gcc_assert (!PENDING_STMT (e));
3101
3102 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3103 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3104 else
3105 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3106
3107 return new_bb;
3108}
6de9cd9a 3109
3110/*---------------------------------------------------------------------------
3111 Tree specific functions for CFG manipulation
3112---------------------------------------------------------------------------*/
3113
3114/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3115
3116static void
3117reinstall_phi_args (edge new_edge, edge old_edge)
3118{
3119 tree var, phi;
3120
3121 if (!PENDING_STMT (old_edge))
3122 return;
3123
3124 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3125 var && phi;
3126 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3127 {
3128 tree result = TREE_PURPOSE (var);
3129 tree arg = TREE_VALUE (var);
3130
3131 gcc_assert (result == PHI_RESULT (phi));
3132
d2e398df 3133 add_phi_arg (phi, arg, new_edge);
3134 }
3135
3136 PENDING_STMT (old_edge) = NULL;
3137}
3138
6de9cd9a
DN
3139/* Split a (typically critical) edge EDGE_IN. Return the new block.
3140 Abort on abnormal edges. */
3141
3142static basic_block
3143tree_split_edge (edge edge_in)
3144{
3145 basic_block new_bb, after_bb, dest, src;
3146 edge new_edge, e;
3147
3148 /* Abnormal edges cannot be split. */
1e128c5f 3149 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3150
3151 src = edge_in->src;
3152 dest = edge_in->dest;
3153
3154 /* Place the new block in the block list. Try to keep the new block
3155 near its "logical" location. This is of most help to humans looking
3156 at debugging dumps. */
7510740d 3157 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
6de9cd9a 3158 after_bb = edge_in->src;
3159 else
3160 after_bb = dest->prev_bb;
3161
3162 new_bb = create_empty_bb (after_bb);
3163 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3164 new_bb->count = edge_in->count;
6de9cd9a 3165 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3166 new_edge->probability = REG_BR_PROB_BASE;
3167 new_edge->count = edge_in->count;
6de9cd9a 3168
3169 e = redirect_edge_and_branch (edge_in, new_bb);
3170 gcc_assert (e);
4f7db7f7 3171 reinstall_phi_args (new_edge, e);
3172
3173 return new_bb;
3174}
3175
3176
3177/* Return true when BB has label LABEL in it. */
3178
3179static bool
3180has_label_p (basic_block bb, tree label)
3181{
3182 block_stmt_iterator bsi;
3183
3184 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3185 {
3186 tree stmt = bsi_stmt (bsi);
3187
3188 if (TREE_CODE (stmt) != LABEL_EXPR)
3189 return false;
3190 if (LABEL_EXPR_LABEL (stmt) == label)
3191 return true;
3192 }
3193 return false;
3194}
3195
3196
3197/* Callback for walk_tree, check that all elements with address taken are
3198 properly noticed as such. */
3199
3200static tree
2fbe90f2 3201verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3202{
3203 tree t = *tp, x;
3204
3205 if (TYPE_P (t))
3206 *walk_subtrees = 0;
2fbe90f2 3207
3208 /* Check operand N for being valid GIMPLE and give error MSG if not.
3209 We check for constants explicitly since they are not considered
3210 gimple invariants if they overflowed. */
2fbe90f2 3211#define CHECK_OP(N, MSG) \
3212 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3213 && !is_gimple_val (TREE_OPERAND (t, N))) \
2fbe90f2 3214 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3215
3216 switch (TREE_CODE (t))
3217 {
3218 case SSA_NAME:
3219 if (SSA_NAME_IN_FREE_LIST (t))
3220 {
3221 error ("SSA name in freelist but still referenced");
3222 return *tp;
3223 }
3224 break;
3225
3226 case MODIFY_EXPR:
3227 x = TREE_OPERAND (t, 0);
3228 if (TREE_CODE (x) == BIT_FIELD_REF
3229 && is_gimple_reg (TREE_OPERAND (x, 0)))
3230 {
3231 error ("GIMPLE register modified with BIT_FIELD_REF");
2fbe90f2 3232 return t;
3233 }
3234 break;
3235
3236 case ADDR_EXPR:
3237 /* Skip any references (they will be checked when we recurse down the
3238 tree) and ensure that any variable used as a prefix is marked
3239 addressable. */
3240 for (x = TREE_OPERAND (t, 0);
3241 (handled_component_p (x)
3242 || TREE_CODE (x) == REALPART_EXPR
3243 || TREE_CODE (x) == IMAGPART_EXPR);
3244 x = TREE_OPERAND (x, 0))
3245 ;
3246
3247 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3248 return NULL;
3249 if (!TREE_ADDRESSABLE (x))
3250 {
3251 error ("address taken, but ADDRESSABLE bit not set");
3252 return x;
3253 }
3254 break;
3255
3256 case COND_EXPR:
a6234684 3257 x = COND_EXPR_COND (t);
3258 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3259 {
3260 error ("non-boolean used in condition");
3261 return x;
3262 }
3263 break;
3264
3265 case NOP_EXPR:
3266 case CONVERT_EXPR:
3267 case FIX_TRUNC_EXPR:
3268 case FIX_CEIL_EXPR:
3269 case FIX_FLOOR_EXPR:
3270 case FIX_ROUND_EXPR:
3271 case FLOAT_EXPR:
3272 case NEGATE_EXPR:
3273 case ABS_EXPR:
3274 case BIT_NOT_EXPR:
3275 case NON_LVALUE_EXPR:
3276 case TRUTH_NOT_EXPR:
2fbe90f2 3277 CHECK_OP (0, "Invalid operand to unary operator");
3278 break;
3279
3280 case REALPART_EXPR:
3281 case IMAGPART_EXPR:
3282 case COMPONENT_REF:
3283 case ARRAY_REF:
3284 case ARRAY_RANGE_REF:
3285 case BIT_FIELD_REF:
3286 case VIEW_CONVERT_EXPR:
3287 /* We have a nest of references. Verify that each of the operands
3288 that determine where to reference is either a constant or a variable,
3289 verify that the base is valid, and then show we've already checked
3290 the subtrees. */
3291 while (TREE_CODE (t) == REALPART_EXPR || TREE_CODE (t) == IMAGPART_EXPR
3292 || handled_component_p (t))
3293 {
3294 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3295 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3296 else if (TREE_CODE (t) == ARRAY_REF
3297 || TREE_CODE (t) == ARRAY_RANGE_REF)
3298 {
3299 CHECK_OP (1, "Invalid array index.");
3300 if (TREE_OPERAND (t, 2))
3301 CHECK_OP (2, "Invalid array lower bound.");
3302 if (TREE_OPERAND (t, 3))
3303 CHECK_OP (3, "Invalid array stride.");
3304 }
3305 else if (TREE_CODE (t) == BIT_FIELD_REF)
3306 {
3307 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3308 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3309 }
3310
3311 t = TREE_OPERAND (t, 0);
3312 }
3313
6615c446 3314 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3315 {
3316 error ("Invalid reference prefix.");
3317 return t;
3318 }
3319 *walk_subtrees = 0;
3320 break;
3321
3322 case LT_EXPR:
3323 case LE_EXPR:
3324 case GT_EXPR:
3325 case GE_EXPR:
3326 case EQ_EXPR:
3327 case NE_EXPR:
3328 case UNORDERED_EXPR:
3329 case ORDERED_EXPR:
3330 case UNLT_EXPR:
3331 case UNLE_EXPR:
3332 case UNGT_EXPR:
3333 case UNGE_EXPR:
3334 case UNEQ_EXPR:
d1a7edaf 3335 case LTGT_EXPR:
3336 case PLUS_EXPR:
3337 case MINUS_EXPR:
3338 case MULT_EXPR:
3339 case TRUNC_DIV_EXPR:
3340 case CEIL_DIV_EXPR:
3341 case FLOOR_DIV_EXPR:
3342 case ROUND_DIV_EXPR:
3343 case TRUNC_MOD_EXPR:
3344 case CEIL_MOD_EXPR:
3345 case FLOOR_MOD_EXPR:
3346 case ROUND_MOD_EXPR:
3347 case RDIV_EXPR:
3348 case EXACT_DIV_EXPR:
3349 case MIN_EXPR:
3350 case MAX_EXPR:
3351 case LSHIFT_EXPR:
3352 case RSHIFT_EXPR:
3353 case LROTATE_EXPR:
3354 case RROTATE_EXPR:
3355 case BIT_IOR_EXPR:
3356 case BIT_XOR_EXPR:
3357 case BIT_AND_EXPR:
3358 CHECK_OP (0, "Invalid operand to binary operator");
3359 CHECK_OP (1, "Invalid operand to binary operator");
3360 break;
3361
3362 default:
3363 break;
3364 }
3365 return NULL;
3366
3367#undef CHECK_OP
3368}
3369
3370
3371/* Verify STMT, return true if STMT is not in GIMPLE form.
3372 TODO: Implement type checking. */
3373
3374static bool
1eaba2f2 3375verify_stmt (tree stmt, bool last_in_block)
3376{
3377 tree addr;
3378
3379 if (!is_gimple_stmt (stmt))
3380 {
3381 error ("Is not a valid GIMPLE statement.");
1eaba2f2 3382 goto fail;
3383 }
3384
3385 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3386 if (addr)
3387 {
3388 debug_generic_stmt (addr);
3389 return true;
3390 }
3391
3392 /* If the statement is marked as part of an EH region, then it is
3393 expected that the statement could throw. Verify that when
3394 optimizations simplify a statement to the point where we can
3395 prove it cannot throw, the other data structures are updated
3396 to match. */
3397 if (lookup_stmt_eh_region (stmt) >= 0)
3398 {
3399 if (!tree_could_throw_p (stmt))
3400 {
971801ff 3401 error ("Statement marked for throw, but doesn%'t.");
3402 goto fail;
3403 }
3404 if (!last_in_block && tree_can_throw_internal (stmt))
3405 {
3406 error ("Statement marked for throw in middle of block.");
3407 goto fail;
3408 }
3409 }
3410
6de9cd9a 3411 return false;
3412
3413 fail:
3414 debug_generic_stmt (stmt);
3415 return true;
3416}
3417
3418
3419/* Return true when T can be shared. */
3420
3421static bool
3422tree_node_can_be_shared (tree t)
3423{
6615c446 3424 if (IS_TYPE_OR_DECL_P (t)
3425 /* We check for constants explicitly since they are not considered
3426 gimple invariants if they overflowed. */
6615c446 3427 || CONSTANT_CLASS_P (t)
6de9cd9a 3428 || is_gimple_min_invariant (t)
5e23162d 3429 || TREE_CODE (t) == SSA_NAME
bac8beb4 3430 || t == error_mark_node)
3431 return true;
3432
3433 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3434 return true;
3435
44de5aeb 3436 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3437 /* We check for constants explicitly since they are not considered
3438 gimple invariants if they overflowed. */
6615c446 3439 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3440 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3441 || (TREE_CODE (t) == COMPONENT_REF
3442 || TREE_CODE (t) == REALPART_EXPR
3443 || TREE_CODE (t) == IMAGPART_EXPR))
3444 t = TREE_OPERAND (t, 0);
3445
3446 if (DECL_P (t))
3447 return true;
3448
3449 return false;
3450}
3451
3452
3453/* Called via walk_trees. Verify tree sharing. */
3454
3455static tree
3456verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3457{
3458 htab_t htab = (htab_t) data;
3459 void **slot;
3460
3461 if (tree_node_can_be_shared (*tp))
3462 {
3463 *walk_subtrees = false;
3464 return NULL;
3465 }
3466
3467 slot = htab_find_slot (htab, *tp, INSERT);
3468 if (*slot)
3469 return *slot;
3470 *slot = *tp;
3471
3472 return NULL;
3473}
3474
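Editorial note: verify_node_sharing above reports the first tree node reached twice by recording every visited pointer in a hash table. The sketch below shows the same first-duplicate idea with a plain linear seen-set, which is enough to make the invariant clear; the fixed capacity and the names are illustrative only.

#define TOY_MAX_SEEN 4096

/* Return nonzero if NODE was already recorded; otherwise remember it.
   A real implementation would use a hash table, as above.  */
static int
toy_seen_before (const void **seen, int *n_seen, const void *node)
{
  int i;

  for (i = 0; i < *n_seen; i++)
    if (seen[i] == node)
      return 1;

  if (*n_seen < TOY_MAX_SEEN)
    seen[(*n_seen)++] = node;
  return 0;
}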
3475
3476/* Verify the GIMPLE statement chain. */
3477
3478void
3479verify_stmts (void)
3480{
3481 basic_block bb;
3482 block_stmt_iterator bsi;
3483 bool err = false;
3484 htab_t htab;
3485 tree addr;
3486
3487 timevar_push (TV_TREE_STMT_VERIFY);
3488 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3489
3490 FOR_EACH_BB (bb)
3491 {
3492 tree phi;
3493 int i;
3494
17192884 3495 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3496 {
3497 int phi_num_args = PHI_NUM_ARGS (phi);
3498
3499 for (i = 0; i < phi_num_args; i++)
3500 {
3501 tree t = PHI_ARG_DEF (phi, i);
3502 tree addr;
3503
3504 /* Addressable variables do have SSA_NAMEs but they
3505 are not considered gimple values. */
3506 if (TREE_CODE (t) != SSA_NAME
3507 && TREE_CODE (t) != FUNCTION_DECL
3508 && !is_gimple_val (t))
3509 {
3510 error ("PHI def is not a GIMPLE value");
3511 debug_generic_stmt (phi);
3512 debug_generic_stmt (t);
3513 err |= true;
3514 }
3515
3516 addr = walk_tree (&t, verify_expr, NULL, NULL);
3517 if (addr)
3518 {
3519 debug_generic_stmt (addr);
3520 err |= true;
3521 }
3522
3523 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3524 if (addr)
3525 {
3526 error ("Incorrect sharing of tree nodes");
3527 debug_generic_stmt (phi);
3528 debug_generic_stmt (addr);
3529 err |= true;
3530 }
3531 }
3532 }
3533
1eaba2f2 3534 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3535 {
3536 tree stmt = bsi_stmt (bsi);
3537 bsi_next (&bsi);
3538 err |= verify_stmt (stmt, bsi_end_p (bsi));
3539 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3540 if (addr)
3541 {
3542 error ("Incorrect sharing of tree nodes");
3543 debug_generic_stmt (stmt);
3544 debug_generic_stmt (addr);
3545 err |= true;
3546 }
3547 }
3548 }
3549
3550 if (err)
3551 internal_error ("verify_stmts failed.");
3552
3553 htab_delete (htab);
3554 timevar_pop (TV_TREE_STMT_VERIFY);
3555}
3556
3557
3558/* Verifies that the flow information is OK. */
3559
3560static int
3561tree_verify_flow_info (void)
3562{
3563 int err = 0;
3564 basic_block bb;
3565 block_stmt_iterator bsi;
3566 tree stmt;
3567 edge e;
628f6a4e 3568 edge_iterator ei;
3569
3570 if (ENTRY_BLOCK_PTR->stmt_list)
3571 {
3572 error ("ENTRY_BLOCK has a statement list associated with it\n");
3573 err = 1;
3574 }
3575
3576 if (EXIT_BLOCK_PTR->stmt_list)
3577 {
3578 error ("EXIT_BLOCK has a statement list associated with it\n");
3579 err = 1;
3580 }
3581
628f6a4e 3582 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3583 if (e->flags & EDGE_FALLTHRU)
3584 {
3585 error ("Fallthru to exit from bb %d\n", e->src->index);
3586 err = 1;
3587 }
3588
3589 FOR_EACH_BB (bb)
3590 {
3591 bool found_ctrl_stmt = false;
3592
3593 /* Skip labels on the start of basic block. */
3594 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3595 {
3596 if (TREE_CODE (bsi_stmt (bsi)) != LABEL_EXPR)
3597 break;
3598
3599 if (label_to_block (LABEL_EXPR_LABEL (bsi_stmt (bsi))) != bb)
3600 {
77568960 3601 tree stmt = bsi_stmt (bsi);
6de9cd9a 3602 error ("Label %s to block does not match in bb %d\n",
77568960 3603 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3604 bb->index);
3605 err = 1;
3606 }
3607
3608 if (decl_function_context (LABEL_EXPR_LABEL (bsi_stmt (bsi)))
3609 != current_function_decl)
3610 {
77568960 3611 tree stmt = bsi_stmt (bsi);
6de9cd9a 3612 error ("Label %s has incorrect context in bb %d\n",
77568960 3613 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3614 bb->index);
3615 err = 1;
3616 }
3617 }
3618
3619 /* Verify that body of basic block BB is free of control flow. */
3620 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3621 {
3622 tree stmt = bsi_stmt (bsi);
3623
3624 if (found_ctrl_stmt)
3625 {
3626 error ("Control flow in the middle of basic block %d\n",
3627 bb->index);
3628 err = 1;
3629 }
3630
3631 if (stmt_ends_bb_p (stmt))
3632 found_ctrl_stmt = true;
3633
3634 if (TREE_CODE (stmt) == LABEL_EXPR)
3635 {
3636 error ("Label %s in the middle of basic block %d\n",
3637 IDENTIFIER_POINTER (DECL_NAME (stmt)),
3638 bb->index);
3639 err = 1;
3640 }
3641 }
3642 bsi = bsi_last (bb);
3643 if (bsi_end_p (bsi))
3644 continue;
3645
3646 stmt = bsi_stmt (bsi);
3647
3648 if (is_ctrl_stmt (stmt))
3649 {
628f6a4e 3650 FOR_EACH_EDGE (e, ei, bb->succs)
3651 if (e->flags & EDGE_FALLTHRU)
3652 {
3653 error ("Fallthru edge after a control statement in bb %d \n",
3654 bb->index);
3655 err = 1;
3656 }
3657 }
3658
3659 switch (TREE_CODE (stmt))
3660 {
3661 case COND_EXPR:
3662 {
3663 edge true_edge;
3664 edge false_edge;
3665 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3666 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3667 {
3668 error ("Structured COND_EXPR at the end of bb %d\n", bb->index);
3669 err = 1;
3670 }
3671
3672 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3673
3674 if (!true_edge || !false_edge
3675 || !(true_edge->flags & EDGE_TRUE_VALUE)
3676 || !(false_edge->flags & EDGE_FALSE_VALUE)
3677 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3678 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
628f6a4e 3679 || EDGE_COUNT (bb->succs) >= 3)
3680 {
3681 error ("Wrong outgoing edge flags at end of bb %d\n",
3682 bb->index);
3683 err = 1;
3684 }
3685
3686 if (!has_label_p (true_edge->dest,
3687 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3688 {
971801ff 3689 error ("%<then%> label does not match edge at end of bb %d\n",
3690 bb->index);
3691 err = 1;
3692 }
3693
3694 if (!has_label_p (false_edge->dest,
3695 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3696 {
971801ff 3697 error ("%<else%> label does not match edge at end of bb %d\n",
3698 bb->index);
3699 err = 1;
3700 }
3701 }
3702 break;
3703
3704 case GOTO_EXPR:
3705 if (simple_goto_p (stmt))
3706 {
3707 error ("Explicit goto at end of bb %d\n", bb->index);
3708 err = 1;
3709 }
3710 else
3711 {
3712 /* FIXME. We should double check that the labels in the
3713 destination blocks have their address taken. */
628f6a4e 3714 FOR_EACH_EDGE (e, ei, bb->succs)
3715 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3716 | EDGE_FALSE_VALUE))
3717 || !(e->flags & EDGE_ABNORMAL))
3718 {
3719 error ("Wrong outgoing edge flags at end of bb %d\n",
3720 bb->index);
3721 err = 1;
3722 }
3723 }
3724 break;
3725
3726 case RETURN_EXPR:
3727 if (EDGE_COUNT (bb->succs) != 1
3728 || (EDGE_SUCC (bb, 0)->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3729 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3730 {
3731 error ("Wrong outgoing edge flags at end of bb %d\n", bb->index);
3732 err = 1;
3733 }
628f6a4e 3734 if (EDGE_SUCC (bb, 0)->dest != EXIT_BLOCK_PTR)
3735 {
3736 error ("Return edge does not point to exit in bb %d\n",
3737 bb->index);
3738 err = 1;
3739 }
3740 break;
3741
3742 case SWITCH_EXPR:
3743 {
7853504d 3744 tree prev;
3745 edge e;
3746 size_t i, n;
3747 tree vec;
3748
3749 vec = SWITCH_LABELS (stmt);
3750 n = TREE_VEC_LENGTH (vec);
3751
3752 /* Mark all the destination basic blocks. */
3753 for (i = 0; i < n; ++i)
3754 {
3755 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3756 basic_block label_bb = label_to_block (lab);
3757
1e128c5f 3758 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3759 label_bb->aux = (void *)1;
3760 }
3761
3762 /* Verify that the case labels are sorted. */
3763 prev = TREE_VEC_ELT (vec, 0);
3764 for (i = 1; i < n - 1; ++i)
3765 {
3766 tree c = TREE_VEC_ELT (vec, i);
3767 if (! CASE_LOW (c))
3768 {
3769 error ("Found default case not at end of case vector");
3770 err = 1;
3771 continue;
3772 }
3773 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3774 {
3775 error ("Case labels not sorted:\n ");
3776 print_generic_expr (stderr, prev, 0);
3777 fprintf (stderr," is greater than ");
3778 print_generic_expr (stderr, c, 0);
3779 fprintf (stderr," but comes before it.\n");
3780 err = 1;
3781 }
3782 prev = c;
3783 }
3784 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3785 {
3786 error ("No default case found at end of case vector");
3787 err = 1;
3788 }
3789
628f6a4e 3790 FOR_EACH_EDGE (e, ei, bb->succs)
3791 {
3792 if (!e->dest->aux)
3793 {
3794 error ("Extra outgoing edge %d->%d\n",
3795 bb->index, e->dest->index);
3796 err = 1;
3797 }
3798 e->dest->aux = (void *)2;
3799 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3800 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3801 {
3802 error ("Wrong outgoing edge flags at end of bb %d\n",
3803 bb->index);
3804 err = 1;
3805 }
3806 }
3807
3808 /* Check that we have all of them. */
3809 for (i = 0; i < n; ++i)
3810 {
3811 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3812 basic_block label_bb = label_to_block (lab);
3813
3814 if (label_bb->aux != (void *)2)
3815 {
6ade0959 3816 error ("Missing edge %i->%i",
3817 bb->index, label_bb->index);
3818 err = 1;
3819 }
3820 }
3821
628f6a4e 3822 FOR_EACH_EDGE (e, ei, bb->succs)
3823 e->dest->aux = (void *)0;
3824 }
3825
3826 default: ;
3827 }
3828 }
3829
3830 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3831 verify_dominators (CDI_DOMINATORS);
3832
3833 return err;
3834}
3835
3836
f0b698c1 3837/* Updates phi nodes after creating a forwarder block joined
3838 by edge FALLTHRU. */
3839
3840static void
3841tree_make_forwarder_block (edge fallthru)
3842{
3843 edge e;
628f6a4e 3844 edge_iterator ei;
6de9cd9a 3845 basic_block dummy, bb;
5ae71719 3846 tree phi, new_phi, var;
3847
3848 dummy = fallthru->src;
3849 bb = fallthru->dest;
3850
628f6a4e 3851 if (EDGE_COUNT (bb->preds) == 1)
3852 return;
3853
3854 /* If we redirected a branch we must create new phi nodes at the
3855 start of BB. */
17192884 3856 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3857 {
3858 var = PHI_RESULT (phi);
3859 new_phi = create_phi_node (var, bb);
3860 SSA_NAME_DEF_STMT (var) = new_phi;
d00ad49b 3861 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
d2e398df 3862 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3863 }
3864
17192884 3865 /* Ensure that the PHI node chain is in the same order. */
5ae71719 3866 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3867
3868 /* Add the arguments we have stored on edges. */
628f6a4e 3869 FOR_EACH_EDGE (e, ei, bb->preds)
3870 {
3871 if (e == fallthru)
3872 continue;
3873
71882046 3874 flush_pending_stmts (e);
3875 }
3876}
3877
3878
3879/* Return true if basic block BB does nothing except pass control
3880 flow to another block and that we can safely insert a label at
3881 the start of the successor block.
3882
3883 As a precondition, we require that BB be not equal to
3884 ENTRY_BLOCK_PTR. */
3885
3886static bool
3887tree_forwarder_block_p (basic_block bb)
3888{
3889 block_stmt_iterator bsi;
6de9cd9a 3890
10a52335 3891 /* BB must have a single outgoing edge. */
628f6a4e 3892 if (EDGE_COUNT (bb->succs) != 1
3893 /* BB cannot have any PHI nodes. This could potentially be
3894 relaxed early in compilation if we re-rewrote the variables
3895 appearing in any PHI nodes in forwarder blocks. */
3896 || phi_nodes (bb)
3897 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
628f6a4e 3898 || EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR
3899 /* BB may not have an abnormal outgoing edge. */
3900 || (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
78b6731d 3901 return false;
6de9cd9a 3902
10a52335
KH
3903#if ENABLE_CHECKING
3904 gcc_assert (bb != ENTRY_BLOCK_PTR);
3905#endif
3906
6de9cd9a
DN
 3907  /* Now walk through the statements.  We can ignore labels; anything else
3908 means this is not a forwarder block. */
3909 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3910 {
3911 tree stmt = bsi_stmt (bsi);
3912
3913 switch (TREE_CODE (stmt))
3914 {
3915 case LABEL_EXPR:
3916 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3917 return false;
3918 break;
3919
3920 default:
6de9cd9a
DN
3921 return false;
3922 }
3923 }
3924
91d9ede4
KH
3925 if (find_edge (ENTRY_BLOCK_PTR, bb))
3926 return false;
3927
6de9cd9a
DN
3928 return true;
3929}
3930
072269d8
KH
 3931/* Thread jumps from BB through forwarder blocks; return true if any edge
      was redirected.  */
3932
3933static bool
3934thread_jumps_from_bb (basic_block bb)
3935{
3936 edge_iterator ei;
3937 edge e;
3938 bool retval = false;
3939
 3940  /* Examine each outgoing edge of BB to see whether its destination
 3941     is forwardable.  */
3942 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3943 {
3944 int freq;
3945 gcov_type count;
3946 edge last, old;
3947 basic_block dest, tmp, curr, old_dest;
3948 tree phi;
3949 int arg;
3950
3951 /* If the edge is abnormal or its destination is not
3952 forwardable, then there's nothing to do. */
3953 if ((e->flags & EDGE_ABNORMAL)
3954 || !bb_ann (e->dest)->forwardable)
3955 {
3956 ei_next (&ei);
3957 continue;
3958 }
3959
072269d8
KH
3960 /* Now walk through as many forwarder blocks as possible to find
3961 the ultimate destination we want to thread our jump to. */
3962 last = EDGE_SUCC (e->dest, 0);
3963 bb_ann (e->dest)->forwardable = 0;
3964 for (dest = EDGE_SUCC (e->dest, 0)->dest;
3965 bb_ann (dest)->forwardable;
3966 last = EDGE_SUCC (dest, 0),
3967 dest = EDGE_SUCC (dest, 0)->dest)
3968 bb_ann (dest)->forwardable = 0;
3969
3970 /* Reset the forwardable marks to 1. */
3971 for (tmp = e->dest;
3972 tmp != dest;
3973 tmp = EDGE_SUCC (tmp, 0)->dest)
3974 bb_ann (tmp)->forwardable = 1;
3975
3976 if (dest == e->dest)
3977 {
3978 ei_next (&ei);
3979 continue;
3980 }
3981
3982 old = find_edge (bb, dest);
3983 if (old)
3984 {
3985 /* If there already is an edge, check whether the values in
3986 phi nodes differ. */
3987 if (!phi_alternatives_equal (dest, last, old))
3988 {
 3989	      /* The previous block is a forwarder.  Redirect our jump
3990 to that target instead since we know it has no PHI
3991 nodes that will need updating. */
3992 dest = last->src;
3993
3994 /* That might mean that no forwarding at all is
3995 possible. */
3996 if (dest == e->dest)
3997 {
3998 ei_next (&ei);
3999 continue;
4000 }
4001
4002 old = find_edge (bb, dest);
4003 }
4004 }
4005
4006 /* Perform the redirection. */
4007 retval = true;
385efa80
KH
4008 count = e->count;
4009 freq = EDGE_FREQUENCY (e);
072269d8
KH
4010 old_dest = e->dest;
4011 e = redirect_edge_and_branch (e, dest);
4012
4013 /* Update the profile. */
4014 if (profile_status != PROFILE_ABSENT)
4015 for (curr = old_dest;
4016 curr != dest;
4017 curr = EDGE_SUCC (curr, 0)->dest)
4018 {
4019 curr->frequency -= freq;
4020 if (curr->frequency < 0)
4021 curr->frequency = 0;
4022 curr->count -= count;
4023 if (curr->count < 0)
4024 curr->count = 0;
4025 EDGE_SUCC (curr, 0)->count -= count;
4026 if (EDGE_SUCC (curr, 0)->count < 0)
4027 EDGE_SUCC (curr, 0)->count = 0;
4028 }
4029
4030 if (!old)
4031 {
4032 /* Update PHI nodes. We know that the new argument should
4033 have the same value as the argument associated with LAST.
4034 Otherwise we would have changed our target block
4035 above. */
4036 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
4037 {
4038 arg = phi_arg_from_edge (phi, last);
4039 gcc_assert (arg >= 0);
d2e398df 4040 add_phi_arg (phi, PHI_ARG_DEF (phi, arg), e);
072269d8
KH
4041 }
4042 }
4043
 4044      /* Remove the unreachable blocks.  Observe that if all blocks
 4045	  were reachable before, the only ones that can become unreachable
 4046	  are those on the path we threaded over that had no predecessor
 4047	  outside of the path.  */
4048 for (; old_dest != dest; old_dest = tmp)
4049 {
4050 tmp = EDGE_SUCC (old_dest, 0)->dest;
4051
4052 if (EDGE_COUNT (old_dest->preds) > 0)
4053 break;
4054
4055 delete_basic_block (old_dest);
4056 }
4057
4058 /* Update the dominators. */
4059 if (dom_info_available_p (CDI_DOMINATORS))
4060 {
4061 /* If the dominator of the destination was in the
4062 path, set its dominator to the start of the
4063 redirected edge. */
4064 if (get_immediate_dominator (CDI_DOMINATORS, old_dest) == NULL)
4065 set_immediate_dominator (CDI_DOMINATORS, old_dest, bb);
4066
4067 /* Now proceed like if we forwarded just over one edge at a
4068 time. Algorithm for forwarding edge S --> A over
4069 edge A --> B then is
4070
4071 if (idom (B) == A
4072 && !dominated_by (S, B))
4073 idom (B) = idom (A);
4074 recount_idom (A); */
4075
4076 for (; old_dest != dest; old_dest = tmp)
4077 {
4078 basic_block dom;
4079
4080 tmp = EDGE_SUCC (old_dest, 0)->dest;
4081
4082 if (get_immediate_dominator (CDI_DOMINATORS, tmp) == old_dest
4083 && !dominated_by_p (CDI_DOMINATORS, bb, tmp))
4084 {
4085 dom = get_immediate_dominator (CDI_DOMINATORS, old_dest);
4086 set_immediate_dominator (CDI_DOMINATORS, tmp, dom);
4087 }
4088
4089 dom = recount_dominator (CDI_DOMINATORS, old_dest);
4090 set_immediate_dominator (CDI_DOMINATORS, old_dest, dom);
4091 }
4092 }
4093 }
4094
4095 return retval;
4096}
4097
6de9cd9a
DN
4098
 4099/* Thread jumps over blocks containing no real statements (forwarder blocks).
4100
4101 This code should _not_ thread over obviously equivalent conditions
2abacef0
KH
4102 as that requires nontrivial updates to the SSA graph.
4103
4104 As a precondition, we require that all basic blocks be reachable.
4105 That is, there should be no opportunities left for
4106 delete_unreachable_blocks. */
072269d8 4107
6de9cd9a
DN
4108static bool
4109thread_jumps (void)
4110{
072269d8 4111 basic_block bb;
6de9cd9a 4112 bool retval = false;
afc3f396 4113 basic_block *worklist = xmalloc (sizeof (basic_block) * last_basic_block);
31864f59 4114 basic_block *current = worklist;
6de9cd9a
DN
4115
4116 FOR_EACH_BB (bb)
08445125
KH
4117 {
4118 bb_ann (bb)->forwardable = tree_forwarder_block_p (bb);
4119 bb->flags &= ~BB_VISITED;
4120 }
6de9cd9a 4121
af88d4ec
KH
 4122  /* We pretend that ENTRY_BLOCK_PTR is already in WORKLIST.  This way,
4123 ENTRY_BLOCK_PTR will never be entered into WORKLIST. */
4124 ENTRY_BLOCK_PTR->flags |= BB_VISITED;
4125
afc3f396
KH
 4126  /* Initialize WORKLIST with the non-forwarder blocks that
 4127     immediately precede forwarder blocks, because those are the ones
 4128     we know we can thread jumps from.  We use BB_VISITED to
 4129     indicate whether a given basic block is in WORKLIST or not,
 4130     thereby avoiding duplicates in WORKLIST.  */
08445125 4131 FOR_EACH_BB (bb)
6de9cd9a 4132 {
08445125
KH
4133 edge_iterator ei;
4134 edge e;
4135
4136 /* We are not interested in finding non-forwarder blocks
4137 directly. We want to find non-forwarder blocks as
4138 predecessors of a forwarder block. */
4139 if (!bb_ann (bb)->forwardable)
4140 continue;
4141
4142 /* Now we know BB is a forwarder block. Visit each of its
4143 incoming edges and add to WORKLIST all non-forwarder blocks
4144 among BB's predecessors. */
4145 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a 4146 {
0b371c72
KH
4147 /* We don't want to put a duplicate into WORKLIST. */
4148 if ((e->src->flags & BB_VISITED) == 0
4149 /* We are not interested in threading jumps from a forwarder
4150 block. */
4151 && !bb_ann (e->src)->forwardable)
08445125
KH
4152 {
4153 e->src->flags |= BB_VISITED;
31864f59 4154 *current++ = e->src;
08445125
KH
4155 }
4156 }
4157 }
6de9cd9a 4158
08445125 4159 /* Now let's drain WORKLIST. */
31864f59 4160 while (worklist != current)
08445125 4161 {
31864f59 4162 bb = *--current;
08445125 4163
cf566f7f 4164 /* BB is no longer in WORKLIST, so clear BB_VISITED. */
08445125
KH
4165 bb->flags &= ~BB_VISITED;
4166
4167 if (thread_jumps_from_bb (bb))
4168 {
4169 retval = true;
4170
4171 if (tree_forwarder_block_p (bb))
628f6a4e 4172 {
08445125
KH
4173 edge_iterator ej;
4174 edge f;
8a807136 4175
08445125
KH
4176 bb_ann (bb)->forwardable = true;
4177
4178 /* Attempts to thread through BB may have been blocked
4179 because BB was not a forwarder block before. Now
4180 that BB is a forwarder block, we should revisit BB's
4181 predecessors. */
4182 FOR_EACH_EDGE (f, ej, bb->preds)
4183 {
0b371c72
KH
4184 /* We don't want to put a duplicate into WORKLIST. */
4185 if ((f->src->flags & BB_VISITED) == 0
4186 /* We are not interested in threading jumps from a
4187 forwarder block. */
4188 && !bb_ann (f->src)->forwardable)
08445125
KH
4189 {
4190 f->src->flags |= BB_VISITED;
31864f59 4191 *current++ = f->src;
08445125
KH
4192 }
4193 }
8a807136 4194 }
e61d7b78 4195 }
6de9cd9a 4196 }
08445125 4197
af88d4ec
KH
4198 ENTRY_BLOCK_PTR->flags &= ~BB_VISITED;
4199
08445125 4200 free (worklist);
6de9cd9a
DN
4201
4202 return retval;
4203}
4204
4205
 4206/* Return a non-special label at the head of basic block BB.
4207 Create one if it doesn't exist. */
4208
d7621d3c 4209tree
6de9cd9a
DN
4210tree_block_label (basic_block bb)
4211{
4212 block_stmt_iterator i, s = bsi_start (bb);
4213 bool first = true;
4214 tree label, stmt;
4215
4216 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4217 {
4218 stmt = bsi_stmt (i);
4219 if (TREE_CODE (stmt) != LABEL_EXPR)
4220 break;
4221 label = LABEL_EXPR_LABEL (stmt);
4222 if (!DECL_NONLOCAL (label))
4223 {
4224 if (!first)
4225 bsi_move_before (&i, &s);
4226 return label;
4227 }
4228 }
4229
4230 label = create_artificial_label ();
4231 stmt = build1 (LABEL_EXPR, void_type_node, label);
4232 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4233 return label;
4234}
4235
4236
4237/* Attempt to perform edge redirection by replacing a possibly complex
4238 jump instruction by a goto or by removing the jump completely.
4239 This can apply only if all edges now point to the same block. The
4240 parameters and return values are equivalent to
4241 redirect_edge_and_branch. */
4242
4243static edge
4244tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4245{
4246 basic_block src = e->src;
6de9cd9a
DN
4247 block_stmt_iterator b;
4248 tree stmt;
6de9cd9a 4249
07b43a87
KH
4250 /* We can replace or remove a complex jump only when we have exactly
4251 two edges. */
4252 if (EDGE_COUNT (src->succs) != 2
4253 /* Verify that all targets will be TARGET. Specifically, the
4254 edge that is not E must also go to TARGET. */
4255 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6de9cd9a
DN
4256 return NULL;
4257
4258 b = bsi_last (src);
4259 if (bsi_end_p (b))
4260 return NULL;
4261 stmt = bsi_stmt (b);
4262
4263 if (TREE_CODE (stmt) == COND_EXPR
4264 || TREE_CODE (stmt) == SWITCH_EXPR)
4265 {
4266 bsi_remove (&b);
4267 e = ssa_redirect_edge (e, target);
4268 e->flags = EDGE_FALLTHRU;
4269 return e;
4270 }
4271
4272 return NULL;
4273}
4274
4275
4276/* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4277 edge representing the redirected branch. */
4278
4279static edge
4280tree_redirect_edge_and_branch (edge e, basic_block dest)
4281{
4282 basic_block bb = e->src;
4283 block_stmt_iterator bsi;
4284 edge ret;
4285 tree label, stmt;
4286
4287 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4288 return NULL;
4289
4290 if (e->src != ENTRY_BLOCK_PTR
4291 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4292 return ret;
4293
4294 if (e->dest == dest)
4295 return NULL;
4296
4297 label = tree_block_label (dest);
4298
4299 bsi = bsi_last (bb);
4300 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4301
4302 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4303 {
4304 case COND_EXPR:
4305 stmt = (e->flags & EDGE_TRUE_VALUE
4306 ? COND_EXPR_THEN (stmt)
4307 : COND_EXPR_ELSE (stmt));
4308 GOTO_DESTINATION (stmt) = label;
4309 break;
4310
4311 case GOTO_EXPR:
4312 /* No non-abnormal edges should lead from a non-simple goto, and
4313 simple ones should be represented implicitly. */
1e128c5f 4314 gcc_unreachable ();
6de9cd9a
DN
4315
4316 case SWITCH_EXPR:
4317 {
d6be0d7f 4318 tree cases = get_cases_for_edge (e, stmt);
6de9cd9a 4319
d6be0d7f
JL
4320 /* If we have a list of cases associated with E, then use it
4321 as it's a lot faster than walking the entire case vector. */
4322 if (cases)
6de9cd9a 4323 {
4edbbd3f 4324 edge e2 = find_edge (e->src, dest);
d6be0d7f
JL
4325 tree last, first;
4326
4327 first = cases;
4328 while (cases)
4329 {
4330 last = cases;
4331 CASE_LABEL (cases) = label;
4332 cases = TREE_CHAIN (cases);
4333 }
4334
4335 /* If there was already an edge in the CFG, then we need
4336 to move all the cases associated with E to E2. */
4337 if (e2)
4338 {
4339 tree cases2 = get_cases_for_edge (e2, stmt);
4340
4341 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4342 TREE_CHAIN (cases2) = first;
4343 }
6de9cd9a 4344 }
92b6dff3
JL
4345 else
4346 {
d6be0d7f
JL
4347 tree vec = SWITCH_LABELS (stmt);
4348 size_t i, n = TREE_VEC_LENGTH (vec);
4349
4350 for (i = 0; i < n; i++)
4351 {
4352 tree elt = TREE_VEC_ELT (vec, i);
4353
4354 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4355 CASE_LABEL (elt) = label;
4356 }
92b6dff3 4357 }
d6be0d7f 4358
92b6dff3 4359 break;
6de9cd9a 4360 }
6de9cd9a
DN
4361
4362 case RETURN_EXPR:
4363 bsi_remove (&bsi);
4364 e->flags |= EDGE_FALLTHRU;
4365 break;
4366
4367 default:
4368 /* Otherwise it must be a fallthru edge, and we don't need to
4369 do anything besides redirecting it. */
1e128c5f 4370 gcc_assert (e->flags & EDGE_FALLTHRU);
6de9cd9a
DN
4371 break;
4372 }
4373
4374 /* Update/insert PHI nodes as necessary. */
4375
4376 /* Now update the edges in the CFG. */
4377 e = ssa_redirect_edge (e, dest);
4378
4379 return e;
4380}
4381
4382
4383/* Simple wrapper, as we can always redirect fallthru edges. */
4384
4385static basic_block
4386tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4387{
4388 e = tree_redirect_edge_and_branch (e, dest);
1e128c5f 4389 gcc_assert (e);
6de9cd9a
DN
4390
4391 return NULL;
4392}
4393
4394
 4395/* Split basic block BB after statement STMT (but at least after the
4396 labels). If STMT is NULL, BB is split just after the labels. */
4397
4398static basic_block
4399tree_split_block (basic_block bb, void *stmt)
4400{
4401 block_stmt_iterator bsi, bsi_tgt;
4402 tree act;
4403 basic_block new_bb;
4404 edge e;
628f6a4e 4405 edge_iterator ei;
6de9cd9a
DN
4406
4407 new_bb = create_empty_bb (bb);
4408
4409 /* Redirect the outgoing edges. */
628f6a4e
BE
4410 new_bb->succs = bb->succs;
4411 bb->succs = NULL;
4412 FOR_EACH_EDGE (e, ei, new_bb->succs)
6de9cd9a
DN
4413 e->src = new_bb;
4414
4415 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4416 stmt = NULL;
4417
4418 /* Move everything from BSI to the new basic block. */
4419 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4420 {
4421 act = bsi_stmt (bsi);
4422 if (TREE_CODE (act) == LABEL_EXPR)
4423 continue;
4424
4425 if (!stmt)
4426 break;
4427
4428 if (stmt == act)
4429 {
4430 bsi_next (&bsi);
4431 break;
4432 }
4433 }
4434
4435 bsi_tgt = bsi_start (new_bb);
4436 while (!bsi_end_p (bsi))
4437 {
4438 act = bsi_stmt (bsi);
4439 bsi_remove (&bsi);
4440 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4441 }
4442
4443 return new_bb;
4444}
4445
4446
 4447/* Move basic block BB after block AFTER.  */
4448
4449static bool
4450tree_move_block_after (basic_block bb, basic_block after)
4451{
4452 if (bb->prev_bb == after)
4453 return true;
4454
4455 unlink_block (bb);
4456 link_block (bb, after);
4457
4458 return true;
4459}
4460
4461
 4462/* Return true if basic block BB can be duplicated.  */
4463
4464static bool
4465tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4466{
4467 return true;
4468}
4469
6de9cd9a
DN
4470/* Create a duplicate of the basic block BB. NOTE: This does not
4471 preserve SSA form. */
4472
4473static basic_block
4474tree_duplicate_bb (basic_block bb)
4475{
4476 basic_block new_bb;
4477 block_stmt_iterator bsi, bsi_tgt;
4c124b4c
AM
4478 tree phi, val;
4479 ssa_op_iter op_iter;
6de9cd9a
DN
4480
4481 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
b0382c67 4482
42759f1e
ZD
4483 /* First copy the phi nodes. We do not copy phi node arguments here,
4484 since the edges are not ready yet. Keep the chain of phi nodes in
4485 the same order, so that we can add them later. */
bb29d951 4486 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
b0382c67
ZD
4487 {
4488 mark_for_rewrite (PHI_RESULT (phi));
42759f1e 4489 create_phi_node (PHI_RESULT (phi), new_bb);
b0382c67 4490 }
5ae71719 4491 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
b0382c67 4492
6de9cd9a
DN
4493 bsi_tgt = bsi_start (new_bb);
4494 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4495 {
4496 tree stmt = bsi_stmt (bsi);
5f240ec4 4497 tree copy;
6de9cd9a
DN
4498
4499 if (TREE_CODE (stmt) == LABEL_EXPR)
4500 continue;
4501
b0382c67
ZD
4502 /* Record the definitions. */
4503 get_stmt_operands (stmt);
4504
4c124b4c
AM
4505 FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
4506 mark_for_rewrite (val);
b0382c67 4507
5f240ec4
ZD
4508 copy = unshare_expr (stmt);
4509
4510 /* Copy also the virtual operands. */
4511 get_stmt_ann (copy);
4512 copy_virtual_operands (copy, stmt);
4513
4514 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
6de9cd9a
DN
4515 }
4516
4517 return new_bb;
4518}
4519
42759f1e
ZD
4520/* Basic block BB_COPY was created by code duplication. Add phi node
4521 arguments for edges going out of BB_COPY. The blocks that were
4522 duplicated have rbi->duplicated set to one. */
4523
4524void
4525add_phi_args_after_copy_bb (basic_block bb_copy)
4526{
4527 basic_block bb, dest;
4528 edge e, e_copy;
628f6a4e 4529 edge_iterator ei;
42759f1e
ZD
4530 tree phi, phi_copy, phi_next, def;
4531
4532 bb = bb_copy->rbi->original;
4533
628f6a4e 4534 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
42759f1e
ZD
4535 {
4536 if (!phi_nodes (e_copy->dest))
4537 continue;
4538
4539 if (e_copy->dest->rbi->duplicated)
4540 dest = e_copy->dest->rbi->original;
4541 else
4542 dest = e_copy->dest;
4543
4544 e = find_edge (bb, dest);
4545 if (!e)
4546 {
4547 /* During loop unrolling the target of the latch edge is copied.
 4548	     In this case we are not looking for the edge to DEST, but for
 4549	     the edge to the duplicated block whose original was DEST.  */
628f6a4e 4550 FOR_EACH_EDGE (e, ei, bb->succs)
42759f1e
ZD
4551 if (e->dest->rbi->duplicated
4552 && e->dest->rbi->original == dest)
4553 break;
4554
4555 gcc_assert (e != NULL);
4556 }
4557
4558 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4559 phi;
eaf0dc02 4560 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
42759f1e 4561 {
eaf0dc02 4562 phi_next = PHI_CHAIN (phi);
42759f1e
ZD
4563
4564 gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
4565 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
d2e398df 4566 add_phi_arg (phi_copy, def, e_copy);
42759f1e
ZD
4567 }
4568 }
4569}
4570
4571/* Blocks in REGION_COPY array of length N_REGION were created by
4572 duplication of basic blocks. Add phi node arguments for edges
4573 going from these blocks. */
4574
4575void
4576add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4577{
4578 unsigned i;
4579
4580 for (i = 0; i < n_region; i++)
4581 region_copy[i]->rbi->duplicated = 1;
4582
4583 for (i = 0; i < n_region; i++)
4584 add_phi_args_after_copy_bb (region_copy[i]);
4585
4586 for (i = 0; i < n_region; i++)
4587 region_copy[i]->rbi->duplicated = 0;
4588}
4589
 4590/* Mapping of an old SSA name FROM_NAME to its replacement TO_NAME.  */
4591
4592struct ssa_name_map_entry
4593{
4594 tree from_name;
4595 tree to_name;
4596};
4597
4598/* Hash function for ssa_name_map_entry. */
4599
4600static hashval_t
4601ssa_name_map_entry_hash (const void *entry)
4602{
4603 const struct ssa_name_map_entry *en = entry;
4604 return SSA_NAME_VERSION (en->from_name);
4605}
4606
4607/* Equality function for ssa_name_map_entry. */
4608
4609static int
4610ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
4611{
4612 const struct ssa_name_map_entry *en = in_table;
4613
4614 return en->from_name == ssa_name;
4615}
4616
4617/* Allocate duplicates of ssa names in list DEFINITIONS and store the mapping
4618 to MAP. */
4619
4620void
4621allocate_ssa_names (bitmap definitions, htab_t *map)
4622{
4623 tree name;
4624 struct ssa_name_map_entry *entry;
4625 PTR *slot;
4626 unsigned ver;
87c476a2 4627 bitmap_iterator bi;
42759f1e
ZD
4628
4629 if (!*map)
4630 *map = htab_create (10, ssa_name_map_entry_hash,
4631 ssa_name_map_entry_eq, free);
87c476a2 4632 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
42759f1e
ZD
4633 {
4634 name = ssa_name (ver);
4635 slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
4636 INSERT);
4637 if (*slot)
4638 entry = *slot;
4639 else
4640 {
4641 entry = xmalloc (sizeof (struct ssa_name_map_entry));
4642 entry->from_name = name;
4643 *slot = entry;
4644 }
4645 entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
87c476a2 4646 }
42759f1e
ZD
4647}
4648
4649/* Rewrite the definition DEF in statement STMT to new ssa name as specified
4650 by the mapping MAP. */
4651
4652static void
4653rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
4654{
4655 tree name = DEF_FROM_PTR (def);
4656 struct ssa_name_map_entry *entry;
4657
4658 gcc_assert (TREE_CODE (name) == SSA_NAME);
4659
4660 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4661 if (!entry)
4662 return;
4663
4664 SET_DEF (def, entry->to_name);
4665 SSA_NAME_DEF_STMT (entry->to_name) = stmt;
4666}
4667
4668/* Rewrite the USE to new ssa name as specified by the mapping MAP. */
4669
4670static void
4671rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
4672{
4673 tree name = USE_FROM_PTR (use);
4674 struct ssa_name_map_entry *entry;
4675
4676 if (TREE_CODE (name) != SSA_NAME)
4677 return;
4678
4679 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4680 if (!entry)
4681 return;
4682
4683 SET_USE (use, entry->to_name);
4684}
4685
4686/* Rewrite the ssa names in basic block BB to new ones as specified by the
4687 mapping MAP. */
4688
4689void
4690rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
4691{
4692 unsigned i;
4693 edge e;
628f6a4e 4694 edge_iterator ei;
42759f1e
ZD
4695 tree phi, stmt;
4696 block_stmt_iterator bsi;
4697 use_optype uses;
4698 vuse_optype vuses;
4699 def_optype defs;
4700 v_may_def_optype v_may_defs;
4701 v_must_def_optype v_must_defs;
4702 stmt_ann_t ann;
4703
628f6a4e 4704 FOR_EACH_EDGE (e, ei, bb->preds)
42759f1e
ZD
4705 if (e->flags & EDGE_ABNORMAL)
4706 break;
4707
bb29d951 4708 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
42759f1e
ZD
4709 {
4710 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
4711 if (e)
4712 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
4713 }
4714
4715 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4716 {
4717 stmt = bsi_stmt (bsi);
4718 get_stmt_operands (stmt);
4719 ann = stmt_ann (stmt);
4720
4721 uses = USE_OPS (ann);
4722 for (i = 0; i < NUM_USES (uses); i++)
4723 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);
4724
4725 defs = DEF_OPS (ann);
4726 for (i = 0; i < NUM_DEFS (defs); i++)
4727 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);
4728
4729 vuses = VUSE_OPS (ann);
4730 for (i = 0; i < NUM_VUSES (vuses); i++)
4731 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);
4732
4733 v_may_defs = V_MAY_DEF_OPS (ann);
4734 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
4735 {
4736 rewrite_to_new_ssa_names_use
4737 (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
4738 rewrite_to_new_ssa_names_def
4739 (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
4740 }
4741
4742 v_must_defs = V_MUST_DEF_OPS (ann);
4743 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
52328bf6
DB
4744 {
4745 rewrite_to_new_ssa_names_def
4746 (V_MUST_DEF_RESULT_PTR (v_must_defs, i), stmt, map);
4747 rewrite_to_new_ssa_names_use
4748 (V_MUST_DEF_KILL_PTR (v_must_defs, i), map);
4749 }
42759f1e
ZD
4750 }
4751
628f6a4e 4752 FOR_EACH_EDGE (e, ei, bb->succs)
bb29d951 4753 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
42759f1e
ZD
4754 {
4755 rewrite_to_new_ssa_names_use
4756 (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);
4757
4758 if (e->flags & EDGE_ABNORMAL)
4759 {
4760 tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
4761 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
4762 }
4763 }
4764}
4765
4766/* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
4767 by the mapping MAP. */
4768
4769void
4770rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
4771{
4772 unsigned r;
4773
4774 for (r = 0; r < n_region; r++)
4775 rewrite_to_new_ssa_names_bb (region[r], map);
4776}
4777
4778/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4779 important exit edge EXIT. By important we mean that no SSA name defined
4780 inside region is live over the other exit edges of the region. All entry
4781 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4782 to the duplicate of the region. SSA form, dominance and loop information
4783 is updated. The new basic blocks are stored to REGION_COPY in the same
4784 order as they had in REGION, provided that REGION_COPY is not NULL.
4785 The function returns false if it is unable to copy the region,
4786 true otherwise. */
4787
4788bool
4789tree_duplicate_sese_region (edge entry, edge exit,
4790 basic_block *region, unsigned n_region,
4791 basic_block *region_copy)
4792{
4793 unsigned i, n_doms, ver;
4794 bool free_region_copy = false, copying_header = false;
4795 struct loop *loop = entry->dest->loop_father;
4796 edge exit_copy;
4797 bitmap definitions;
71882046 4798 tree phi;
42759f1e
ZD
4799 basic_block *doms;
4800 htab_t ssa_name_map = NULL;
4801 edge redirected;
87c476a2 4802 bitmap_iterator bi;
42759f1e
ZD
4803
4804 if (!can_copy_bbs_p (region, n_region))
4805 return false;
4806
4807 /* Some sanity checking. Note that we do not check for all possible
 4808     misuses of the function.  That is, if you ask to copy something weird,
 4809     it will work, but the state of the structures probably will not be
4810 correct. */
4811
4812 for (i = 0; i < n_region; i++)
4813 {
4814 /* We do not handle subloops, i.e. all the blocks must belong to the
4815 same loop. */
4816 if (region[i]->loop_father != loop)
4817 return false;
4818
4819 if (region[i] != entry->dest
4820 && region[i] == loop->header)
4821 return false;
4822 }
4823
4824 loop->copy = loop;
4825
4826 /* In case the function is used for loop header copying (which is the primary
 4827     use), ensure that EXIT and its copy will be the new latch and entry edges.  */
4828 if (loop->header == entry->dest)
4829 {
4830 copying_header = true;
4831 loop->copy = loop->outer;
4832
4833 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4834 return false;
4835
4836 for (i = 0; i < n_region; i++)
4837 if (region[i] != exit->src
4838 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4839 return false;
4840 }
4841
4842 if (!region_copy)
4843 {
4844 region_copy = xmalloc (sizeof (basic_block) * n_region);
4845 free_region_copy = true;
4846 }
4847
4848 gcc_assert (!any_marked_for_rewrite_p ());
4849
 4850  /* Record blocks outside the region that are dominated by something
4851 inside. */
4852 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
4853 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4854
4855 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
4856 definitions = marked_ssa_names ();
4857
4858 if (copying_header)
4859 {
4860 loop->header = exit->dest;
4861 loop->latch = exit->src;
4862 }
4863
4864 /* Redirect the entry and add the phi node arguments. */
4865 redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
4866 gcc_assert (redirected != NULL);
71882046 4867 flush_pending_stmts (entry);
42759f1e
ZD
4868
4869 /* Concerning updating of dominators: We must recount dominators
 4870     for the entry block and its copy.  Anything that is outside of the region
 4871     but was dominated by something inside needs recounting as well.  */
4872 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4873 doms[n_doms++] = entry->dest->rbi->original;
4874 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4875 free (doms);
4876
4877 /* Add the other phi node arguments. */
4878 add_phi_args_after_copy (region_copy, n_region);
4879
4880 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
4881 uses, it should be possible to emit phi nodes just for definitions that
4882 are used outside region. */
87c476a2 4883 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
42759f1e
ZD
4884 {
4885 tree name = ssa_name (ver);
4886
4887 phi = create_phi_node (name, exit->dest);
d2e398df
KH
4888 add_phi_arg (phi, name, exit);
4889 add_phi_arg (phi, name, exit_copy);
42759f1e
ZD
4890
4891 SSA_NAME_DEF_STMT (name) = phi;
87c476a2 4892 }
42759f1e
ZD
4893
4894 /* And create new definitions inside region and its copy. TODO -- once we
4895 have immediate uses, it might be better to leave definitions in region
4896 unchanged, create new ssa names for phi nodes on exit, and rewrite
4897 the uses, to avoid changing the copied region. */
4898 allocate_ssa_names (definitions, &ssa_name_map);
4899 rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
4900 allocate_ssa_names (definitions, &ssa_name_map);
4901 rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
4902 htab_delete (ssa_name_map);
4903
4904 if (free_region_copy)
4905 free (region_copy);
4906
4907 unmark_all_for_rewrite ();
4908 BITMAP_XFREE (definitions);
4909
4910 return true;
4911}
6de9cd9a
DN
4912
4913/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4914
4915void
4916dump_function_to_file (tree fn, FILE *file, int flags)
4917{
4918 tree arg, vars, var;
4919 bool ignore_topmost_bind = false, any_var = false;
4920 basic_block bb;
4921 tree chain;
4922
673fda6b 4923 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6de9cd9a
DN
4924
4925 arg = DECL_ARGUMENTS (fn);
4926 while (arg)
4927 {
4928 print_generic_expr (file, arg, dump_flags);
4929 if (TREE_CHAIN (arg))
4930 fprintf (file, ", ");
4931 arg = TREE_CHAIN (arg);
4932 }
4933 fprintf (file, ")\n");
4934
4935 if (flags & TDF_RAW)
4936 {
4937 dump_node (fn, TDF_SLIM | flags, file);
4938 return;
4939 }
4940
4941 /* When GIMPLE is lowered, the variables are no longer available in
4942 BIND_EXPRs, so display them separately. */
4943 if (cfun && cfun->unexpanded_var_list)
4944 {
4945 ignore_topmost_bind = true;
4946
4947 fprintf (file, "{\n");
4948 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4949 {
4950 var = TREE_VALUE (vars);
4951
4952 print_generic_decl (file, var, flags);
4953 fprintf (file, "\n");
4954
4955 any_var = true;
4956 }
4957 }
4958
4959 if (basic_block_info)
4960 {
4961 /* Make a CFG based dump. */
878f99d2 4962 check_bb_profile (ENTRY_BLOCK_PTR, file);
6de9cd9a
DN
4963 if (!ignore_topmost_bind)
4964 fprintf (file, "{\n");
4965
4966 if (any_var && n_basic_blocks)
4967 fprintf (file, "\n");
4968
4969 FOR_EACH_BB (bb)
4970 dump_generic_bb (file, bb, 2, flags);
4971
4972 fprintf (file, "}\n");
878f99d2 4973 check_bb_profile (EXIT_BLOCK_PTR, file);
6de9cd9a
DN
4974 }
4975 else
4976 {
4977 int indent;
4978
4979 /* Make a tree based dump. */
4980 chain = DECL_SAVED_TREE (fn);
4981
4982 if (TREE_CODE (chain) == BIND_EXPR)
4983 {
4984 if (ignore_topmost_bind)
4985 {
4986 chain = BIND_EXPR_BODY (chain);
4987 indent = 2;
4988 }
4989 else
4990 indent = 0;
4991 }
4992 else
4993 {
4994 if (!ignore_topmost_bind)
4995 fprintf (file, "{\n");
4996 indent = 2;
4997 }
4998
4999 if (any_var)
5000 fprintf (file, "\n");
5001
5002 print_generic_stmt_indented (file, chain, flags, indent);
5003 if (ignore_topmost_bind)
5004 fprintf (file, "}\n");
5005 }
5006
5007 fprintf (file, "\n\n");
5008}
5009
5010
 5011/* Pretty printing of the loop intermediate representation.  */
5012static void print_loop (FILE *, struct loop *, int);
628f6a4e
BE
5013static void print_pred_bbs (FILE *, basic_block bb);
5014static void print_succ_bbs (FILE *, basic_block bb);
6de9cd9a
DN
5015
5016
 5017/* Print the indexes of the predecessor basic blocks of BB on FILE.  */
5018
5019static void
628f6a4e 5020print_pred_bbs (FILE *file, basic_block bb)
6de9cd9a 5021{
628f6a4e
BE
5022 edge e;
5023 edge_iterator ei;
5024
5025 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a 5026 fprintf (file, "bb_%d", e->src->index);
6de9cd9a
DN
5027}
5028
5029
 5030/* Print the indexes of the successor basic blocks of BB on FILE.  */
5031
5032static void
628f6a4e 5033print_succ_bbs (FILE *file, basic_block bb)
6de9cd9a 5034{
628f6a4e
BE
5035 edge e;
5036 edge_iterator ei;
5037
5038 FOR_EACH_EDGE (e, ei, bb->succs)
5039 fprintf (file, "bb_%d", e->src->index);
6de9cd9a
DN
5040}
5041
5042
5043/* Pretty print LOOP on FILE, indented INDENT spaces. */
5044
5045static void
5046print_loop (FILE *file, struct loop *loop, int indent)
5047{
5048 char *s_indent;
5049 basic_block bb;
5050
5051 if (loop == NULL)
5052 return;
5053
5054 s_indent = (char *) alloca ((size_t) indent + 1);
5055 memset ((void *) s_indent, ' ', (size_t) indent);
5056 s_indent[indent] = '\0';
5057
5058 /* Print the loop's header. */
5059 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5060
5061 /* Print the loop's body. */
5062 fprintf (file, "%s{\n", s_indent);
5063 FOR_EACH_BB (bb)
5064 if (bb->loop_father == loop)
5065 {
5066 /* Print the basic_block's header. */
5067 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
628f6a4e 5068 print_pred_bbs (file, bb);
6de9cd9a 5069 fprintf (file, "}, succs = {");
628f6a4e 5070 print_succ_bbs (file, bb);
6de9cd9a
DN
5071 fprintf (file, "})\n");
5072
5073 /* Print the basic_block's body. */
5074 fprintf (file, "%s {\n", s_indent);
5075 tree_dump_bb (bb, file, indent + 4);
5076 fprintf (file, "%s }\n", s_indent);
5077 }
5078
5079 print_loop (file, loop->inner, indent + 2);
5080 fprintf (file, "%s}\n", s_indent);
5081 print_loop (file, loop->next, indent);
5082}
5083
5084
 5085/* Pretty print on FILE the loop structure of the current function,
 5086   starting from the loop father of its first basic block.  */
5087
5088void
5089print_loop_ir (FILE *file)
5090{
5091 basic_block bb;
5092
5093 bb = BASIC_BLOCK (0);
5094 if (bb && bb->loop_father)
5095 print_loop (file, bb->loop_father, 0);
5096}
5097
5098
5099/* Debugging loops structure at tree level. */
5100
5101void
5102debug_loop_ir (void)
5103{
5104 print_loop_ir (stderr);
5105}
5106
5107
5108/* Return true if BB ends with a call, possibly followed by some
 5109   instructions that must stay with the call.  Return false
5110 otherwise. */
5111
5112static bool
5113tree_block_ends_with_call_p (basic_block bb)
5114{
5115 block_stmt_iterator bsi = bsi_last (bb);
cd709752 5116 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
6de9cd9a
DN
5117}
5118
5119
 5120/* Return true if BB ends with a conditional branch.  Return false
5121 otherwise. */
5122
5123static bool
5124tree_block_ends_with_condjump_p (basic_block bb)
5125{
5126 tree stmt = tsi_stmt (bsi_last (bb).tsi);
5127 return (TREE_CODE (stmt) == COND_EXPR);
5128}
5129
5130
 5131/* Return true if we need to add a fake edge to exit at statement T.
5132 Helper function for tree_flow_call_edges_add. */
5133
5134static bool
5135need_fake_edge_p (tree t)
5136{
cd709752 5137 tree call;
6de9cd9a
DN
5138
5139 /* NORETURN and LONGJMP calls already have an edge to exit.
5140 CONST, PURE and ALWAYS_RETURN calls do not need one.
5141 We don't currently check for CONST and PURE here, although
5142 it would be a good idea, because those attributes are
5143 figured out from the RTL in mark_constant_function, and
5144 the counter incrementation code from -fprofile-arcs
5145 leads to different results from -fbranch-probabilities. */
cd709752
RH
5146 call = get_call_expr_in (t);
5147 if (call
6e14af16 5148 && !(call_expr_flags (call) & (ECF_NORETURN | ECF_ALWAYS_RETURN)))
6de9cd9a
DN
5149 return true;
5150
5151 if (TREE_CODE (t) == ASM_EXPR
5152 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5153 return true;
5154
5155 return false;
5156}
5157
5158
 5159/* Add fake edges to the function exit for any non-constant and
 5160   non-noreturn calls, and for volatile inline assembly, in the bitmap of
 5161   blocks specified by BLOCKS or to the whole CFG if BLOCKS is zero.  Return
5162 the number of blocks that were split.
5163
5164 The goal is to expose cases in which entering a basic block does
5165 not imply that all subsequent instructions must be executed. */
5166
5167static int
5168tree_flow_call_edges_add (sbitmap blocks)
5169{
5170 int i;
5171 int blocks_split = 0;
5172 int last_bb = last_basic_block;
5173 bool check_last_block = false;
5174
5175 if (n_basic_blocks == 0)
5176 return 0;
5177
5178 if (! blocks)
5179 check_last_block = true;
5180 else
5181 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5182
5183 /* In the last basic block, before epilogue generation, there will be
5184 a fallthru edge to EXIT. Special care is required if the last insn
5185 of the last basic block is a call because make_edge folds duplicate
5186 edges, which would result in the fallthru edge also being marked
5187 fake, which would result in the fallthru edge being removed by
5188 remove_fake_edges, which would result in an invalid CFG.
5189
5190 Moreover, we can't elide the outgoing fake edge, since the block
5191 profiler needs to take this into account in order to solve the minimal
5192 spanning tree in the case that the call doesn't return.
5193
5194 Handle this by adding a dummy instruction in a new last basic block. */
5195 if (check_last_block)
5196 {
5197 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5198 block_stmt_iterator bsi = bsi_last (bb);
5199 tree t = NULL_TREE;
5200 if (!bsi_end_p (bsi))
5201 t = bsi_stmt (bsi);
5202
5203 if (need_fake_edge_p (t))
5204 {
5205 edge e;
5206
9ff3d2de
JL
5207 e = find_edge (bb, EXIT_BLOCK_PTR);
5208 if (e)
5209 {
5210 bsi_insert_on_edge (e, build_empty_stmt ());
5211 bsi_commit_edge_inserts ();
5212 }
6de9cd9a
DN
5213 }
5214 }
5215
 5216  /* Now add fake edges to the function exit for any non-constant
 5217     calls, since there is no way that we can determine whether they
 5218     will return or not...  */
5219 for (i = 0; i < last_bb; i++)
5220 {
5221 basic_block bb = BASIC_BLOCK (i);
5222 block_stmt_iterator bsi;
5223 tree stmt, last_stmt;
5224
5225 if (!bb)
5226 continue;
5227
5228 if (blocks && !TEST_BIT (blocks, i))
5229 continue;
5230
5231 bsi = bsi_last (bb);
5232 if (!bsi_end_p (bsi))
5233 {
5234 last_stmt = bsi_stmt (bsi);
5235 do
5236 {
5237 stmt = bsi_stmt (bsi);
5238 if (need_fake_edge_p (stmt))
5239 {
5240 edge e;
5241 /* The handling above of the final block before the
5242 epilogue should be enough to verify that there is
5243 no edge to the exit block in CFG already.
5244 Calling make_edge in such case would cause us to
5245 mark that edge as fake and remove it later. */
5246#ifdef ENABLE_CHECKING
5247 if (stmt == last_stmt)
628f6a4e 5248 {
9ff3d2de
JL
5249 e = find_edge (bb, EXIT_BLOCK_PTR);
5250 gcc_assert (e == NULL);
628f6a4e 5251 }
6de9cd9a
DN
5252#endif
5253
5254 /* Note that the following may create a new basic block
5255 and renumber the existing basic blocks. */
5256 if (stmt != last_stmt)
5257 {
5258 e = split_block (bb, stmt);
5259 if (e)
5260 blocks_split++;
5261 }
5262 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5263 }
5264 bsi_prev (&bsi);
5265 }
5266 while (!bsi_end_p (bsi));
5267 }
5268 }
5269
5270 if (blocks_split)
5271 verify_flow_info ();
5272
5273 return blocks_split;
5274}
5275
1eaba2f2
RH
5276bool
5277tree_purge_dead_eh_edges (basic_block bb)
5278{
5279 bool changed = false;
628f6a4e
BE
5280 edge e;
5281 edge_iterator ei;
1eaba2f2
RH
5282 tree stmt = last_stmt (bb);
5283
5284 if (stmt && tree_can_throw_internal (stmt))
5285 return false;
5286
628f6a4e 5287 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1eaba2f2 5288 {
1eaba2f2
RH
5289 if (e->flags & EDGE_EH)
5290 {
d0d2cc21 5291 remove_edge (e);
1eaba2f2
RH
5292 changed = true;
5293 }
628f6a4e
BE
5294 else
5295 ei_next (&ei);
1eaba2f2
RH
5296 }
5297
69d49802
JJ
5298 /* Removal of dead EH edges might change dominators of not
5299 just immediate successors. E.g. when bb1 is changed so that
5300 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5301 eh edges purged by this function in:
5302 0
5303 / \
5304 v v
5305 1-->2
5306 / \ |
5307 v v |
5308 3-->4 |
5309 \ v
5310 --->5
5311 |
5312 -
5313 idom(bb5) must be recomputed. For now just free the dominance
5314 info. */
5315 if (changed)
5316 free_dominance_info (CDI_DOMINATORS);
5317
1eaba2f2
RH
5318 return changed;
5319}
5320
5321bool
5322tree_purge_all_dead_eh_edges (bitmap blocks)
5323{
5324 bool changed = false;
3cd8c58a 5325 unsigned i;
87c476a2 5326 bitmap_iterator bi;
1eaba2f2 5327
87c476a2
ZD
5328 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5329 {
5330 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5331 }
1eaba2f2
RH
5332
5333 return changed;
5334}
6de9cd9a 5335
a100ac1e
KH
5336/* This function is called whenever a new edge is created or
5337 redirected. */
5338
5339static void
5340tree_execute_on_growing_pred (edge e)
5341{
5342 basic_block bb = e->dest;
5343
5344 if (phi_nodes (bb))
5345 reserve_phi_args_for_new_edge (bb);
5346}
5347
e51546f8
KH
5348/* This function is called immediately before edge E is removed from
5349 the edge vector E->dest->preds. */
5350
5351static void
5352tree_execute_on_shrinking_pred (edge e)
5353{
5354 if (phi_nodes (e->dest))
5355 remove_phi_args (e);
5356}
5357
6de9cd9a
DN
5358struct cfg_hooks tree_cfg_hooks = {
5359 "tree",
5360 tree_verify_flow_info,
5361 tree_dump_bb, /* dump_bb */
5362 create_bb, /* create_basic_block */
5363 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5364 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5365 remove_bb, /* delete_basic_block */
5366 tree_split_block, /* split_block */
5367 tree_move_block_after, /* move_block_after */
5368 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5369 tree_merge_blocks, /* merge_blocks */
5370 tree_predict_edge, /* predict_edge */
5371 tree_predicted_by_p, /* predicted_by_p */
5372 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5373 tree_duplicate_bb, /* duplicate_block */
5374 tree_split_edge, /* split_edge */
5375 tree_make_forwarder_block, /* make_forward_block */
5376 NULL, /* tidy_fallthru_edge */
5377 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5378 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
d9d4706f 5379 tree_flow_call_edges_add, /* flow_call_edges_add */
a100ac1e 5380 tree_execute_on_growing_pred, /* execute_on_growing_pred */
e51546f8 5381 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
6de9cd9a
DN
5382};
5383
5384
5385/* Split all critical edges. */
5386
5387static void
5388split_critical_edges (void)
5389{
5390 basic_block bb;
5391 edge e;
628f6a4e 5392 edge_iterator ei;
6de9cd9a 5393
d6be0d7f
JL
5394 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5395 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5396 mappings around the calls to split_edge. */
5397 start_recording_case_labels ();
6de9cd9a
DN
5398 FOR_ALL_BB (bb)
5399 {
628f6a4e 5400 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
5401 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5402 {
5403 split_edge (e);
5404 }
5405 }
d6be0d7f 5406 end_recording_case_labels ();
6de9cd9a
DN
5407}
5408
5409struct tree_opt_pass pass_split_crit_edges =
5410{
5d44aeed 5411 "crited", /* name */
6de9cd9a
DN
5412 NULL, /* gate */
5413 split_critical_edges, /* execute */
5414 NULL, /* sub */
5415 NULL, /* next */
5416 0, /* static_pass_number */
5417 TV_TREE_SPLIT_EDGES, /* tv_id */
5418 PROP_cfg, /* properties required */
5419 PROP_no_crit_edges, /* properties_provided */
5420 0, /* properties_destroyed */
5421 0, /* todo_flags_start */
9f8628ba
PB
5422 TODO_dump_func, /* todo_flags_finish */
5423 0 /* letter */
6de9cd9a 5424};
26277d41
PB
5425
5426\f
5427/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
 5428   a temporary, register it to be renamed if necessary,
5429 and finally return the temporary. Put the statements to compute
5430 EXP before the current statement in BSI. */
5431
5432tree
5433gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5434{
5435 tree t, new_stmt, orig_stmt;
5436
5437 if (is_gimple_val (exp))
5438 return exp;
5439
5440 t = make_rename_temp (type, NULL);
5441 new_stmt = build (MODIFY_EXPR, type, t, exp);
5442
5443 orig_stmt = bsi_stmt (*bsi);
5444 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5445 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5446
5447 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5448
5449 return t;
5450}
5451
5452/* Build a ternary operation and gimplify it. Emit code before BSI.
5453 Return the gimple_val holding the result. */
5454
5455tree
5456gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5457 tree type, tree a, tree b, tree c)
5458{
5459 tree ret;
5460
5461 ret = fold (build3 (code, type, a, b, c));
5462 STRIP_NOPS (ret);
5463
5464 return gimplify_val (bsi, type, ret);
5465}
5466
5467/* Build a binary operation and gimplify it. Emit code before BSI.
5468 Return the gimple_val holding the result. */
5469
5470tree
5471gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5472 tree type, tree a, tree b)
5473{
5474 tree ret;
5475
5476 ret = fold (build2 (code, type, a, b));
5477 STRIP_NOPS (ret);
5478
5479 return gimplify_val (bsi, type, ret);
5480}
5481
5482/* Build a unary operation and gimplify it. Emit code before BSI.
5483 Return the gimple_val holding the result. */
5484
5485tree
5486gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5487 tree a)
5488{
5489 tree ret;
5490
5491 ret = fold (build1 (code, type, a));
5492 STRIP_NOPS (ret);
5493
5494 return gimplify_val (bsi, type, ret);
5495}
5496
5497
6de9cd9a
DN
5498\f
5499/* Emit return warnings. */
5500
5501static void
5502execute_warn_function_return (void)
5503{
9506ac2b
PB
5504#ifdef USE_MAPPED_LOCATION
5505 source_location location;
5506#else
6de9cd9a 5507 location_t *locus;
9506ac2b 5508#endif
6de9cd9a
DN
5509 tree last;
5510 edge e;
628f6a4e 5511 edge_iterator ei;
6de9cd9a
DN
5512
5513 if (warn_missing_noreturn
5514 && !TREE_THIS_VOLATILE (cfun->decl)
628f6a4e 5515 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
6de9cd9a 5516 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
971801ff
JM
5517 warning ("%Jfunction might be possible candidate for "
5518 "attribute %<noreturn%>",
6de9cd9a
DN
5519 cfun->decl);
5520
5521 /* If we have a path to EXIT, then we do return. */
5522 if (TREE_THIS_VOLATILE (cfun->decl)
628f6a4e 5523 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
6de9cd9a 5524 {
9506ac2b
PB
5525#ifdef USE_MAPPED_LOCATION
5526 location = UNKNOWN_LOCATION;
5527#else
6de9cd9a 5528 locus = NULL;
9506ac2b 5529#endif
628f6a4e 5530 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
5531 {
5532 last = last_stmt (e->src);
5533 if (TREE_CODE (last) == RETURN_EXPR
9506ac2b
PB
5534#ifdef USE_MAPPED_LOCATION
5535 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5536#else
6de9cd9a 5537 && (locus = EXPR_LOCUS (last)) != NULL)
9506ac2b 5538#endif
6de9cd9a
DN
5539 break;
5540 }
9506ac2b
PB
5541#ifdef USE_MAPPED_LOCATION
5542 if (location == UNKNOWN_LOCATION)
5543 location = cfun->function_end_locus;
971801ff 5544 warning ("%H%<noreturn%> function does return", &location);
9506ac2b 5545#else
6de9cd9a
DN
5546 if (!locus)
5547 locus = &cfun->function_end_locus;
971801ff 5548 warning ("%H%<noreturn%> function does return", locus);
9506ac2b 5549#endif
6de9cd9a
DN
5550 }
5551
5552 /* If we see "return;" in some basic block, then we do reach the end
5553 without returning a value. */
5554 else if (warn_return_type
628f6a4e 5555 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
6de9cd9a
DN
5556 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5557 {
628f6a4e 5558 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
5559 {
5560 tree last = last_stmt (e->src);
5561 if (TREE_CODE (last) == RETURN_EXPR
5562 && TREE_OPERAND (last, 0) == NULL)
5563 {
9506ac2b
PB
5564#ifdef USE_MAPPED_LOCATION
5565 location = EXPR_LOCATION (last);
5566 if (location == UNKNOWN_LOCATION)
5567 location = cfun->function_end_locus;
5568 warning ("%Hcontrol reaches end of non-void function", &location);
5569#else
6de9cd9a
DN
5570 locus = EXPR_LOCUS (last);
5571 if (!locus)
5572 locus = &cfun->function_end_locus;
5573 warning ("%Hcontrol reaches end of non-void function", locus);
9506ac2b 5574#endif
6de9cd9a
DN
5575 break;
5576 }
5577 }
5578 }
5579}
5580
5581
5582/* Given a basic block B which ends with a conditional and has
5583 precisely two successors, determine which of the edges is taken if
5584 the conditional is true and which is taken if the conditional is
5585 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5586
5587void
5588extract_true_false_edges_from_block (basic_block b,
5589 edge *true_edge,
5590 edge *false_edge)
5591{
628f6a4e 5592 edge e = EDGE_SUCC (b, 0);
6de9cd9a
DN
5593
5594 if (e->flags & EDGE_TRUE_VALUE)
5595 {
5596 *true_edge = e;
628f6a4e 5597 *false_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
5598 }
5599 else
5600 {
5601 *false_edge = e;
628f6a4e 5602 *true_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
5603 }
5604}
5605
5606struct tree_opt_pass pass_warn_function_return =
5607{
5608 NULL, /* name */
5609 NULL, /* gate */
5610 execute_warn_function_return, /* execute */
5611 NULL, /* sub */
5612 NULL, /* next */
5613 0, /* static_pass_number */
5614 0, /* tv_id */
00bfee6f 5615 PROP_cfg, /* properties_required */
6de9cd9a
DN
5616 0, /* properties_provided */
5617 0, /* properties_destroyed */
5618 0, /* todo_flags_start */
9f8628ba
PB
5619 0, /* todo_flags_finish */
5620 0 /* letter */
6de9cd9a
DN
5621};
5622
5623#include "gt-tree-cfg.h"