]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-cfg.c
Changelog: Fix merge bit.
[thirdparty/gcc.git] / gcc / tree-cfg.c
CommitLineData
6de9cd9a 1/* Control flow functions for trees.
135a171d 2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
56e84019 3 Free Software Foundation, Inc.
6de9cd9a
DN
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
9dcd6f09 10the Free Software Foundation; either version 3, or (at your option)
6de9cd9a
DN
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "output.h"
6de9cd9a
DN
32#include "flags.h"
33#include "function.h"
34#include "expr.h"
35#include "ggc.h"
36#include "langhooks.h"
37#include "diagnostic.h"
38#include "tree-flow.h"
39#include "timevar.h"
40#include "tree-dump.h"
41#include "tree-pass.h"
42#include "toplev.h"
43#include "except.h"
44#include "cfgloop.h"
42759f1e 45#include "cfglayout.h"
9af0df6b 46#include "tree-ssa-propagate.h"
6946b3f7 47#include "value-prof.h"
4437b50d 48#include "pointer-set.h"
917948d3 49#include "tree-inline.h"
6de9cd9a
DN
50
51/* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
53
54/* Local declarations. */
55
56/* Initial capacity for the basic block array. */
57static const int initial_cfg_capacity = 20;
58
d6be0d7f
JL
59/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of SWITCH_EXPRs.
92b6dff3 63
d6be0d7f
JL
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
92b6dff3 66
d6be0d7f
JL
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
71
15814ba0 72static struct pointer_map_t *edge_to_cases;
92b6dff3 73
6de9cd9a
DN
74/* CFG statistics. */
75struct cfg_stats_d
76{
77 long num_merged_labels;
78};
79
80static struct cfg_stats_d cfg_stats;
81
82/* Nonzero if we found a computed goto while building basic blocks. */
83static bool found_computed_goto;
84
85/* Basic blocks and flowgraphs. */
86static basic_block create_bb (void *, void *, basic_block);
6de9cd9a
DN
87static void make_blocks (tree);
88static void factor_computed_gotos (void);
6de9cd9a
DN
89
90/* Edges. */
91static void make_edges (void);
6de9cd9a
DN
92static void make_cond_expr_edges (basic_block);
93static void make_switch_expr_edges (basic_block);
94static void make_goto_expr_edges (basic_block);
95static edge tree_redirect_edge_and_branch (edge, basic_block);
96static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
c2924966 97static unsigned int split_critical_edges (void);
6de9cd9a
DN
98
99/* Various helpers. */
6ea2b70d 100static inline bool stmt_starts_bb_p (const_tree, const_tree);
6de9cd9a
DN
101static int tree_verify_flow_info (void);
102static void tree_make_forwarder_block (edge);
6de9cd9a 103static void tree_cfg2vcg (FILE *);
0a4fe58f 104static inline void change_bb_for_stmt (tree t, basic_block bb);
6de9cd9a
DN
105
106/* Flowgraph optimization and cleanup. */
107static void tree_merge_blocks (basic_block, basic_block);
b48d0358 108static bool tree_can_merge_blocks_p (basic_block, basic_block);
6de9cd9a 109static void remove_bb (basic_block);
be477406 110static edge find_taken_edge_computed_goto (basic_block, tree);
6de9cd9a
DN
111static edge find_taken_edge_cond_expr (basic_block, tree);
112static edge find_taken_edge_switch_expr (basic_block, tree);
113static tree find_case_label_for_value (tree, tree);
6de9cd9a 114
a930a4ef
JH
115void
116init_empty_tree_cfg (void)
117{
118 /* Initialize the basic block array. */
119 init_flow ();
120 profile_status = PROFILE_ABSENT;
24bd1a0b
DB
121 n_basic_blocks = NUM_FIXED_BLOCKS;
122 last_basic_block = NUM_FIXED_BLOCKS;
68f9b844 123 basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
a590ac65
KH
124 VEC_safe_grow_cleared (basic_block, gc, basic_block_info,
125 initial_cfg_capacity);
a930a4ef
JH
126
127 /* Build a mapping of labels to their associated blocks. */
e597f337 128 label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
a590ac65
KH
129 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
130 initial_cfg_capacity);
a930a4ef 131
68f9b844
KH
132 SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
133 SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
a930a4ef
JH
134 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
135 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
a930a4ef 136}
6de9cd9a
DN
137
138/*---------------------------------------------------------------------------
139 Create basic blocks
140---------------------------------------------------------------------------*/
141
142/* Entry point to the CFG builder for trees. TP points to the list of
143 statements to be added to the flowgraph. */
144
145static void
146build_tree_cfg (tree *tp)
147{
148 /* Register specific tree functions. */
149 tree_register_cfg_hooks ();
150
6de9cd9a
DN
151 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
152
a930a4ef 153 init_empty_tree_cfg ();
6de9cd9a
DN
154
155 found_computed_goto = 0;
156 make_blocks (*tp);
157
158 /* Computed gotos are hell to deal with, especially if there are
159 lots of them with a large number of destinations. So we factor
160 them to a common computed goto location before we build the
161 edge list. After we convert back to normal form, we will un-factor
162 the computed gotos since factoring introduces an unwanted jump. */
163 if (found_computed_goto)
164 factor_computed_gotos ();
165
f0b698c1 166 /* Make sure there is always at least one block, even if it's empty. */
24bd1a0b 167 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
168 create_empty_bb (ENTRY_BLOCK_PTR);
169
6de9cd9a 170 /* Adjust the size of the array. */
68f9b844 171 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
a590ac65 172 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
6de9cd9a 173
f667741c
SB
174 /* To speed up statement iterator walks, we first purge dead labels. */
175 cleanup_dead_labels ();
176
177 /* Group case nodes to reduce the number of edges.
178 We do this after cleaning up dead labels because otherwise we miss
179 a lot of obvious case merging opportunities. */
180 group_case_labels ();
181
6de9cd9a
DN
182 /* Create the edges of the flowgraph. */
183 make_edges ();
8b11009b 184 cleanup_dead_labels ();
6de9cd9a
DN
185
186 /* Debugging dumps. */
187
188 /* Write the flowgraph to a VCG file. */
189 {
190 int local_dump_flags;
10d22567
ZD
191 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
192 if (vcg_file)
6de9cd9a 193 {
10d22567
ZD
194 tree_cfg2vcg (vcg_file);
195 dump_end (TDI_vcg, vcg_file);
6de9cd9a
DN
196 }
197 }
198
81cfbbc2
JH
199#ifdef ENABLE_CHECKING
200 verify_stmts ();
201#endif
202
6de9cd9a
DN
203 /* Dump a textual representation of the flowgraph. */
204 if (dump_file)
205 dump_tree_cfg (dump_file, dump_flags);
206}
207
c2924966 208static unsigned int
6de9cd9a
DN
209execute_build_cfg (void)
210{
211 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
c2924966 212 return 0;
6de9cd9a
DN
213}
214
8ddbbcae 215struct gimple_opt_pass pass_build_cfg =
6de9cd9a 216{
8ddbbcae
JH
217 {
218 GIMPLE_PASS,
6de9cd9a
DN
219 "cfg", /* name */
220 NULL, /* gate */
221 execute_build_cfg, /* execute */
222 NULL, /* sub */
223 NULL, /* next */
224 0, /* static_pass_number */
225 TV_TREE_CFG, /* tv_id */
226 PROP_gimple_leh, /* properties_required */
227 PROP_cfg, /* properties_provided */
228 0, /* properties_destroyed */
229 0, /* todo_flags_start */
8ddbbcae
JH
230 TODO_verify_stmts | TODO_cleanup_cfg /* todo_flags_finish */
231 }
6de9cd9a
DN
232};
233
6531d1be 234/* Search the CFG for any computed gotos. If found, factor them to a
6de9cd9a 235 common computed goto site. Also record the location of that site so
6531d1be 236 that we can un-factor the gotos after we have converted back to
6de9cd9a
DN
237 normal form. */
238
239static void
240factor_computed_gotos (void)
241{
242 basic_block bb;
243 tree factored_label_decl = NULL;
244 tree var = NULL;
245 tree factored_computed_goto_label = NULL;
246 tree factored_computed_goto = NULL;
247
248 /* We know there are one or more computed gotos in this function.
249 Examine the last statement in each basic block to see if the block
250 ends with a computed goto. */
6531d1be 251
6de9cd9a
DN
252 FOR_EACH_BB (bb)
253 {
254 block_stmt_iterator bsi = bsi_last (bb);
255 tree last;
256
257 if (bsi_end_p (bsi))
258 continue;
259 last = bsi_stmt (bsi);
260
261 /* Ignore the computed goto we create when we factor the original
262 computed gotos. */
263 if (last == factored_computed_goto)
264 continue;
265
266 /* If the last statement is a computed goto, factor it. */
267 if (computed_goto_p (last))
268 {
269 tree assignment;
270
271 /* The first time we find a computed goto we need to create
272 the factored goto block and the variable each original
273 computed goto will use for their goto destination. */
274 if (! factored_computed_goto)
275 {
276 basic_block new_bb = create_empty_bb (bb);
277 block_stmt_iterator new_bsi = bsi_start (new_bb);
278
279 /* Create the destination of the factored goto. Each original
280 computed goto will put its desired destination into this
281 variable and jump to the label we create immediately
282 below. */
283 var = create_tmp_var (ptr_type_node, "gotovar");
284
285 /* Build a label for the new block which will contain the
286 factored computed goto. */
287 factored_label_decl = create_artificial_label ();
288 factored_computed_goto_label
289 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
290 bsi_insert_after (&new_bsi, factored_computed_goto_label,
291 BSI_NEW_STMT);
292
293 /* Build our new computed goto. */
294 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
295 bsi_insert_after (&new_bsi, factored_computed_goto,
296 BSI_NEW_STMT);
297 }
298
299 /* Copy the original computed goto's destination into VAR. */
939409af
RS
300 assignment = build_gimple_modify_stmt (var,
301 GOTO_DESTINATION (last));
6de9cd9a
DN
302 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
303
304 /* And re-vector the computed goto to the new destination. */
305 GOTO_DESTINATION (last) = factored_label_decl;
306 }
307 }
308}
309
310
6de9cd9a
DN
311/* Build a flowgraph for the statement_list STMT_LIST. */
312
313static void
314make_blocks (tree stmt_list)
315{
316 tree_stmt_iterator i = tsi_start (stmt_list);
317 tree stmt = NULL;
318 bool start_new_block = true;
319 bool first_stmt_of_list = true;
320 basic_block bb = ENTRY_BLOCK_PTR;
321
322 while (!tsi_end_p (i))
323 {
324 tree prev_stmt;
325
326 prev_stmt = stmt;
327 stmt = tsi_stmt (i);
328
329 /* If the statement starts a new basic block or if we have determined
330 in a previous pass that we need to create a new block for STMT, do
331 so now. */
332 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
333 {
334 if (!first_stmt_of_list)
335 stmt_list = tsi_split_statement_list_before (&i);
336 bb = create_basic_block (stmt_list, NULL, bb);
337 start_new_block = false;
338 }
339
340 /* Now add STMT to BB and create the subgraphs for special statement
341 codes. */
342 set_bb_for_stmt (stmt, bb);
343
344 if (computed_goto_p (stmt))
345 found_computed_goto = true;
346
347 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
348 next iteration. */
349 if (stmt_ends_bb_p (stmt))
350 start_new_block = true;
351
352 tsi_next (&i);
353 first_stmt_of_list = false;
354 }
355}
356
357
358/* Create and return a new empty basic block after bb AFTER. */
359
360static basic_block
361create_bb (void *h, void *e, basic_block after)
362{
363 basic_block bb;
364
1e128c5f 365 gcc_assert (!e);
6de9cd9a 366
27fd69fa
KH
367 /* Create and initialize a new basic block. Since alloc_block uses
368 ggc_alloc_cleared to allocate a basic block, we do not have to
369 clear the newly allocated basic block here. */
6de9cd9a 370 bb = alloc_block ();
6de9cd9a
DN
371
372 bb->index = last_basic_block;
373 bb->flags = BB_NEW;
7506e1cb
ZD
374 bb->il.tree = GGC_CNEW (struct tree_bb_info);
375 set_bb_stmt_list (bb, h ? (tree) h : alloc_stmt_list ());
6de9cd9a
DN
376
377 /* Add the new block to the linked list of blocks. */
378 link_block (bb, after);
379
380 /* Grow the basic block array if needed. */
68f9b844 381 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
6de9cd9a
DN
382 {
383 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
a590ac65 384 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
6de9cd9a
DN
385 }
386
387 /* Add the newly created block to the array. */
68f9b844 388 SET_BASIC_BLOCK (last_basic_block, bb);
6de9cd9a 389
6de9cd9a
DN
390 n_basic_blocks++;
391 last_basic_block++;
392
6de9cd9a
DN
393 return bb;
394}
395
396
397/*---------------------------------------------------------------------------
398 Edge creation
399---------------------------------------------------------------------------*/
400
fca01525
KH
401/* Fold COND_EXPR_COND of each COND_EXPR. */
402
e21aff8a 403void
fca01525
KH
404fold_cond_expr_cond (void)
405{
406 basic_block bb;
407
408 FOR_EACH_BB (bb)
409 {
410 tree stmt = last_stmt (bb);
411
412 if (stmt
413 && TREE_CODE (stmt) == COND_EXPR)
414 {
6ac01510
ILT
415 tree cond;
416 bool zerop, onep;
417
418 fold_defer_overflow_warnings ();
419 cond = fold (COND_EXPR_COND (stmt));
420 zerop = integer_zerop (cond);
421 onep = integer_onep (cond);
e233ac97 422 fold_undefer_overflow_warnings (zerop || onep,
4df28528 423 stmt,
6ac01510
ILT
424 WARN_STRICT_OVERFLOW_CONDITIONAL);
425 if (zerop)
4bafe847 426 COND_EXPR_COND (stmt) = boolean_false_node;
6ac01510 427 else if (onep)
4bafe847 428 COND_EXPR_COND (stmt) = boolean_true_node;
fca01525
KH
429 }
430 }
431}
432
6de9cd9a
DN
433/* Join all the blocks in the flowgraph. */
434
435static void
436make_edges (void)
437{
438 basic_block bb;
bed575d5 439 struct omp_region *cur_region = NULL;
6de9cd9a
DN
440
441 /* Create an edge from entry to the first block with executable
442 statements in it. */
24bd1a0b 443 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
6de9cd9a 444
adb35797 445 /* Traverse the basic block array placing edges. */
6de9cd9a
DN
446 FOR_EACH_BB (bb)
447 {
6de9cd9a 448 tree last = last_stmt (bb);
56e84019 449 bool fallthru;
6de9cd9a 450
56e84019 451 if (last)
6de9cd9a 452 {
bed575d5
RS
453 enum tree_code code = TREE_CODE (last);
454 switch (code)
56e84019
RH
455 {
456 case GOTO_EXPR:
457 make_goto_expr_edges (bb);
458 fallthru = false;
459 break;
460 case RETURN_EXPR:
461 make_edge (bb, EXIT_BLOCK_PTR, 0);
462 fallthru = false;
463 break;
464 case COND_EXPR:
465 make_cond_expr_edges (bb);
466 fallthru = false;
467 break;
468 case SWITCH_EXPR:
469 make_switch_expr_edges (bb);
470 fallthru = false;
471 break;
472 case RESX_EXPR:
473 make_eh_edges (last);
474 fallthru = false;
475 break;
476
477 case CALL_EXPR:
478 /* If this function receives a nonlocal goto, then we need to
479 make edges from this call site to all the nonlocal goto
480 handlers. */
4f6c2131
EB
481 if (tree_can_make_abnormal_goto (last))
482 make_abnormal_goto_edges (bb, true);
6de9cd9a 483
56e84019
RH
484 /* If this statement has reachable exception handlers, then
485 create abnormal edges to them. */
486 make_eh_edges (last);
487
488 /* Some calls are known not to return. */
489 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
490 break;
491
492 case MODIFY_EXPR:
07beea0d
AH
493 gcc_unreachable ();
494
495 case GIMPLE_MODIFY_STMT:
56e84019
RH
496 if (is_ctrl_altering_stmt (last))
497 {
07beea0d
AH
498 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
499 the CALL_EXPR may have an abnormal edge. Search the RHS
500 for this case and create any required edges. */
4f6c2131
EB
501 if (tree_can_make_abnormal_goto (last))
502 make_abnormal_goto_edges (bb, true);
56e84019
RH
503
504 make_eh_edges (last);
505 }
506 fallthru = true;
507 break;
508
509 case OMP_PARALLEL:
510 case OMP_FOR:
511 case OMP_SINGLE:
512 case OMP_MASTER:
513 case OMP_ORDERED:
514 case OMP_CRITICAL:
515 case OMP_SECTION:
bed575d5 516 cur_region = new_omp_region (bb, code, cur_region);
56e84019
RH
517 fallthru = true;
518 break;
519
7e2df4a1 520 case OMP_SECTIONS:
bed575d5 521 cur_region = new_omp_region (bb, code, cur_region);
e5c95afe
ZD
522 fallthru = true;
523 break;
524
525 case OMP_SECTIONS_SWITCH:
7e2df4a1 526 fallthru = false;
777f7f9a
RH
527 break;
528
a509ebb5
RL
529
530 case OMP_ATOMIC_LOAD:
531 case OMP_ATOMIC_STORE:
532 fallthru = true;
533 break;
534
535
bed575d5
RS
536 case OMP_RETURN:
537 /* In the case of an OMP_SECTION, the edge will go somewhere
538 other than the next block. This will be created later. */
539 cur_region->exit = bb;
540 fallthru = cur_region->type != OMP_SECTION;
541 cur_region = cur_region->outer;
542 break;
543
544 case OMP_CONTINUE:
545 cur_region->cont = bb;
546 switch (cur_region->type)
547 {
548 case OMP_FOR:
135a171d
JJ
549 /* Mark all OMP_FOR and OMP_CONTINUE succs edges as abnormal
550 to prevent splitting them. */
551 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
e5c95afe 552 /* Make the loopback edge. */
135a171d
JJ
553 make_edge (bb, single_succ (cur_region->entry),
554 EDGE_ABNORMAL);
555
e5c95afe
ZD
556 /* Create an edge from OMP_FOR to exit, which corresponds to
557 the case that the body of the loop is not executed at
558 all. */
135a171d
JJ
559 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
560 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
561 fallthru = false;
bed575d5
RS
562 break;
563
564 case OMP_SECTIONS:
565 /* Wire up the edges into and out of the nested sections. */
bed575d5 566 {
e5c95afe
ZD
567 basic_block switch_bb = single_succ (cur_region->entry);
568
bed575d5
RS
569 struct omp_region *i;
570 for (i = cur_region->inner; i ; i = i->next)
571 {
572 gcc_assert (i->type == OMP_SECTION);
e5c95afe 573 make_edge (switch_bb, i->entry, 0);
bed575d5
RS
574 make_edge (i->exit, bb, EDGE_FALLTHRU);
575 }
e5c95afe
ZD
576
577 /* Make the loopback edge to the block with
578 OMP_SECTIONS_SWITCH. */
579 make_edge (bb, switch_bb, 0);
580
581 /* Make the edge from the switch to exit. */
582 make_edge (switch_bb, bb->next_bb, 0);
583 fallthru = false;
bed575d5
RS
584 }
585 break;
6531d1be 586
bed575d5
RS
587 default:
588 gcc_unreachable ();
589 }
bed575d5
RS
590 break;
591
56e84019
RH
592 default:
593 gcc_assert (!stmt_ends_bb_p (last));
594 fallthru = true;
595 }
6de9cd9a 596 }
56e84019
RH
597 else
598 fallthru = true;
6de9cd9a 599
56e84019 600 if (fallthru)
6de9cd9a
DN
601 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
602 }
603
bed575d5
RS
604 if (root_omp_region)
605 free_omp_regions ();
606
fca01525
KH
607 /* Fold COND_EXPR_COND of each COND_EXPR. */
608 fold_cond_expr_cond ();
6de9cd9a
DN
609}
610
611
6de9cd9a
DN
612/* Create the edges for a COND_EXPR starting at block BB.
613 At this point, both clauses must contain only simple gotos. */
614
615static void
616make_cond_expr_edges (basic_block bb)
617{
618 tree entry = last_stmt (bb);
619 basic_block then_bb, else_bb;
620 tree then_label, else_label;
d783b2a2 621 edge e;
6de9cd9a 622
1e128c5f
GB
623 gcc_assert (entry);
624 gcc_assert (TREE_CODE (entry) == COND_EXPR);
6de9cd9a
DN
625
626 /* Entry basic blocks for each component. */
627 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
628 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
629 then_bb = label_to_block (then_label);
630 else_bb = label_to_block (else_label);
631
d783b2a2 632 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
d783b2a2 633 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
d783b2a2
JH
634 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
635 if (e)
2d593c86 636 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
a9b77cd1
ZD
637
638 /* We do not need the gotos anymore. */
639 COND_EXPR_THEN (entry) = NULL_TREE;
640 COND_EXPR_ELSE (entry) = NULL_TREE;
6de9cd9a
DN
641}
642
92b6dff3 643
d6be0d7f
JL
644/* Called for each element in the hash table (P) as we delete the
645 edge to cases hash table.
646
6531d1be 647 Clear all the TREE_CHAINs to prevent problems with copying of
d6be0d7f
JL
648 SWITCH_EXPRs and structure sharing rules, then free the hash table
649 element. */
650
15814ba0 651static bool
ac7d7749 652edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
15814ba0 653 void *data ATTRIBUTE_UNUSED)
d6be0d7f 654{
d6be0d7f
JL
655 tree t, next;
656
15814ba0 657 for (t = (tree) *value; t; t = next)
d6be0d7f
JL
658 {
659 next = TREE_CHAIN (t);
660 TREE_CHAIN (t) = NULL;
661 }
15814ba0
PB
662
663 *value = NULL;
664 return false;
d6be0d7f
JL
665}
666
667/* Start recording information mapping edges to case labels. */
668
c9784e6d 669void
d6be0d7f
JL
670start_recording_case_labels (void)
671{
672 gcc_assert (edge_to_cases == NULL);
15814ba0 673 edge_to_cases = pointer_map_create ();
d6be0d7f
JL
674}
675
676/* Return nonzero if we are recording information for case labels. */
677
678static bool
679recording_case_labels_p (void)
680{
681 return (edge_to_cases != NULL);
682}
683
684/* Stop recording information mapping edges to case labels and
685 remove any information we have recorded. */
c9784e6d 686void
d6be0d7f
JL
687end_recording_case_labels (void)
688{
15814ba0
PB
689 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
690 pointer_map_destroy (edge_to_cases);
d6be0d7f
JL
691 edge_to_cases = NULL;
692}
693
d6be0d7f
JL
694/* If we are inside a {start,end}_recording_cases block, then return
695 a chain of CASE_LABEL_EXPRs from T which reference E.
696
697 Otherwise return NULL. */
92b6dff3
JL
698
699static tree
d6be0d7f 700get_cases_for_edge (edge e, tree t)
92b6dff3 701{
92b6dff3 702 void **slot;
d6be0d7f
JL
703 size_t i, n;
704 tree vec;
92b6dff3 705
d6be0d7f
JL
706 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
707 chains available. Return NULL so the caller can detect this case. */
708 if (!recording_case_labels_p ())
709 return NULL;
6531d1be 710
15814ba0 711 slot = pointer_map_contains (edge_to_cases, e);
92b6dff3 712 if (slot)
15814ba0 713 return (tree) *slot;
92b6dff3 714
d6be0d7f
JL
715 /* If we did not find E in the hash table, then this must be the first
716 time we have been queried for information about E & T. Add all the
717 elements from T to the hash table then perform the query again. */
92b6dff3 718
d6be0d7f 719 vec = SWITCH_LABELS (t);
92b6dff3 720 n = TREE_VEC_LENGTH (vec);
92b6dff3
JL
721 for (i = 0; i < n; i++)
722 {
15814ba0
PB
723 tree elt = TREE_VEC_ELT (vec, i);
724 tree lab = CASE_LABEL (elt);
d6be0d7f 725 basic_block label_bb = label_to_block (lab);
15814ba0
PB
726 edge this_edge = find_edge (e->src, label_bb);
727
728 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
729 a new chain. */
730 slot = pointer_map_insert (edge_to_cases, this_edge);
731 TREE_CHAIN (elt) = (tree) *slot;
732 *slot = elt;
92b6dff3 733 }
15814ba0
PB
734
735 return (tree) *pointer_map_contains (edge_to_cases, e);
92b6dff3 736}
6de9cd9a
DN
737
738/* Create the edges for a SWITCH_EXPR starting at block BB.
739 At this point, the switch body has been lowered and the
740 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
741
742static void
743make_switch_expr_edges (basic_block bb)
744{
745 tree entry = last_stmt (bb);
746 size_t i, n;
747 tree vec;
748
749 vec = SWITCH_LABELS (entry);
750 n = TREE_VEC_LENGTH (vec);
751
752 for (i = 0; i < n; ++i)
753 {
754 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
755 basic_block label_bb = label_to_block (lab);
d6be0d7f 756 make_edge (bb, label_bb, 0);
6de9cd9a
DN
757 }
758}
759
760
761/* Return the basic block holding label DEST. */
762
763basic_block
997de8ed 764label_to_block_fn (struct function *ifun, tree dest)
6de9cd9a 765{
242229bb
JH
766 int uid = LABEL_DECL_UID (dest);
767
f0b698c1
KH
768 /* We would die hard when faced by an undefined label. Emit a label to
769 the very first basic block. This will hopefully make even the dataflow
242229bb
JH
770 and undefined variable warnings quite right. */
771 if ((errorcount || sorrycount) && uid < 0)
772 {
6531d1be 773 block_stmt_iterator bsi =
24bd1a0b 774 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
242229bb
JH
775 tree stmt;
776
777 stmt = build1 (LABEL_EXPR, void_type_node, dest);
778 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
779 uid = LABEL_DECL_UID (dest);
780 }
e597f337
KH
781 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
782 <= (unsigned int) uid)
98f464e0 783 return NULL;
e597f337 784 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
6de9cd9a
DN
785}
786
4f6c2131
EB
787/* Create edges for an abnormal goto statement at block BB. If FOR_CALL
788 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
789
790void
791make_abnormal_goto_edges (basic_block bb, bool for_call)
792{
793 basic_block target_bb;
794 block_stmt_iterator bsi;
795
796 FOR_EACH_BB (target_bb)
797 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
798 {
799 tree target = bsi_stmt (bsi);
800
801 if (TREE_CODE (target) != LABEL_EXPR)
802 break;
803
804 target = LABEL_EXPR_LABEL (target);
805
806 /* Make an edge to every label block that has been marked as a
807 potential target for a computed goto or a non-local goto. */
808 if ((FORCED_LABEL (target) && !for_call)
809 || (DECL_NONLOCAL (target) && for_call))
810 {
811 make_edge (bb, target_bb, EDGE_ABNORMAL);
812 break;
813 }
814 }
815}
816
6de9cd9a
DN
817/* Create edges for a goto statement at block BB. */
818
819static void
820make_goto_expr_edges (basic_block bb)
821{
6de9cd9a 822 block_stmt_iterator last = bsi_last (bb);
4f6c2131 823 tree goto_t = bsi_stmt (last);
6de9cd9a 824
4f6c2131
EB
825 /* A simple GOTO creates normal edges. */
826 if (simple_goto_p (goto_t))
6de9cd9a 827 {
7d3bf067 828 tree dest = GOTO_DESTINATION (goto_t);
4f6c2131 829 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
4f6c2131 830 e->goto_locus = EXPR_LOCATION (goto_t);
4f6c2131
EB
831 bsi_remove (&last, true);
832 return;
6de9cd9a
DN
833 }
834
4f6c2131
EB
835 /* A computed GOTO creates abnormal edges. */
836 make_abnormal_goto_edges (bb, false);
6de9cd9a
DN
837}
838
839
840/*---------------------------------------------------------------------------
841 Flowgraph analysis
842---------------------------------------------------------------------------*/
843
f698d217
SB
844/* Cleanup useless labels in basic blocks. This is something we wish
845 to do early because it allows us to group case labels before creating
846 the edges for the CFG, and it speeds up block statement iterators in
847 all passes later on.
8b11009b
ZD
848 We rerun this pass after CFG is created, to get rid of the labels that
849 are no longer referenced. After then we do not run it any more, since
850 (almost) no new labels should be created. */
f698d217
SB
851
852/* A map from basic block index to the leading label of that block. */
8b11009b
ZD
853static struct label_record
854{
855 /* The label. */
856 tree label;
857
858 /* True if the label is referenced from somewhere. */
859 bool used;
860} *label_for_bb;
f698d217
SB
861
862/* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
863static void
864update_eh_label (struct eh_region *region)
865{
866 tree old_label = get_eh_region_tree_label (region);
867 if (old_label)
868 {
165b54c3
SB
869 tree new_label;
870 basic_block bb = label_to_block (old_label);
871
872 /* ??? After optimizing, there may be EH regions with labels
873 that have already been removed from the function body, so
874 there is no basic block for them. */
875 if (! bb)
876 return;
877
8b11009b
ZD
878 new_label = label_for_bb[bb->index].label;
879 label_for_bb[bb->index].used = true;
f698d217
SB
880 set_eh_region_tree_label (region, new_label);
881 }
882}
883
242229bb
JH
884/* Given LABEL return the first label in the same basic block. */
885static tree
886main_block_label (tree label)
887{
888 basic_block bb = label_to_block (label);
8b11009b 889 tree main_label = label_for_bb[bb->index].label;
242229bb
JH
890
891 /* label_to_block possibly inserted undefined label into the chain. */
8b11009b
ZD
892 if (!main_label)
893 {
894 label_for_bb[bb->index].label = label;
895 main_label = label;
896 }
897
898 label_for_bb[bb->index].used = true;
899 return main_label;
242229bb
JH
900}
901
b986ebf3 902/* Cleanup redundant labels. This is a three-step process:
f698d217
SB
903 1) Find the leading label for each block.
904 2) Redirect all references to labels to the leading labels.
905 3) Cleanup all useless labels. */
6de9cd9a 906
165b54c3 907void
6de9cd9a
DN
908cleanup_dead_labels (void)
909{
910 basic_block bb;
8b11009b 911 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
6de9cd9a
DN
912
913 /* Find a suitable label for each block. We use the first user-defined
f0b698c1 914 label if there is one, or otherwise just the first label we see. */
6de9cd9a
DN
915 FOR_EACH_BB (bb)
916 {
917 block_stmt_iterator i;
918
919 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
920 {
921 tree label, stmt = bsi_stmt (i);
922
923 if (TREE_CODE (stmt) != LABEL_EXPR)
924 break;
925
926 label = LABEL_EXPR_LABEL (stmt);
927
928 /* If we have not yet seen a label for the current block,
929 remember this one and see if there are more labels. */
8b11009b 930 if (!label_for_bb[bb->index].label)
6de9cd9a 931 {
8b11009b 932 label_for_bb[bb->index].label = label;
6de9cd9a
DN
933 continue;
934 }
935
936 /* If we did see a label for the current block already, but it
937 is an artificially created label, replace it if the current
938 label is a user defined label. */
8b11009b
ZD
939 if (!DECL_ARTIFICIAL (label)
940 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
6de9cd9a 941 {
8b11009b 942 label_for_bb[bb->index].label = label;
6de9cd9a
DN
943 break;
944 }
945 }
946 }
947
f698d217
SB
948 /* Now redirect all jumps/branches to the selected label.
949 First do so for each block ending in a control statement. */
6de9cd9a
DN
950 FOR_EACH_BB (bb)
951 {
952 tree stmt = last_stmt (bb);
953 if (!stmt)
954 continue;
955
956 switch (TREE_CODE (stmt))
957 {
958 case COND_EXPR:
959 {
960 tree true_branch, false_branch;
6de9cd9a
DN
961
962 true_branch = COND_EXPR_THEN (stmt);
963 false_branch = COND_EXPR_ELSE (stmt);
6de9cd9a 964
a9b77cd1
ZD
965 if (true_branch)
966 GOTO_DESTINATION (true_branch)
967 = main_block_label (GOTO_DESTINATION (true_branch));
968 if (false_branch)
969 GOTO_DESTINATION (false_branch)
970 = main_block_label (GOTO_DESTINATION (false_branch));
6de9cd9a
DN
971
972 break;
973 }
6531d1be 974
6de9cd9a
DN
975 case SWITCH_EXPR:
976 {
977 size_t i;
978 tree vec = SWITCH_LABELS (stmt);
979 size_t n = TREE_VEC_LENGTH (vec);
6531d1be 980
6de9cd9a
DN
981 /* Replace all destination labels. */
982 for (i = 0; i < n; ++i)
92b6dff3
JL
983 {
984 tree elt = TREE_VEC_ELT (vec, i);
985 tree label = main_block_label (CASE_LABEL (elt));
d6be0d7f 986 CASE_LABEL (elt) = label;
92b6dff3 987 }
6de9cd9a
DN
988 break;
989 }
990
f667741c
SB
991 /* We have to handle GOTO_EXPRs until they're removed, and we don't
992 remove them until after we've created the CFG edges. */
993 case GOTO_EXPR:
242229bb
JH
994 if (! computed_goto_p (stmt))
995 {
996 GOTO_DESTINATION (stmt)
997 = main_block_label (GOTO_DESTINATION (stmt));
998 break;
999 }
f667741c 1000
6de9cd9a
DN
1001 default:
1002 break;
1003 }
1004 }
1005
f698d217
SB
1006 for_each_eh_region (update_eh_label);
1007
6de9cd9a 1008 /* Finally, purge dead labels. All user-defined labels and labels that
cea0f4f1
AP
1009 can be the target of non-local gotos and labels which have their
1010 address taken are preserved. */
6de9cd9a
DN
1011 FOR_EACH_BB (bb)
1012 {
1013 block_stmt_iterator i;
8b11009b 1014 tree label_for_this_bb = label_for_bb[bb->index].label;
6de9cd9a 1015
8b11009b 1016 if (!label_for_this_bb)
6de9cd9a
DN
1017 continue;
1018
8b11009b
ZD
1019 /* If the main label of the block is unused, we may still remove it. */
1020 if (!label_for_bb[bb->index].used)
1021 label_for_this_bb = NULL;
1022
6de9cd9a
DN
1023 for (i = bsi_start (bb); !bsi_end_p (i); )
1024 {
1025 tree label, stmt = bsi_stmt (i);
1026
1027 if (TREE_CODE (stmt) != LABEL_EXPR)
1028 break;
1029
1030 label = LABEL_EXPR_LABEL (stmt);
1031
1032 if (label == label_for_this_bb
1033 || ! DECL_ARTIFICIAL (label)
cea0f4f1
AP
1034 || DECL_NONLOCAL (label)
1035 || FORCED_LABEL (label))
6de9cd9a
DN
1036 bsi_next (&i);
1037 else
736432ee 1038 bsi_remove (&i, true);
6de9cd9a
DN
1039 }
1040 }
1041
1042 free (label_for_bb);
1043}
1044
f667741c
SB
1045/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1046 and scan the sorted vector of cases. Combine the ones jumping to the
1047 same label.
1048 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1049
165b54c3 1050void
f667741c
SB
1051group_case_labels (void)
1052{
1053 basic_block bb;
1054
1055 FOR_EACH_BB (bb)
1056 {
1057 tree stmt = last_stmt (bb);
1058 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1059 {
1060 tree labels = SWITCH_LABELS (stmt);
1061 int old_size = TREE_VEC_LENGTH (labels);
1062 int i, j, new_size = old_size;
b7814a18
RG
1063 tree default_case = NULL_TREE;
1064 tree default_label = NULL_TREE;
29c4d22b 1065
66efeafc 1066 /* The default label is always the last case in a switch
b7814a18
RG
1067 statement after gimplification if it was not optimized
1068 away. */
1069 if (!CASE_LOW (TREE_VEC_ELT (labels, old_size - 1))
1070 && !CASE_HIGH (TREE_VEC_ELT (labels, old_size - 1)))
1071 {
1072 default_case = TREE_VEC_ELT (labels, old_size - 1);
1073 default_label = CASE_LABEL (default_case);
1074 old_size--;
1075 }
f667741c 1076
b7814a18 1077 /* Look for possible opportunities to merge cases. */
f667741c 1078 i = 0;
b7814a18 1079 while (i < old_size)
f667741c 1080 {
ed9cef22 1081 tree base_case, base_label, base_high;
f667741c
SB
1082 base_case = TREE_VEC_ELT (labels, i);
1083
1e128c5f 1084 gcc_assert (base_case);
f667741c 1085 base_label = CASE_LABEL (base_case);
31e9eea2
SB
1086
1087 /* Discard cases that have the same destination as the
1088 default case. */
1089 if (base_label == default_label)
1090 {
1091 TREE_VEC_ELT (labels, i) = NULL_TREE;
1092 i++;
29c4d22b 1093 new_size--;
31e9eea2
SB
1094 continue;
1095 }
1096
f667741c
SB
1097 base_high = CASE_HIGH (base_case) ?
1098 CASE_HIGH (base_case) : CASE_LOW (base_case);
d717e500 1099 i++;
f667741c
SB
1100 /* Try to merge case labels. Break out when we reach the end
1101 of the label vector or when we cannot merge the next case
1102 label with the current one. */
b7814a18 1103 while (i < old_size)
f667741c 1104 {
d717e500 1105 tree merge_case = TREE_VEC_ELT (labels, i);
f667741c
SB
1106 tree merge_label = CASE_LABEL (merge_case);
1107 tree t = int_const_binop (PLUS_EXPR, base_high,
1108 integer_one_node, 1);
1109
1110 /* Merge the cases if they jump to the same place,
1111 and their ranges are consecutive. */
1112 if (merge_label == base_label
1113 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1114 {
1115 base_high = CASE_HIGH (merge_case) ?
1116 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1117 CASE_HIGH (base_case) = base_high;
1118 TREE_VEC_ELT (labels, i) = NULL_TREE;
1119 new_size--;
d717e500 1120 i++;
f667741c
SB
1121 }
1122 else
1123 break;
1124 }
1125 }
1126
1127 /* Compress the case labels in the label vector, and adjust the
1128 length of the vector. */
1129 for (i = 0, j = 0; i < new_size; i++)
1130 {
1131 while (! TREE_VEC_ELT (labels, j))
1132 j++;
1133 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1134 }
1135 TREE_VEC_LENGTH (labels) = new_size;
1136 }
1137 }
1138}
6de9cd9a
DN
1139
1140/* Checks whether we can merge block B into block A. */
1141
1142static bool
b48d0358 1143tree_can_merge_blocks_p (basic_block a, basic_block b)
6de9cd9a 1144{
9678086d 1145 const_tree stmt;
b48d0358 1146 block_stmt_iterator bsi;
38965eb2 1147 tree phi;
6de9cd9a 1148
c5cbcccf 1149 if (!single_succ_p (a))
6de9cd9a
DN
1150 return false;
1151
c5cbcccf 1152 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
6de9cd9a
DN
1153 return false;
1154
c5cbcccf 1155 if (single_succ (a) != b)
6de9cd9a
DN
1156 return false;
1157
c5cbcccf 1158 if (!single_pred_p (b))
6de9cd9a
DN
1159 return false;
1160
26e75214
KH
1161 if (b == EXIT_BLOCK_PTR)
1162 return false;
6531d1be 1163
6de9cd9a
DN
1164 /* If A ends by a statement causing exceptions or something similar, we
1165 cannot merge the blocks. */
75547801
KG
1166 /* This CONST_CAST is okay because last_stmt doesn't modify its
1167 argument and the return value is assign to a const_tree. */
b48d0358 1168 stmt = last_stmt (CONST_CAST_BB (a));
6de9cd9a
DN
1169 if (stmt && stmt_ends_bb_p (stmt))
1170 return false;
1171
1172 /* Do not allow a block with only a non-local label to be merged. */
1173 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1174 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1175 return false;
1176
38965eb2 1177 /* It must be possible to eliminate all phi nodes in B. If ssa form
8f8bb1d2
ZD
1178 is not up-to-date, we cannot eliminate any phis; however, if only
1179 some symbols as whole are marked for renaming, this is not a problem,
1180 as phi nodes for those symbols are irrelevant in updating anyway. */
38965eb2
ZD
1181 phi = phi_nodes (b);
1182 if (phi)
1183 {
8f8bb1d2 1184 if (name_mappings_registered_p ())
38965eb2
ZD
1185 return false;
1186
1187 for (; phi; phi = PHI_CHAIN (phi))
1188 if (!is_gimple_reg (PHI_RESULT (phi))
1189 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1190 return false;
1191 }
6de9cd9a
DN
1192
1193 /* Do not remove user labels. */
b48d0358 1194 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
6de9cd9a 1195 {
b48d0358 1196 stmt = bsi_stmt (bsi);
6de9cd9a
DN
1197 if (TREE_CODE (stmt) != LABEL_EXPR)
1198 break;
1199 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1200 return false;
1201 }
1202
2b271002
ZD
1203 /* Protect the loop latches. */
1204 if (current_loops
1205 && b->loop_father->latch == b)
1206 return false;
1207
6de9cd9a
DN
1208 return true;
1209}
1210
38965eb2
ZD
1211/* Replaces all uses of NAME by VAL. */
1212
684aaf29 1213void
38965eb2
ZD
1214replace_uses_by (tree name, tree val)
1215{
1216 imm_use_iterator imm_iter;
1217 use_operand_p use;
1218 tree stmt;
1219 edge e;
38965eb2 1220
6c00f606 1221 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
38965eb2 1222 {
cfaab3a9
DN
1223 if (TREE_CODE (stmt) != PHI_NODE)
1224 push_stmt_changes (&stmt);
1225
6c00f606
AM
1226 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1227 {
1228 replace_exp (use, val);
38965eb2 1229
6c00f606 1230 if (TREE_CODE (stmt) == PHI_NODE)
38965eb2 1231 {
6c00f606
AM
1232 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1233 if (e->flags & EDGE_ABNORMAL)
1234 {
1235 /* This can only occur for virtual operands, since
1236 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1237 would prevent replacement. */
1238 gcc_assert (!is_gimple_reg (name));
1239 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1240 }
38965eb2
ZD
1241 }
1242 }
cfaab3a9 1243
6c00f606
AM
1244 if (TREE_CODE (stmt) != PHI_NODE)
1245 {
1246 tree rhs;
9af0df6b 1247
6c00f606 1248 fold_stmt_inplace (stmt);
672987e8
ZD
1249 if (cfgcleanup_altered_bbs)
1250 bitmap_set_bit (cfgcleanup_altered_bbs, bb_for_stmt (stmt)->index);
cfaab3a9
DN
1251
1252 /* FIXME. This should go in pop_stmt_changes. */
6c00f606
AM
1253 rhs = get_rhs (stmt);
1254 if (TREE_CODE (rhs) == ADDR_EXPR)
1255 recompute_tree_invariant_for_addr_expr (rhs);
9af0df6b 1256
6c00f606 1257 maybe_clean_or_replace_eh_stmt (stmt, stmt);
cfaab3a9
DN
1258
1259 pop_stmt_changes (&stmt);
6c00f606 1260 }
38965eb2 1261 }
6531d1be 1262
40b448ef 1263 gcc_assert (has_zero_uses (name));
d5ab5675
ZD
1264
1265 /* Also update the trees stored in loop structures. */
1266 if (current_loops)
1267 {
1268 struct loop *loop;
42fd6772 1269 loop_iterator li;
d5ab5675 1270
42fd6772 1271 FOR_EACH_LOOP (li, loop, 0)
d5ab5675 1272 {
42fd6772 1273 substitute_in_loop_info (loop, name, val);
d5ab5675
ZD
1274 }
1275 }
38965eb2 1276}
6de9cd9a
DN
1277
1278/* Merge block B into block A. */
1279
1280static void
1281tree_merge_blocks (basic_block a, basic_block b)
1282{
1283 block_stmt_iterator bsi;
1284 tree_stmt_iterator last;
38965eb2 1285 tree phi;
6de9cd9a
DN
1286
1287 if (dump_file)
1288 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1289
c4f548b8
DN
1290 /* Remove all single-valued PHI nodes from block B of the form
1291 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
38965eb2
ZD
1292 bsi = bsi_last (a);
1293 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1294 {
1295 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1296 tree copy;
d7f0e25c
ZD
1297 bool may_replace_uses = may_propagate_copy (def, use);
1298
7c8eb293
ZD
1299 /* In case we maintain loop closed ssa form, do not propagate arguments
1300 of loop exit phi nodes. */
d7f0e25c 1301 if (current_loops
f87000d0 1302 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
d7f0e25c
ZD
1303 && is_gimple_reg (def)
1304 && TREE_CODE (use) == SSA_NAME
1305 && a->loop_father != b->loop_father)
1306 may_replace_uses = false;
1307
1308 if (!may_replace_uses)
38965eb2
ZD
1309 {
1310 gcc_assert (is_gimple_reg (def));
1311
128a79fb 1312 /* Note that just emitting the copies is fine -- there is no problem
38965eb2
ZD
1313 with ordering of phi nodes. This is because A is the single
1314 predecessor of B, therefore results of the phi nodes cannot
1315 appear as arguments of the phi nodes. */
939409af 1316 copy = build_gimple_modify_stmt (def, use);
38965eb2 1317 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
38965eb2 1318 SSA_NAME_DEF_STMT (def) = copy;
611021e1 1319 remove_phi_node (phi, NULL, false);
38965eb2
ZD
1320 }
1321 else
611021e1 1322 {
d0f76c4b
RG
1323 /* If we deal with a PHI for virtual operands, we can simply
1324 propagate these without fussing with folding or updating
1325 the stmt. */
1326 if (!is_gimple_reg (def))
1327 {
1328 imm_use_iterator iter;
1329 use_operand_p use_p;
1330 tree stmt;
1331
1332 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1333 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1334 SET_USE (use_p, use);
1335 }
1336 else
1337 replace_uses_by (def, use);
611021e1
RK
1338 remove_phi_node (phi, NULL, true);
1339 }
38965eb2
ZD
1340 }
1341
6de9cd9a
DN
1342 /* Ensure that B follows A. */
1343 move_block_after (b, a);
1344
c5cbcccf 1345 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1e128c5f 1346 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
6de9cd9a
DN
1347
1348 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1349 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1350 {
1351 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
be477406
JL
1352 {
1353 tree label = bsi_stmt (bsi);
1354
736432ee 1355 bsi_remove (&bsi, false);
be477406
JL
1356 /* Now that we can thread computed gotos, we might have
1357 a situation where we have a forced label in block B
1358 However, the label at the start of block B might still be
1359 used in other ways (think about the runtime checking for
1360 Fortran assigned gotos). So we can not just delete the
1361 label. Instead we move the label to the start of block A. */
1362 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1363 {
1364 block_stmt_iterator dest_bsi = bsi_start (a);
1365 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1366 }
1367 }
6de9cd9a
DN
1368 else
1369 {
0a4fe58f 1370 change_bb_for_stmt (bsi_stmt (bsi), a);
6de9cd9a
DN
1371 bsi_next (&bsi);
1372 }
1373 }
1374
1375 /* Merge the chains. */
7506e1cb
ZD
1376 last = tsi_last (bb_stmt_list (a));
1377 tsi_link_after (&last, bb_stmt_list (b), TSI_NEW_STMT);
1378 set_bb_stmt_list (b, NULL_TREE);
672987e8
ZD
1379
1380 if (cfgcleanup_altered_bbs)
1381 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
6de9cd9a
DN
1382}
1383
1384
bc23502b
PB
1385/* Return the one of two successors of BB that is not reachable by a
1386 reached by a complex edge, if there is one. Else, return BB. We use
1387 this in optimizations that use post-dominators for their heuristics,
1388 to catch the cases in C++ where function calls are involved. */
6531d1be 1389
bc23502b 1390basic_block
6531d1be 1391single_noncomplex_succ (basic_block bb)
bc23502b
PB
1392{
1393 edge e0, e1;
1394 if (EDGE_COUNT (bb->succs) != 2)
1395 return bb;
6531d1be 1396
bc23502b
PB
1397 e0 = EDGE_SUCC (bb, 0);
1398 e1 = EDGE_SUCC (bb, 1);
1399 if (e0->flags & EDGE_COMPLEX)
1400 return e1->dest;
1401 if (e1->flags & EDGE_COMPLEX)
1402 return e0->dest;
6531d1be 1403
bc23502b 1404 return bb;
6531d1be 1405}
bc23502b
PB
1406
1407
6de9cd9a
DN
1408/* Walk the function tree removing unnecessary statements.
1409
1410 * Empty statement nodes are removed
1411
1412 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1413
1414 * Unnecessary COND_EXPRs are removed
1415
1416 * Some unnecessary BIND_EXPRs are removed
1417
1418 Clearly more work could be done. The trick is doing the analysis
1419 and removal fast enough to be a net improvement in compile times.
1420
1421 Note that when we remove a control structure such as a COND_EXPR
1422 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1423 to ensure we eliminate all the useless code. */
1424
1425struct rus_data
1426{
1427 tree *last_goto;
1428 bool repeat;
1429 bool may_throw;
1430 bool may_branch;
1431 bool has_label;
1432};
1433
1434static void remove_useless_stmts_1 (tree *, struct rus_data *);
1435
1436static bool
1437remove_useless_stmts_warn_notreached (tree stmt)
1438{
9506ac2b 1439 if (EXPR_HAS_LOCATION (stmt))
6de9cd9a 1440 {
9506ac2b 1441 location_t loc = EXPR_LOCATION (stmt);
43e05e45
SB
1442 if (LOCATION_LINE (loc) > 0)
1443 {
c5409249 1444 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
43e05e45
SB
1445 return true;
1446 }
6de9cd9a
DN
1447 }
1448
1449 switch (TREE_CODE (stmt))
1450 {
1451 case STATEMENT_LIST:
1452 {
1453 tree_stmt_iterator i;
1454 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1455 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1456 return true;
1457 }
1458 break;
1459
1460 case COND_EXPR:
1461 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1462 return true;
1463 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1464 return true;
1465 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1466 return true;
1467 break;
1468
1469 case TRY_FINALLY_EXPR:
1470 case TRY_CATCH_EXPR:
1471 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1472 return true;
1473 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1474 return true;
1475 break;
1476
1477 case CATCH_EXPR:
1478 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1479 case EH_FILTER_EXPR:
1480 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1481 case BIND_EXPR:
1482 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1483
1484 default:
1485 /* Not a live container. */
1486 break;
1487 }
1488
1489 return false;
1490}
1491
1492static void
1493remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1494{
1495 tree then_clause, else_clause, cond;
1496 bool save_has_label, then_has_label, else_has_label;
1497
1498 save_has_label = data->has_label;
1499 data->has_label = false;
1500 data->last_goto = NULL;
1501
1502 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1503
1504 then_has_label = data->has_label;
1505 data->has_label = false;
1506 data->last_goto = NULL;
1507
1508 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1509
1510 else_has_label = data->has_label;
1511 data->has_label = save_has_label | then_has_label | else_has_label;
1512
6de9cd9a
DN
1513 then_clause = COND_EXPR_THEN (*stmt_p);
1514 else_clause = COND_EXPR_ELSE (*stmt_p);
18faa5da 1515 cond = fold (COND_EXPR_COND (*stmt_p));
6de9cd9a
DN
1516
1517 /* If neither arm does anything at all, we can remove the whole IF. */
1518 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1519 {
1520 *stmt_p = build_empty_stmt ();
1521 data->repeat = true;
1522 }
1523
1524 /* If there are no reachable statements in an arm, then we can
1525 zap the entire conditional. */
1526 else if (integer_nonzerop (cond) && !else_has_label)
1527 {
1528 if (warn_notreached)
1529 remove_useless_stmts_warn_notreached (else_clause);
1530 *stmt_p = then_clause;
1531 data->repeat = true;
1532 }
1533 else if (integer_zerop (cond) && !then_has_label)
1534 {
1535 if (warn_notreached)
1536 remove_useless_stmts_warn_notreached (then_clause);
1537 *stmt_p = else_clause;
1538 data->repeat = true;
1539 }
1540
1541 /* Check a couple of simple things on then/else with single stmts. */
1542 else
1543 {
1544 tree then_stmt = expr_only (then_clause);
1545 tree else_stmt = expr_only (else_clause);
1546
1547 /* Notice branches to a common destination. */
1548 if (then_stmt && else_stmt
1549 && TREE_CODE (then_stmt) == GOTO_EXPR
1550 && TREE_CODE (else_stmt) == GOTO_EXPR
1551 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1552 {
1553 *stmt_p = then_stmt;
1554 data->repeat = true;
1555 }
1556
1557 /* If the THEN/ELSE clause merely assigns a value to a variable or
1558 parameter which is already known to contain that value, then
1559 remove the useless THEN/ELSE clause. */
1560 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1561 {
1562 if (else_stmt
07beea0d
AH
1563 && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
1564 && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
1565 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
6de9cd9a
DN
1566 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1567 }
1568 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1569 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1570 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1571 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1572 {
1573 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1574 ? then_stmt : else_stmt);
1575 tree *location = (TREE_CODE (cond) == EQ_EXPR
1576 ? &COND_EXPR_THEN (*stmt_p)
1577 : &COND_EXPR_ELSE (*stmt_p));
1578
1579 if (stmt
07beea0d
AH
1580 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1581 && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1582 && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
6de9cd9a
DN
1583 *location = alloc_stmt_list ();
1584 }
1585 }
1586
1587 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1588 would be re-introduced during lowering. */
1589 data->last_goto = NULL;
1590}
1591
1592
1593static void
1594remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1595{
1596 bool save_may_branch, save_may_throw;
1597 bool this_may_branch, this_may_throw;
1598
1599 /* Collect may_branch and may_throw information for the body only. */
1600 save_may_branch = data->may_branch;
1601 save_may_throw = data->may_throw;
1602 data->may_branch = false;
1603 data->may_throw = false;
1604 data->last_goto = NULL;
1605
1606 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1607
1608 this_may_branch = data->may_branch;
1609 this_may_throw = data->may_throw;
1610 data->may_branch |= save_may_branch;
1611 data->may_throw |= save_may_throw;
1612 data->last_goto = NULL;
1613
1614 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1615
1616 /* If the body is empty, then we can emit the FINALLY block without
1617 the enclosing TRY_FINALLY_EXPR. */
1618 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1619 {
1620 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1621 data->repeat = true;
1622 }
1623
1624 /* If the handler is empty, then we can emit the TRY block without
1625 the enclosing TRY_FINALLY_EXPR. */
1626 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1627 {
1628 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1629 data->repeat = true;
1630 }
1631
1632 /* If the body neither throws, nor branches, then we can safely
1633 string the TRY and FINALLY blocks together. */
1634 else if (!this_may_branch && !this_may_throw)
1635 {
1636 tree stmt = *stmt_p;
1637 *stmt_p = TREE_OPERAND (stmt, 0);
1638 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1639 data->repeat = true;
1640 }
1641}
1642
1643
1644static void
1645remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1646{
1647 bool save_may_throw, this_may_throw;
1648 tree_stmt_iterator i;
1649 tree stmt;
1650
1651 /* Collect may_throw information for the body only. */
1652 save_may_throw = data->may_throw;
1653 data->may_throw = false;
1654 data->last_goto = NULL;
1655
1656 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1657
1658 this_may_throw = data->may_throw;
1659 data->may_throw = save_may_throw;
1660
1661 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1662 if (!this_may_throw)
1663 {
1664 if (warn_notreached)
1665 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1666 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1667 data->repeat = true;
1668 return;
1669 }
1670
1671 /* Process the catch clause specially. We may be able to tell that
1672 no exceptions propagate past this point. */
1673
1674 this_may_throw = true;
1675 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1676 stmt = tsi_stmt (i);
1677 data->last_goto = NULL;
1678
1679 switch (TREE_CODE (stmt))
1680 {
1681 case CATCH_EXPR:
1682 for (; !tsi_end_p (i); tsi_next (&i))
1683 {
1684 stmt = tsi_stmt (i);
1685 /* If we catch all exceptions, then the body does not
1686 propagate exceptions past this point. */
1687 if (CATCH_TYPES (stmt) == NULL)
1688 this_may_throw = false;
1689 data->last_goto = NULL;
1690 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1691 }
1692 break;
1693
1694 case EH_FILTER_EXPR:
1695 if (EH_FILTER_MUST_NOT_THROW (stmt))
1696 this_may_throw = false;
1697 else if (EH_FILTER_TYPES (stmt) == NULL)
1698 this_may_throw = false;
1699 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1700 break;
1701
1702 default:
1703 /* Otherwise this is a cleanup. */
1704 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1705
1706 /* If the cleanup is empty, then we can emit the TRY block without
1707 the enclosing TRY_CATCH_EXPR. */
1708 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1709 {
1710 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1711 data->repeat = true;
1712 }
1713 break;
1714 }
1715 data->may_throw |= this_may_throw;
1716}
1717
1718
1719static void
1720remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1721{
1722 tree block;
1723
1724 /* First remove anything underneath the BIND_EXPR. */
1725 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1726
1727 /* If the BIND_EXPR has no variables, then we can pull everything
1728 up one level and remove the BIND_EXPR, unless this is the toplevel
1729 BIND_EXPR for the current function or an inlined function.
1730
1731 When this situation occurs we will want to apply this
1732 optimization again. */
1733 block = BIND_EXPR_BLOCK (*stmt_p);
1734 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1735 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1736 && (! block
1737 || ! BLOCK_ABSTRACT_ORIGIN (block)
1738 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1739 != FUNCTION_DECL)))
1740 {
1741 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1742 data->repeat = true;
1743 }
1744}
1745
1746
1747static void
1748remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1749{
1750 tree dest = GOTO_DESTINATION (*stmt_p);
1751
1752 data->may_branch = true;
1753 data->last_goto = NULL;
1754
1755 /* Record the last goto expr, so that we can delete it if unnecessary. */
1756 if (TREE_CODE (dest) == LABEL_DECL)
1757 data->last_goto = stmt_p;
1758}
1759
1760
1761static void
1762remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1763{
1764 tree label = LABEL_EXPR_LABEL (*stmt_p);
1765
1766 data->has_label = true;
1767
1768 /* We do want to jump across non-local label receiver code. */
1769 if (DECL_NONLOCAL (label))
1770 data->last_goto = NULL;
1771
1772 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1773 {
1774 *data->last_goto = build_empty_stmt ();
1775 data->repeat = true;
1776 }
1777
1778 /* ??? Add something here to delete unused labels. */
1779}
1780
1781
1782/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1783 decl. This allows us to eliminate redundant or useless
6531d1be 1784 calls to "const" functions.
6de9cd9a
DN
1785
1786 Gimplifier already does the same operation, but we may notice functions
1787 being const and pure once their calls has been gimplified, so we need
1788 to update the flag. */
1789
1790static void
1791update_call_expr_flags (tree call)
1792{
1793 tree decl = get_callee_fndecl (call);
1794 if (!decl)
1795 return;
1796 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1797 TREE_SIDE_EFFECTS (call) = 0;
1798 if (TREE_NOTHROW (decl))
1799 TREE_NOTHROW (call) = 1;
1800}
1801
1802
1803/* T is CALL_EXPR. Set current_function_calls_* flags. */
1804
1805void
1806notice_special_calls (tree t)
1807{
1808 int flags = call_expr_flags (t);
1809
1810 if (flags & ECF_MAY_BE_ALLOCA)
1811 current_function_calls_alloca = true;
1812 if (flags & ECF_RETURNS_TWICE)
1813 current_function_calls_setjmp = true;
1814}
1815
1816
1817/* Clear flags set by notice_special_calls. Used by dead code removal
1818 to update the flags. */
1819
1820void
1821clear_special_calls (void)
1822{
1823 current_function_calls_alloca = false;
1824 current_function_calls_setjmp = false;
1825}
1826
1827
1828static void
1829remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1830{
cd709752 1831 tree t = *tp, op;
6de9cd9a
DN
1832
1833 switch (TREE_CODE (t))
1834 {
1835 case COND_EXPR:
1836 remove_useless_stmts_cond (tp, data);
1837 break;
1838
1839 case TRY_FINALLY_EXPR:
1840 remove_useless_stmts_tf (tp, data);
1841 break;
1842
1843 case TRY_CATCH_EXPR:
1844 remove_useless_stmts_tc (tp, data);
1845 break;
1846
1847 case BIND_EXPR:
1848 remove_useless_stmts_bind (tp, data);
1849 break;
1850
1851 case GOTO_EXPR:
1852 remove_useless_stmts_goto (tp, data);
1853 break;
1854
1855 case LABEL_EXPR:
1856 remove_useless_stmts_label (tp, data);
1857 break;
1858
1859 case RETURN_EXPR:
53e782e5 1860 fold_stmt (tp);
6de9cd9a
DN
1861 data->last_goto = NULL;
1862 data->may_branch = true;
1863 break;
1864
1865 case CALL_EXPR:
53e782e5 1866 fold_stmt (tp);
6de9cd9a
DN
1867 data->last_goto = NULL;
1868 notice_special_calls (t);
1869 update_call_expr_flags (t);
1870 if (tree_could_throw_p (t))
1871 data->may_throw = true;
1872 break;
1873
1874 case MODIFY_EXPR:
07beea0d
AH
1875 gcc_unreachable ();
1876
1877 case GIMPLE_MODIFY_STMT:
6de9cd9a 1878 data->last_goto = NULL;
53e782e5 1879 fold_stmt (tp);
cd709752
RH
1880 op = get_call_expr_in (t);
1881 if (op)
6de9cd9a 1882 {
cd709752
RH
1883 update_call_expr_flags (op);
1884 notice_special_calls (op);
6de9cd9a
DN
1885 }
1886 if (tree_could_throw_p (t))
1887 data->may_throw = true;
1888 break;
1889
1890 case STATEMENT_LIST:
1891 {
1892 tree_stmt_iterator i = tsi_start (t);
1893 while (!tsi_end_p (i))
1894 {
1895 t = tsi_stmt (i);
1896 if (IS_EMPTY_STMT (t))
1897 {
1898 tsi_delink (&i);
1899 continue;
1900 }
6531d1be 1901
6de9cd9a
DN
1902 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1903
1904 t = tsi_stmt (i);
1905 if (TREE_CODE (t) == STATEMENT_LIST)
1906 {
1907 tsi_link_before (&i, t, TSI_SAME_STMT);
1908 tsi_delink (&i);
1909 }
1910 else
1911 tsi_next (&i);
1912 }
1913 }
1914 break;
8e14584d 1915 case ASM_EXPR:
53e782e5
AP
1916 fold_stmt (tp);
1917 data->last_goto = NULL;
1918 break;
6de9cd9a 1919
3088d404
JJ
1920 case OMP_PARALLEL:
1921 /* Make sure the outermost BIND_EXPR in OMP_BODY isn't removed
1922 as useless. */
1923 remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_BODY (*tp)), data);
1924 data->last_goto = NULL;
1925 break;
1926
1927 case OMP_SECTIONS:
1928 case OMP_SINGLE:
1929 case OMP_SECTION:
1930 case OMP_MASTER :
1931 case OMP_ORDERED:
1932 case OMP_CRITICAL:
1933 remove_useless_stmts_1 (&OMP_BODY (*tp), data);
1934 data->last_goto = NULL;
1935 break;
1936
1937 case OMP_FOR:
1938 remove_useless_stmts_1 (&OMP_FOR_BODY (*tp), data);
1939 data->last_goto = NULL;
1940 if (OMP_FOR_PRE_BODY (*tp))
1941 {
1942 remove_useless_stmts_1 (&OMP_FOR_PRE_BODY (*tp), data);
1943 data->last_goto = NULL;
1944 }
1945 break;
1946
6de9cd9a
DN
1947 default:
1948 data->last_goto = NULL;
1949 break;
1950 }
1951}
1952
c2924966 1953static unsigned int
6de9cd9a
DN
1954remove_useless_stmts (void)
1955{
1956 struct rus_data data;
1957
1958 clear_special_calls ();
1959
1960 do
1961 {
1962 memset (&data, 0, sizeof (data));
1963 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1964 }
1965 while (data.repeat);
c2924966 1966 return 0;
6de9cd9a
DN
1967}
1968
1969
8ddbbcae 1970struct gimple_opt_pass pass_remove_useless_stmts =
6de9cd9a 1971{
8ddbbcae
JH
1972 {
1973 GIMPLE_PASS,
6de9cd9a
DN
1974 "useless", /* name */
1975 NULL, /* gate */
1976 remove_useless_stmts, /* execute */
1977 NULL, /* sub */
1978 NULL, /* next */
1979 0, /* static_pass_number */
1980 0, /* tv_id */
9e5a3e6c
RH
1981 PROP_gimple_any, /* properties_required */
1982 0, /* properties_provided */
6de9cd9a
DN
1983 0, /* properties_destroyed */
1984 0, /* todo_flags_start */
8ddbbcae
JH
1985 TODO_dump_func /* todo_flags_finish */
1986 }
6de9cd9a
DN
1987};
1988
6de9cd9a
DN
1989/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1990
1991static void
1992remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1993{
1994 tree phi;
1995
1996 /* Since this block is no longer reachable, we can just delete all
1997 of its PHI nodes. */
1998 phi = phi_nodes (bb);
1999 while (phi)
2000 {
17192884 2001 tree next = PHI_CHAIN (phi);
9b3b55a1 2002 remove_phi_node (phi, NULL_TREE, true);
6de9cd9a
DN
2003 phi = next;
2004 }
2005
2006 /* Remove edges to BB's successors. */
628f6a4e 2007 while (EDGE_COUNT (bb->succs) > 0)
d0d2cc21 2008 remove_edge (EDGE_SUCC (bb, 0));
6de9cd9a
DN
2009}
2010
2011
2012/* Remove statements of basic block BB. */
2013
2014static void
2015remove_bb (basic_block bb)
2016{
2017 block_stmt_iterator i;
dbce1570 2018 source_location loc = UNKNOWN_LOCATION;
6de9cd9a
DN
2019
2020 if (dump_file)
2021 {
2022 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2023 if (dump_flags & TDF_DETAILS)
2024 {
2025 dump_bb (bb, dump_file, 0);
2026 fprintf (dump_file, "\n");
2027 }
2028 }
2029
2b271002
ZD
2030 if (current_loops)
2031 {
2032 struct loop *loop = bb->loop_father;
2033
598ec7bd
ZD
2034 /* If a loop gets removed, clean up the information associated
2035 with it. */
2b271002
ZD
2036 if (loop->latch == bb
2037 || loop->header == bb)
598ec7bd 2038 free_numbers_of_iterations_estimates_loop (loop);
2b271002
ZD
2039 }
2040
6de9cd9a 2041 /* Remove all the instructions in the block. */
7506e1cb 2042 if (bb_stmt_list (bb) != NULL_TREE)
6de9cd9a 2043 {
7506e1cb 2044 for (i = bsi_start (bb); !bsi_end_p (i);)
77568960 2045 {
7506e1cb
ZD
2046 tree stmt = bsi_stmt (i);
2047 if (TREE_CODE (stmt) == LABEL_EXPR
2048 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2049 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2050 {
2051 basic_block new_bb;
2052 block_stmt_iterator new_bsi;
2053
2054 /* A non-reachable non-local label may still be referenced.
2055 But it no longer needs to carry the extra semantics of
2056 non-locality. */
2057 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2058 {
2059 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2060 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2061 }
bb1ecfe8 2062
7506e1cb
ZD
2063 new_bb = bb->prev_bb;
2064 new_bsi = bsi_start (new_bb);
2065 bsi_remove (&i, false);
2066 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2067 }
2068 else
bb1ecfe8 2069 {
7506e1cb
ZD
2070 /* Release SSA definitions if we are in SSA. Note that we
2071 may be called when not in SSA. For example,
2072 final_cleanup calls this function via
2073 cleanup_tree_cfg. */
2074 if (gimple_in_ssa_p (cfun))
2075 release_defs (stmt);
2076
2077 bsi_remove (&i, true);
bb1ecfe8 2078 }
6531d1be 2079
7506e1cb
ZD
2080 /* Don't warn for removed gotos. Gotos are often removed due to
2081 jump threading, thus resulting in bogus warnings. Not great,
2082 since this way we lose warnings for gotos in the original
2083 program that are indeed unreachable. */
2084 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2085 {
7506e1cb
ZD
2086 if (EXPR_HAS_LOCATION (stmt))
2087 loc = EXPR_LOCATION (stmt);
7506e1cb 2088 }
43e05e45 2089 }
6de9cd9a
DN
2090 }
2091
2092 /* If requested, give a warning that the first statement in the
2093 block is unreachable. We walk statements backwards in the
2094 loop above, so the last statement we process is the first statement
2095 in the block. */
5ffeb913 2096 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
44c21c7f 2097 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
6de9cd9a
DN
2098
2099 remove_phi_nodes_and_edges_for_unreachable_block (bb);
7506e1cb 2100 bb->il.tree = NULL;
6de9cd9a
DN
2101}
2102
6de9cd9a 2103
35920270
KH
2104/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2105 predicate VAL, return the edge that will be taken out of the block.
2106 If VAL does not match a unique edge, NULL is returned. */
6de9cd9a
DN
2107
2108edge
2109find_taken_edge (basic_block bb, tree val)
2110{
2111 tree stmt;
2112
2113 stmt = last_stmt (bb);
2114
1e128c5f
GB
2115 gcc_assert (stmt);
2116 gcc_assert (is_ctrl_stmt (stmt));
65f4323d 2117 gcc_assert (val);
6de9cd9a 2118
be477406 2119 if (! is_gimple_min_invariant (val))
6de9cd9a
DN
2120 return NULL;
2121
2122 if (TREE_CODE (stmt) == COND_EXPR)
2123 return find_taken_edge_cond_expr (bb, val);
2124
2125 if (TREE_CODE (stmt) == SWITCH_EXPR)
2126 return find_taken_edge_switch_expr (bb, val);
2127
be477406 2128 if (computed_goto_p (stmt))
1799efef
JL
2129 {
2130 /* Only optimize if the argument is a label, if the argument is
2131 not a label then we can not construct a proper CFG.
2132
2133 It may be the case that we only need to allow the LABEL_REF to
2134 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2135 appear inside a LABEL_EXPR just to be safe. */
2136 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2137 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2138 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2139 return NULL;
2140 }
be477406 2141
35920270 2142 gcc_unreachable ();
6de9cd9a
DN
2143}
2144
be477406
JL
2145/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2146 statement, determine which of the outgoing edges will be taken out of the
2147 block. Return NULL if either edge may be taken. */
2148
2149static edge
2150find_taken_edge_computed_goto (basic_block bb, tree val)
2151{
2152 basic_block dest;
2153 edge e = NULL;
2154
2155 dest = label_to_block (val);
2156 if (dest)
2157 {
2158 e = find_edge (bb, dest);
2159 gcc_assert (e != NULL);
2160 }
2161
2162 return e;
2163}
6de9cd9a
DN
2164
2165/* Given a constant value VAL and the entry block BB to a COND_EXPR
2166 statement, determine which of the two edges will be taken out of the
2167 block. Return NULL if either edge may be taken. */
2168
2169static edge
2170find_taken_edge_cond_expr (basic_block bb, tree val)
2171{
2172 edge true_edge, false_edge;
2173
2174 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
6531d1be 2175
f1b19062 2176 gcc_assert (TREE_CODE (val) == INTEGER_CST);
6e682d7e 2177 return (integer_zerop (val) ? false_edge : true_edge);
6de9cd9a
DN
2178}
2179
fca01525 2180/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
6de9cd9a
DN
2181 statement, determine which edge will be taken out of the block. Return
2182 NULL if any edge may be taken. */
2183
2184static edge
2185find_taken_edge_switch_expr (basic_block bb, tree val)
2186{
2187 tree switch_expr, taken_case;
2188 basic_block dest_bb;
2189 edge e;
2190
6de9cd9a
DN
2191 switch_expr = last_stmt (bb);
2192 taken_case = find_case_label_for_value (switch_expr, val);
2193 dest_bb = label_to_block (CASE_LABEL (taken_case));
2194
2195 e = find_edge (bb, dest_bb);
1e128c5f 2196 gcc_assert (e);
6de9cd9a
DN
2197 return e;
2198}
2199
2200
f667741c
SB
2201/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2202 We can make optimal use here of the fact that the case labels are
2203 sorted: We can do a binary search for a case matching VAL. */
6de9cd9a
DN
2204
2205static tree
2206find_case_label_for_value (tree switch_expr, tree val)
2207{
2208 tree vec = SWITCH_LABELS (switch_expr);
f667741c
SB
2209 size_t low, high, n = TREE_VEC_LENGTH (vec);
2210 tree default_case = TREE_VEC_ELT (vec, n - 1);
6de9cd9a 2211
f667741c 2212 for (low = -1, high = n - 1; high - low > 1; )
6de9cd9a 2213 {
f667741c 2214 size_t i = (high + low) / 2;
6de9cd9a 2215 tree t = TREE_VEC_ELT (vec, i);
f667741c
SB
2216 int cmp;
2217
2218 /* Cache the result of comparing CASE_LOW and val. */
2219 cmp = tree_int_cst_compare (CASE_LOW (t), val);
6de9cd9a 2220
f667741c
SB
2221 if (cmp > 0)
2222 high = i;
2223 else
2224 low = i;
2225
2226 if (CASE_HIGH (t) == NULL)
6de9cd9a 2227 {
f667741c
SB
2228 /* A singe-valued case label. */
2229 if (cmp == 0)
6de9cd9a
DN
2230 return t;
2231 }
2232 else
2233 {
2234 /* A case range. We can only handle integer ranges. */
f667741c 2235 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
6de9cd9a
DN
2236 return t;
2237 }
2238 }
2239
6de9cd9a
DN
2240 return default_case;
2241}
2242
2243
6de9cd9a
DN
2244
2245
6de9cd9a
DN
2246/*---------------------------------------------------------------------------
2247 Debugging functions
2248---------------------------------------------------------------------------*/
2249
2250/* Dump tree-specific information of block BB to file OUTF. */
2251
2252void
2253tree_dump_bb (basic_block bb, FILE *outf, int indent)
2254{
38635499 2255 dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
6de9cd9a
DN
2256}
2257
2258
2259/* Dump a basic block on stderr. */
2260
2261void
2262debug_tree_bb (basic_block bb)
2263{
2264 dump_bb (bb, stderr, 0);
2265}
2266
2267
2268/* Dump basic block with index N on stderr. */
2269
2270basic_block
2271debug_tree_bb_n (int n)
2272{
2273 debug_tree_bb (BASIC_BLOCK (n));
2274 return BASIC_BLOCK (n);
6531d1be 2275}
6de9cd9a
DN
2276
2277
2278/* Dump the CFG on stderr.
2279
2280 FLAGS are the same used by the tree dumping functions
6531d1be 2281 (see TDF_* in tree-pass.h). */
6de9cd9a
DN
2282
2283void
2284debug_tree_cfg (int flags)
2285{
2286 dump_tree_cfg (stderr, flags);
2287}
2288
2289
2290/* Dump the program showing basic block boundaries on the given FILE.
2291
2292 FLAGS are the same used by the tree dumping functions (see TDF_* in
2293 tree.h). */
2294
2295void
2296dump_tree_cfg (FILE *file, int flags)
2297{
2298 if (flags & TDF_DETAILS)
2299 {
2300 const char *funcname
673fda6b 2301 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2302
2303 fputc ('\n', file);
2304 fprintf (file, ";; Function %s\n\n", funcname);
2305 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2306 n_basic_blocks, n_edges, last_basic_block);
2307
2308 brief_dump_cfg (file);
2309 fprintf (file, "\n");
2310 }
2311
2312 if (flags & TDF_STATS)
2313 dump_cfg_stats (file);
2314
2315 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2316}
2317
2318
2319/* Dump CFG statistics on FILE. */
2320
2321void
2322dump_cfg_stats (FILE *file)
2323{
2324 static long max_num_merged_labels = 0;
2325 unsigned long size, total = 0;
7b0cab99 2326 long num_edges;
6de9cd9a
DN
2327 basic_block bb;
2328 const char * const fmt_str = "%-30s%-13s%12s\n";
f7fda749 2329 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
cac50d94 2330 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
6de9cd9a
DN
2331 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2332 const char *funcname
673fda6b 2333 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2334
2335
2336 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2337
2338 fprintf (file, "---------------------------------------------------------\n");
2339 fprintf (file, fmt_str, "", " Number of ", "Memory");
2340 fprintf (file, fmt_str, "", " instances ", "used ");
2341 fprintf (file, "---------------------------------------------------------\n");
2342
2343 size = n_basic_blocks * sizeof (struct basic_block_def);
2344 total += size;
f7fda749
RH
2345 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2346 SCALE (size), LABEL (size));
6de9cd9a 2347
7b0cab99 2348 num_edges = 0;
6de9cd9a 2349 FOR_EACH_BB (bb)
7b0cab99
JH
2350 num_edges += EDGE_COUNT (bb->succs);
2351 size = num_edges * sizeof (struct edge_def);
6de9cd9a 2352 total += size;
cac50d94 2353 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
6de9cd9a 2354
6de9cd9a
DN
2355 fprintf (file, "---------------------------------------------------------\n");
2356 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2357 LABEL (total));
2358 fprintf (file, "---------------------------------------------------------\n");
2359 fprintf (file, "\n");
2360
2361 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2362 max_num_merged_labels = cfg_stats.num_merged_labels;
2363
2364 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2365 cfg_stats.num_merged_labels, max_num_merged_labels);
2366
2367 fprintf (file, "\n");
2368}
2369
2370
2371/* Dump CFG statistics on stderr. Keep extern so that it's always
2372 linked in the final executable. */
2373
2374void
2375debug_cfg_stats (void)
2376{
2377 dump_cfg_stats (stderr);
2378}
2379
2380
2381/* Dump the flowgraph to a .vcg FILE. */
2382
2383static void
2384tree_cfg2vcg (FILE *file)
2385{
2386 edge e;
628f6a4e 2387 edge_iterator ei;
6de9cd9a
DN
2388 basic_block bb;
2389 const char *funcname
673fda6b 2390 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2391
2392 /* Write the file header. */
2393 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2394 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2395 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2396
2397 /* Write blocks and edges. */
628f6a4e 2398 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
6de9cd9a
DN
2399 {
2400 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2401 e->dest->index);
2402
2403 if (e->flags & EDGE_FAKE)
2404 fprintf (file, " linestyle: dotted priority: 10");
2405 else
2406 fprintf (file, " linestyle: solid priority: 100");
2407
2408 fprintf (file, " }\n");
2409 }
2410 fputc ('\n', file);
2411
2412 FOR_EACH_BB (bb)
2413 {
2414 enum tree_code head_code, end_code;
2415 const char *head_name, *end_name;
2416 int head_line = 0;
2417 int end_line = 0;
2418 tree first = first_stmt (bb);
2419 tree last = last_stmt (bb);
2420
2421 if (first)
2422 {
2423 head_code = TREE_CODE (first);
2424 head_name = tree_code_name[head_code];
2425 head_line = get_lineno (first);
2426 }
2427 else
2428 head_name = "no-statement";
2429
2430 if (last)
2431 {
2432 end_code = TREE_CODE (last);
2433 end_name = tree_code_name[end_code];
2434 end_line = get_lineno (last);
2435 }
2436 else
2437 end_name = "no-statement";
2438
2439 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2440 bb->index, bb->index, head_name, head_line, end_name,
2441 end_line);
2442
628f6a4e 2443 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
2444 {
2445 if (e->dest == EXIT_BLOCK_PTR)
2446 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2447 else
2448 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2449
2450 if (e->flags & EDGE_FAKE)
2451 fprintf (file, " priority: 10 linestyle: dotted");
2452 else
2453 fprintf (file, " priority: 100 linestyle: solid");
2454
2455 fprintf (file, " }\n");
2456 }
2457
2458 if (bb->next_bb != EXIT_BLOCK_PTR)
2459 fputc ('\n', file);
2460 }
2461
2462 fputs ("}\n\n", file);
2463}
2464
2465
2466
2467/*---------------------------------------------------------------------------
2468 Miscellaneous helpers
2469---------------------------------------------------------------------------*/
2470
2471/* Return true if T represents a stmt that always transfers control. */
2472
2473bool
6ea2b70d 2474is_ctrl_stmt (const_tree t)
6de9cd9a
DN
2475{
2476 return (TREE_CODE (t) == COND_EXPR
2477 || TREE_CODE (t) == SWITCH_EXPR
2478 || TREE_CODE (t) == GOTO_EXPR
2479 || TREE_CODE (t) == RETURN_EXPR
2480 || TREE_CODE (t) == RESX_EXPR);
2481}
2482
2483
2484/* Return true if T is a statement that may alter the flow of control
2485 (e.g., a call to a non-returning function). */
2486
2487bool
9678086d 2488is_ctrl_altering_stmt (const_tree t)
6de9cd9a 2489{
9678086d 2490 const_tree call;
6de9cd9a 2491
1e128c5f 2492 gcc_assert (t);
0e014996 2493 call = get_call_expr_in (CONST_CAST_TREE (t));
cd709752 2494 if (call)
6de9cd9a 2495 {
6de9cd9a
DN
2496 /* A non-pure/const CALL_EXPR alters flow control if the current
2497 function has nonlocal labels. */
cd709752 2498 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
6de9cd9a
DN
2499 return true;
2500
2501 /* A CALL_EXPR also alters control flow if it does not return. */
6e14af16 2502 if (call_expr_flags (call) & ECF_NORETURN)
6de9cd9a 2503 return true;
6de9cd9a
DN
2504 }
2505
50674e96 2506 /* OpenMP directives alter control flow. */
bed575d5 2507 if (OMP_DIRECTIVE_P (t))
50674e96
DN
2508 return true;
2509
6de9cd9a
DN
2510 /* If a statement can throw, it alters control flow. */
2511 return tree_can_throw_internal (t);
2512}
2513
2514
2515/* Return true if T is a computed goto. */
2516
2517bool
6ea2b70d 2518computed_goto_p (const_tree t)
6de9cd9a
DN
2519{
2520 return (TREE_CODE (t) == GOTO_EXPR
2521 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2522}
2523
2524
4f6c2131 2525/* Return true if T is a simple local goto. */
6de9cd9a
DN
2526
2527bool
6ea2b70d 2528simple_goto_p (const_tree t)
6de9cd9a 2529{
4f6c2131
EB
2530 return (TREE_CODE (t) == GOTO_EXPR
2531 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2532}
2533
2534
2535/* Return true if T can make an abnormal transfer of control flow.
2536 Transfers of control flow associated with EH are excluded. */
2537
2538bool
6ea2b70d 2539tree_can_make_abnormal_goto (const_tree t)
4f6c2131
EB
2540{
2541 if (computed_goto_p (t))
2542 return true;
07beea0d
AH
2543 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2544 t = GIMPLE_STMT_OPERAND (t, 1);
4f6c2131
EB
2545 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2546 t = TREE_OPERAND (t, 0);
2547 if (TREE_CODE (t) == CALL_EXPR)
2548 return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2549 return false;
6de9cd9a
DN
2550}
2551
2552
2553/* Return true if T should start a new basic block. PREV_T is the
2554 statement preceding T. It is used when T is a label or a case label.
2555 Labels should only start a new basic block if their previous statement
2556 wasn't a label. Otherwise, sequence of labels would generate
2557 unnecessary basic blocks that only contain a single label. */
2558
2559static inline bool
6ea2b70d 2560stmt_starts_bb_p (const_tree t, const_tree prev_t)
6de9cd9a 2561{
6de9cd9a
DN
2562 if (t == NULL_TREE)
2563 return false;
2564
2565 /* LABEL_EXPRs start a new basic block only if the preceding
2566 statement wasn't a label of the same type. This prevents the
2567 creation of consecutive blocks that have nothing but a single
2568 label. */
229cc11f 2569 if (TREE_CODE (t) == LABEL_EXPR)
6de9cd9a
DN
2570 {
2571 /* Nonlocal and computed GOTO targets always start a new block. */
229cc11f
KH
2572 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2573 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
6de9cd9a
DN
2574 return true;
2575
229cc11f 2576 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
6de9cd9a
DN
2577 {
2578 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2579 return true;
2580
2581 cfg_stats.num_merged_labels++;
2582 return false;
2583 }
2584 else
2585 return true;
2586 }
2587
2588 return false;
2589}
2590
2591
2592/* Return true if T should end a basic block. */
2593
2594bool
9678086d 2595stmt_ends_bb_p (const_tree t)
6de9cd9a
DN
2596{
2597 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2598}
2599
242229bb 2600/* Remove block annotations and other datastructures. */
6de9cd9a
DN
2601
2602void
242229bb 2603delete_tree_cfg_annotations (void)
6de9cd9a 2604{
3a40c18a
JH
2605 basic_block bb;
2606 block_stmt_iterator bsi;
2607
2608 /* Remove annotations from every tree in the function. */
2609 FOR_EACH_BB (bb)
2610 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2611 {
2612 tree stmt = bsi_stmt (bsi);
2613 ggc_free (stmt->base.ann);
2614 stmt->base.ann = NULL;
2615 }
6de9cd9a 2616 label_to_block_map = NULL;
6de9cd9a
DN
2617}
2618
2619
2620/* Return the first statement in basic block BB. */
2621
2622tree
2623first_stmt (basic_block bb)
2624{
2625 block_stmt_iterator i = bsi_start (bb);
2626 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2627}
2628
6de9cd9a
DN
2629/* Return the last statement in basic block BB. */
2630
2631tree
2632last_stmt (basic_block bb)
2633{
2634 block_stmt_iterator b = bsi_last (bb);
2635 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2636}
2637
6de9cd9a
DN
2638/* Return the last statement of an otherwise empty block. Return NULL
2639 if the block is totally empty, or if it contains more than one
2640 statement. */
2641
2642tree
2643last_and_only_stmt (basic_block bb)
2644{
2645 block_stmt_iterator i = bsi_last (bb);
2646 tree last, prev;
2647
2648 if (bsi_end_p (i))
2649 return NULL_TREE;
2650
2651 last = bsi_stmt (i);
2652 bsi_prev (&i);
2653 if (bsi_end_p (i))
2654 return last;
2655
2656 /* Empty statements should no longer appear in the instruction stream.
2657 Everything that might have appeared before should be deleted by
2658 remove_useless_stmts, and the optimizers should just bsi_remove
2659 instead of smashing with build_empty_stmt.
2660
2661 Thus the only thing that should appear here in a block containing
2662 one executable statement is a label. */
2663 prev = bsi_stmt (i);
2664 if (TREE_CODE (prev) == LABEL_EXPR)
2665 return last;
2666 else
2667 return NULL_TREE;
2668}
2669
2670
2671/* Mark BB as the basic block holding statement T. */
2672
2673void
2674set_bb_for_stmt (tree t, basic_block bb)
2675{
30d396e3
ZD
2676 if (TREE_CODE (t) == PHI_NODE)
2677 PHI_BB (t) = bb;
2678 else if (TREE_CODE (t) == STATEMENT_LIST)
6de9cd9a
DN
2679 {
2680 tree_stmt_iterator i;
2681 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2682 set_bb_for_stmt (tsi_stmt (i), bb);
2683 }
2684 else
2685 {
2686 stmt_ann_t ann = get_stmt_ann (t);
2687 ann->bb = bb;
2688
50674e96
DN
2689 /* If the statement is a label, add the label to block-to-labels map
2690 so that we can speed up edge creation for GOTO_EXPRs. */
2691 if (TREE_CODE (t) == LABEL_EXPR)
6de9cd9a
DN
2692 {
2693 int uid;
2694
2695 t = LABEL_EXPR_LABEL (t);
2696 uid = LABEL_DECL_UID (t);
2697 if (uid == -1)
2698 {
e597f337 2699 unsigned old_len = VEC_length (basic_block, label_to_block_map);
6de9cd9a 2700 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
e597f337
KH
2701 if (old_len <= (unsigned) uid)
2702 {
e597f337
KH
2703 unsigned new_len = 3 * uid / 2;
2704
a590ac65
KH
2705 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2706 new_len);
e597f337 2707 }
6de9cd9a
DN
2708 }
2709 else
1e128c5f
GB
2710 /* We're moving an existing label. Make sure that we've
2711 removed it from the old block. */
e597f337
KH
2712 gcc_assert (!bb
2713 || !VEC_index (basic_block, label_to_block_map, uid));
2714 VEC_replace (basic_block, label_to_block_map, uid, bb);
6de9cd9a
DN
2715 }
2716 }
2717}
2718
0a4fe58f
JH
2719/* Faster version of set_bb_for_stmt that assume that statement is being moved
2720 from one basic block to another.
2721 For BB splitting we can run into quadratic case, so performance is quite
de1e45c3 2722 important and knowing that the tables are big enough, change_bb_for_stmt
0a4fe58f
JH
2723 can inline as leaf function. */
2724static inline void
2725change_bb_for_stmt (tree t, basic_block bb)
2726{
2727 get_stmt_ann (t)->bb = bb;
2728 if (TREE_CODE (t) == LABEL_EXPR)
2729 VEC_replace (basic_block, label_to_block_map,
2730 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2731}
2732
8b11a64c
ZD
2733/* Finds iterator for STMT. */
2734
2735extern block_stmt_iterator
1a1804c2 2736bsi_for_stmt (tree stmt)
8b11a64c
ZD
2737{
2738 block_stmt_iterator bsi;
2739
2740 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2741 if (bsi_stmt (bsi) == stmt)
2742 return bsi;
2743
1e128c5f 2744 gcc_unreachable ();
8b11a64c 2745}
6de9cd9a 2746
f430bae8
AM
2747/* Mark statement T as modified, and update it. */
2748static inline void
2749update_modified_stmts (tree t)
2750{
ed1a2abd
JH
2751 if (!ssa_operands_active ())
2752 return;
f430bae8
AM
2753 if (TREE_CODE (t) == STATEMENT_LIST)
2754 {
2755 tree_stmt_iterator i;
2756 tree stmt;
2757 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2758 {
2759 stmt = tsi_stmt (i);
2760 update_stmt_if_modified (stmt);
2761 }
2762 }
2763 else
2764 update_stmt_if_modified (t);
2765}
2766
6de9cd9a
DN
2767/* Insert statement (or statement list) T before the statement
2768 pointed-to by iterator I. M specifies how to update iterator I
2769 after insertion (see enum bsi_iterator_update). */
2770
2771void
2772bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2773{
2774 set_bb_for_stmt (t, i->bb);
f430bae8 2775 update_modified_stmts (t);
6de9cd9a
DN
2776 tsi_link_before (&i->tsi, t, m);
2777}
2778
2779
2780/* Insert statement (or statement list) T after the statement
2781 pointed-to by iterator I. M specifies how to update iterator I
2782 after insertion (see enum bsi_iterator_update). */
2783
2784void
2785bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2786{
2787 set_bb_for_stmt (t, i->bb);
f430bae8 2788 update_modified_stmts (t);
6de9cd9a
DN
2789 tsi_link_after (&i->tsi, t, m);
2790}
2791
2792
2793/* Remove the statement pointed to by iterator I. The iterator is updated
6531d1be 2794 to the next statement.
736432ee
JL
2795
2796 When REMOVE_EH_INFO is true we remove the statement pointed to by
2797 iterator I from the EH tables. Otherwise we do not modify the EH
2798 tables.
2799
2800 Generally, REMOVE_EH_INFO should be true when the statement is going to
2801 be removed from the IL and not reinserted elsewhere. */
6de9cd9a
DN
2802
2803void
736432ee 2804bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
6de9cd9a
DN
2805{
2806 tree t = bsi_stmt (*i);
2807 set_bb_for_stmt (t, NULL);
f430bae8 2808 delink_stmt_imm_use (t);
6de9cd9a 2809 tsi_delink (&i->tsi);
f430bae8 2810 mark_stmt_modified (t);
736432ee 2811 if (remove_eh_info)
6946b3f7
JH
2812 {
2813 remove_stmt_from_eh_region (t);
2814 gimple_remove_stmt_histograms (cfun, t);
2815 }
6de9cd9a
DN
2816}
2817
2818
2819/* Move the statement at FROM so it comes right after the statement at TO. */
2820
6531d1be 2821void
6de9cd9a
DN
2822bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2823{
2824 tree stmt = bsi_stmt (*from);
736432ee 2825 bsi_remove (from, false);
18965703
ZD
2826 /* We must have BSI_NEW_STMT here, as bsi_move_after is sometimes used to
2827 move statements to an empty block. */
2828 bsi_insert_after (to, stmt, BSI_NEW_STMT);
6531d1be 2829}
6de9cd9a
DN
2830
2831
2832/* Move the statement at FROM so it comes right before the statement at TO. */
2833
6531d1be 2834void
6de9cd9a
DN
2835bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2836{
2837 tree stmt = bsi_stmt (*from);
736432ee 2838 bsi_remove (from, false);
18965703
ZD
2839 /* For consistency with bsi_move_after, it might be better to have
2840 BSI_NEW_STMT here; however, that breaks several places that expect
2841 that TO does not change. */
6de9cd9a
DN
2842 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2843}
2844
2845
2846/* Move the statement at FROM to the end of basic block BB. */
2847
2848void
2849bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2850{
2851 block_stmt_iterator last = bsi_last (bb);
6531d1be 2852
6de9cd9a
DN
2853 /* Have to check bsi_end_p because it could be an empty block. */
2854 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2855 bsi_move_before (from, &last);
2856 else
2857 bsi_move_after (from, &last);
2858}
2859
2860
2861/* Replace the contents of the statement pointed to by iterator BSI
736432ee
JL
2862 with STMT. If UPDATE_EH_INFO is true, the exception handling
2863 information of the original statement is moved to the new statement. */
6de9cd9a
DN
2864
2865void
736432ee 2866bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
6de9cd9a
DN
2867{
2868 int eh_region;
2869 tree orig_stmt = bsi_stmt (*bsi);
2870
ff39b79b
JH
2871 if (stmt == orig_stmt)
2872 return;
6de9cd9a
DN
2873 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2874 set_bb_for_stmt (stmt, bsi->bb);
2875
2876 /* Preserve EH region information from the original statement, if
2877 requested by the caller. */
736432ee 2878 if (update_eh_info)
6de9cd9a
DN
2879 {
2880 eh_region = lookup_stmt_eh_region (orig_stmt);
2881 if (eh_region >= 0)
59bb84ef 2882 {
736432ee 2883 remove_stmt_from_eh_region (orig_stmt);
59bb84ef
JL
2884 add_stmt_to_eh_region (stmt, eh_region);
2885 }
6de9cd9a
DN
2886 }
2887
ff39b79b
JH
2888 gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
2889 gimple_remove_stmt_histograms (cfun, orig_stmt);
b1ca239f 2890 delink_stmt_imm_use (orig_stmt);
6de9cd9a 2891 *bsi_stmt_ptr (*bsi) = stmt;
f430bae8
AM
2892 mark_stmt_modified (stmt);
2893 update_modified_stmts (stmt);
6de9cd9a
DN
2894}
2895
2896
2897/* Insert the statement pointed-to by BSI into edge E. Every attempt
2898 is made to place the statement in an existing basic block, but
2899 sometimes that isn't possible. When it isn't possible, the edge is
2900 split and the statement is added to the new block.
2901
2902 In all cases, the returned *BSI points to the correct location. The
2903 return value is true if insertion should be done after the location,
82b85a85
ZD
2904 or false if it should be done before the location. If new basic block
2905 has to be created, it is stored in *NEW_BB. */
6de9cd9a
DN
2906
2907static bool
82b85a85
ZD
2908tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2909 basic_block *new_bb)
6de9cd9a
DN
2910{
2911 basic_block dest, src;
2912 tree tmp;
2913
2914 dest = e->dest;
2915 restart:
2916
2917 /* If the destination has one predecessor which has no PHI nodes,
6531d1be 2918 insert there. Except for the exit block.
6de9cd9a
DN
2919
2920 The requirement for no PHI nodes could be relaxed. Basically we
2921 would have to examine the PHIs to prove that none of them used
e28d0cfb 2922 the value set by the statement we want to insert on E. That
6de9cd9a 2923 hardly seems worth the effort. */
c5cbcccf 2924 if (single_pred_p (dest)
6de9cd9a
DN
2925 && ! phi_nodes (dest)
2926 && dest != EXIT_BLOCK_PTR)
2927 {
2928 *bsi = bsi_start (dest);
2929 if (bsi_end_p (*bsi))
2930 return true;
2931
2932 /* Make sure we insert after any leading labels. */
2933 tmp = bsi_stmt (*bsi);
2934 while (TREE_CODE (tmp) == LABEL_EXPR)
2935 {
2936 bsi_next (bsi);
2937 if (bsi_end_p (*bsi))
2938 break;
2939 tmp = bsi_stmt (*bsi);
2940 }
2941
2942 if (bsi_end_p (*bsi))
2943 {
2944 *bsi = bsi_last (dest);
2945 return true;
2946 }
2947 else
2948 return false;
2949 }
2950
2951 /* If the source has one successor, the edge is not abnormal and
2952 the last statement does not end a basic block, insert there.
2953 Except for the entry block. */
2954 src = e->src;
2955 if ((e->flags & EDGE_ABNORMAL) == 0
c5cbcccf 2956 && single_succ_p (src)
6de9cd9a
DN
2957 && src != ENTRY_BLOCK_PTR)
2958 {
2959 *bsi = bsi_last (src);
2960 if (bsi_end_p (*bsi))
2961 return true;
2962
2963 tmp = bsi_stmt (*bsi);
2964 if (!stmt_ends_bb_p (tmp))
2965 return true;
ce068299
JH
2966
2967 /* Insert code just before returning the value. We may need to decompose
2968 the return in the case it contains non-trivial operand. */
2969 if (TREE_CODE (tmp) == RETURN_EXPR)
2970 {
2971 tree op = TREE_OPERAND (tmp, 0);
7802250d 2972 if (op && !is_gimple_val (op))
ce068299 2973 {
07beea0d 2974 gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
ce068299 2975 bsi_insert_before (bsi, op, BSI_NEW_STMT);
07beea0d 2976 TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
ce068299
JH
2977 }
2978 bsi_prev (bsi);
2979 return true;
2980 }
6de9cd9a
DN
2981 }
2982
2983 /* Otherwise, create a new basic block, and split this edge. */
2984 dest = split_edge (e);
82b85a85
ZD
2985 if (new_bb)
2986 *new_bb = dest;
c5cbcccf 2987 e = single_pred_edge (dest);
6de9cd9a
DN
2988 goto restart;
2989}
2990
2991
2992/* This routine will commit all pending edge insertions, creating any new
8e731e4e 2993 basic blocks which are necessary. */
6de9cd9a
DN
2994
2995void
8e731e4e 2996bsi_commit_edge_inserts (void)
6de9cd9a
DN
2997{
2998 basic_block bb;
2999 edge e;
628f6a4e 3000 edge_iterator ei;
6de9cd9a 3001
c5cbcccf 3002 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
6de9cd9a
DN
3003
3004 FOR_EACH_BB (bb)
628f6a4e 3005 FOR_EACH_EDGE (e, ei, bb->succs)
edfaf675 3006 bsi_commit_one_edge_insert (e, NULL);
6de9cd9a
DN
3007}
3008
3009
edfaf675
AM
3010/* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3011 to this block, otherwise set it to NULL. */
6de9cd9a 3012
edfaf675
AM
3013void
3014bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
6de9cd9a 3015{
edfaf675
AM
3016 if (new_bb)
3017 *new_bb = NULL;
6de9cd9a
DN
3018 if (PENDING_STMT (e))
3019 {
3020 block_stmt_iterator bsi;
3021 tree stmt = PENDING_STMT (e);
3022
3023 PENDING_STMT (e) = NULL_TREE;
3024
edfaf675 3025 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
6de9cd9a
DN
3026 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3027 else
3028 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3029 }
3030}
3031
3032
3033/* Add STMT to the pending list of edge E. No actual insertion is
3034 made until a call to bsi_commit_edge_inserts () is made. */
3035
3036void
3037bsi_insert_on_edge (edge e, tree stmt)
3038{
3039 append_to_statement_list (stmt, &PENDING_STMT (e));
3040}
3041
adb35797
KH
3042/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3043 block has to be created, it is returned. */
82b85a85
ZD
3044
3045basic_block
3046bsi_insert_on_edge_immediate (edge e, tree stmt)
3047{
3048 block_stmt_iterator bsi;
3049 basic_block new_bb = NULL;
3050
1e128c5f 3051 gcc_assert (!PENDING_STMT (e));
82b85a85
ZD
3052
3053 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3054 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3055 else
3056 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3057
3058 return new_bb;
3059}
6de9cd9a 3060
6de9cd9a
DN
3061/*---------------------------------------------------------------------------
3062 Tree specific functions for CFG manipulation
3063---------------------------------------------------------------------------*/
3064
4f7db7f7
KH
3065/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3066
3067static void
3068reinstall_phi_args (edge new_edge, edge old_edge)
3069{
ea7e6d5a
AH
3070 tree phi;
3071 edge_var_map_vector v;
3072 edge_var_map *vm;
3073 int i;
4f7db7f7 3074
ea7e6d5a
AH
3075 v = redirect_edge_var_map_vector (old_edge);
3076 if (!v)
4f7db7f7 3077 return;
6531d1be 3078
ea7e6d5a
AH
3079 for (i = 0, phi = phi_nodes (new_edge->dest);
3080 VEC_iterate (edge_var_map, v, i, vm) && phi;
3081 i++, phi = PHI_CHAIN (phi))
4f7db7f7 3082 {
ea7e6d5a
AH
3083 tree result = redirect_edge_var_map_result (vm);
3084 tree arg = redirect_edge_var_map_def (vm);
4f7db7f7
KH
3085
3086 gcc_assert (result == PHI_RESULT (phi));
3087
d2e398df 3088 add_phi_arg (phi, arg, new_edge);
4f7db7f7
KH
3089 }
3090
ea7e6d5a 3091 redirect_edge_var_map_clear (old_edge);
4f7db7f7
KH
3092}
3093
2a8a8292 3094/* Returns the basic block after which the new basic block created
b9a66240
ZD
3095 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3096 near its "logical" location. This is of most help to humans looking
3097 at debugging dumps. */
3098
3099static basic_block
3100split_edge_bb_loc (edge edge_in)
3101{
3102 basic_block dest = edge_in->dest;
3103
3104 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3105 return edge_in->src;
3106 else
3107 return dest->prev_bb;
3108}
3109
6de9cd9a
DN
3110/* Split a (typically critical) edge EDGE_IN. Return the new block.
3111 Abort on abnormal edges. */
3112
3113static basic_block
3114tree_split_edge (edge edge_in)
3115{
4741d956 3116 basic_block new_bb, after_bb, dest;
6de9cd9a 3117 edge new_edge, e;
6de9cd9a
DN
3118
3119 /* Abnormal edges cannot be split. */
1e128c5f 3120 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
6de9cd9a 3121
6de9cd9a
DN
3122 dest = edge_in->dest;
3123
b9a66240 3124 after_bb = split_edge_bb_loc (edge_in);
6de9cd9a
DN
3125
3126 new_bb = create_empty_bb (after_bb);
b829f3fa
JH
3127 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3128 new_bb->count = edge_in->count;
6de9cd9a 3129 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
b829f3fa
JH
3130 new_edge->probability = REG_BR_PROB_BASE;
3131 new_edge->count = edge_in->count;
6de9cd9a 3132
1e128c5f 3133 e = redirect_edge_and_branch (edge_in, new_bb);
c7b852c8 3134 gcc_assert (e == edge_in);
4f7db7f7 3135 reinstall_phi_args (new_edge, e);
6de9cd9a
DN
3136
3137 return new_bb;
3138}
3139
6de9cd9a 3140/* Callback for walk_tree, check that all elements with address taken are
7a442a1d
SB
3141 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3142 inside a PHI node. */
6de9cd9a
DN
3143
3144static tree
2fbe90f2 3145verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
6de9cd9a
DN
3146{
3147 tree t = *tp, x;
3148
3149 if (TYPE_P (t))
3150 *walk_subtrees = 0;
6531d1be 3151
e8ca4159 3152 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2fbe90f2 3153#define CHECK_OP(N, MSG) \
e8ca4159 3154 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2fbe90f2 3155 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
6de9cd9a
DN
3156
3157 switch (TREE_CODE (t))
3158 {
3159 case SSA_NAME:
3160 if (SSA_NAME_IN_FREE_LIST (t))
3161 {
3162 error ("SSA name in freelist but still referenced");
3163 return *tp;
3164 }
3165 break;
3166
0bca51f0
DN
3167 case ASSERT_EXPR:
3168 x = fold (ASSERT_EXPR_COND (t));
3169 if (x == boolean_false_node)
3170 {
3171 error ("ASSERT_EXPR with an always-false condition");
3172 return *tp;
3173 }
3174 break;
3175
6de9cd9a 3176 case MODIFY_EXPR:
07beea0d
AH
3177 gcc_unreachable ();
3178
3179 case GIMPLE_MODIFY_STMT:
3180 x = GIMPLE_STMT_OPERAND (t, 0);
6de9cd9a
DN
3181 if (TREE_CODE (x) == BIT_FIELD_REF
3182 && is_gimple_reg (TREE_OPERAND (x, 0)))
3183 {
3184 error ("GIMPLE register modified with BIT_FIELD_REF");
2fbe90f2 3185 return t;
6de9cd9a
DN
3186 }
3187 break;
3188
3189 case ADDR_EXPR:
81fc3052
DB
3190 {
3191 bool old_invariant;
3192 bool old_constant;
3193 bool old_side_effects;
3194 bool new_invariant;
3195 bool new_constant;
3196 bool new_side_effects;
3197
81fc3052
DB
3198 old_invariant = TREE_INVARIANT (t);
3199 old_constant = TREE_CONSTANT (t);
3200 old_side_effects = TREE_SIDE_EFFECTS (t);
3201
127203ac 3202 recompute_tree_invariant_for_addr_expr (t);
81fc3052
DB
3203 new_invariant = TREE_INVARIANT (t);
3204 new_side_effects = TREE_SIDE_EFFECTS (t);
3205 new_constant = TREE_CONSTANT (t);
3206
3207 if (old_invariant != new_invariant)
3208 {
3209 error ("invariant not recomputed when ADDR_EXPR changed");
3210 return t;
3211 }
3212
3213 if (old_constant != new_constant)
3214 {
3215 error ("constant not recomputed when ADDR_EXPR changed");
3216 return t;
3217 }
3218 if (old_side_effects != new_side_effects)
3219 {
3220 error ("side effects not recomputed when ADDR_EXPR changed");
3221 return t;
3222 }
3223
3224 /* Skip any references (they will be checked when we recurse down the
3225 tree) and ensure that any variable used as a prefix is marked
3226 addressable. */
3227 for (x = TREE_OPERAND (t, 0);
3228 handled_component_p (x);
3229 x = TREE_OPERAND (x, 0))
3230 ;
3231
3232 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3233 return NULL;
3234 if (!TREE_ADDRESSABLE (x))
3235 {
3236 error ("address taken, but ADDRESSABLE bit not set");
3237 return x;
3238 }
bdb69bee 3239
81fc3052
DB
3240 break;
3241 }
6de9cd9a
DN
3242
3243 case COND_EXPR:
a6234684 3244 x = COND_EXPR_COND (t);
d40055ab 3245 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
6de9cd9a 3246 {
d40055ab 3247 error ("non-integral used in condition");
6de9cd9a
DN
3248 return x;
3249 }
9c691961
AP
3250 if (!is_gimple_condexpr (x))
3251 {
ab532386 3252 error ("invalid conditional operand");
9c691961
AP
3253 return x;
3254 }
6de9cd9a
DN
3255 break;
3256
3257 case NOP_EXPR:
3258 case CONVERT_EXPR:
3259 case FIX_TRUNC_EXPR:
6de9cd9a
DN
3260 case FLOAT_EXPR:
3261 case NEGATE_EXPR:
3262 case ABS_EXPR:
3263 case BIT_NOT_EXPR:
3264 case NON_LVALUE_EXPR:
3265 case TRUTH_NOT_EXPR:
ab532386 3266 CHECK_OP (0, "invalid operand to unary operator");
6de9cd9a
DN
3267 break;
3268
3269 case REALPART_EXPR:
3270 case IMAGPART_EXPR:
2fbe90f2
RK
3271 case COMPONENT_REF:
3272 case ARRAY_REF:
3273 case ARRAY_RANGE_REF:
3274 case BIT_FIELD_REF:
3275 case VIEW_CONVERT_EXPR:
3276 /* We have a nest of references. Verify that each of the operands
3277 that determine where to reference is either a constant or a variable,
3278 verify that the base is valid, and then show we've already checked
3279 the subtrees. */
afe84921 3280 while (handled_component_p (t))
2fbe90f2
RK
3281 {
3282 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
ab532386 3283 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2fbe90f2
RK
3284 else if (TREE_CODE (t) == ARRAY_REF
3285 || TREE_CODE (t) == ARRAY_RANGE_REF)
3286 {
ab532386 3287 CHECK_OP (1, "invalid array index");
2fbe90f2 3288 if (TREE_OPERAND (t, 2))
ab532386 3289 CHECK_OP (2, "invalid array lower bound");
2fbe90f2 3290 if (TREE_OPERAND (t, 3))
ab532386 3291 CHECK_OP (3, "invalid array stride");
2fbe90f2
RK
3292 }
3293 else if (TREE_CODE (t) == BIT_FIELD_REF)
3294 {
e55f42fb
RG
3295 if (!host_integerp (TREE_OPERAND (t, 1), 1)
3296 || !host_integerp (TREE_OPERAND (t, 2), 1))
3297 {
3298 error ("invalid position or size operand to BIT_FIELD_REF");
3299 return t;
3300 }
fc0f49f3
RG
3301 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3302 && (TYPE_PRECISION (TREE_TYPE (t))
3303 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3304 {
3305 error ("integral result type precision does not match "
3306 "field size of BIT_FIELD_REF");
3307 return t;
3308 }
3309 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3310 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
3311 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3312 {
3313 error ("mode precision of non-integral result does not "
3314 "match field size of BIT_FIELD_REF");
3315 return t;
3316 }
2fbe90f2
RK
3317 }
3318
3319 t = TREE_OPERAND (t, 0);
3320 }
3321
bb0c55f6 3322 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2fbe90f2 3323 {
ab532386 3324 error ("invalid reference prefix");
2fbe90f2
RK
3325 return t;
3326 }
3327 *walk_subtrees = 0;
6de9cd9a 3328 break;
5be014d5
AP
3329 case PLUS_EXPR:
3330 case MINUS_EXPR:
3331 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3332 POINTER_PLUS_EXPR. */
3333 if (POINTER_TYPE_P (TREE_TYPE (t)))
3334 {
3335 error ("invalid operand to plus/minus, type is a pointer");
3336 return t;
3337 }
3338 CHECK_OP (0, "invalid operand to binary operator");
3339 CHECK_OP (1, "invalid operand to binary operator");
3340 break;
6de9cd9a 3341
5be014d5
AP
3342 case POINTER_PLUS_EXPR:
3343 /* Check to make sure the first operand is a pointer or reference type. */
3344 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3345 {
3346 error ("invalid operand to pointer plus, first operand is not a pointer");
3347 return t;
3348 }
3349 /* Check to make sure the second operand is an integer with type of
3350 sizetype. */
36618b93
RG
3351 if (!useless_type_conversion_p (sizetype,
3352 TREE_TYPE (TREE_OPERAND (t, 1))))
5be014d5
AP
3353 {
3354 error ("invalid operand to pointer plus, second operand is not an "
3355 "integer with type of sizetype.");
3356 return t;
3357 }
3358 /* FALLTHROUGH */
6de9cd9a
DN
3359 case LT_EXPR:
3360 case LE_EXPR:
3361 case GT_EXPR:
3362 case GE_EXPR:
3363 case EQ_EXPR:
3364 case NE_EXPR:
3365 case UNORDERED_EXPR:
3366 case ORDERED_EXPR:
3367 case UNLT_EXPR:
3368 case UNLE_EXPR:
3369 case UNGT_EXPR:
3370 case UNGE_EXPR:
3371 case UNEQ_EXPR:
d1a7edaf 3372 case LTGT_EXPR:
6de9cd9a
DN
3373 case MULT_EXPR:
3374 case TRUNC_DIV_EXPR:
3375 case CEIL_DIV_EXPR:
3376 case FLOOR_DIV_EXPR:
3377 case ROUND_DIV_EXPR:
3378 case TRUNC_MOD_EXPR:
3379 case CEIL_MOD_EXPR:
3380 case FLOOR_MOD_EXPR:
3381 case ROUND_MOD_EXPR:
3382 case RDIV_EXPR:
3383 case EXACT_DIV_EXPR:
3384 case MIN_EXPR:
3385 case MAX_EXPR:
3386 case LSHIFT_EXPR:
3387 case RSHIFT_EXPR:
3388 case LROTATE_EXPR:
3389 case RROTATE_EXPR:
3390 case BIT_IOR_EXPR:
3391 case BIT_XOR_EXPR:
3392 case BIT_AND_EXPR:
ab532386
JM
3393 CHECK_OP (0, "invalid operand to binary operator");
3394 CHECK_OP (1, "invalid operand to binary operator");
6de9cd9a
DN
3395 break;
3396
84816907
JM
3397 case CONSTRUCTOR:
3398 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3399 *walk_subtrees = 0;
3400 break;
3401
6de9cd9a
DN
3402 default:
3403 break;
3404 }
3405 return NULL;
2fbe90f2
RK
3406
3407#undef CHECK_OP
6de9cd9a
DN
3408}
3409
7e98624c
RG
3410/* Verifies if EXPR is a valid GIMPLE unary expression. Returns true
3411 if there is an error, otherwise false. */
3412
3413static bool
ed7a4b4b 3414verify_gimple_unary_expr (const_tree expr)
7e98624c
RG
3415{
3416 tree op = TREE_OPERAND (expr, 0);
3417 tree type = TREE_TYPE (expr);
3418
3419 if (!is_gimple_val (op))
3420 {
3421 error ("invalid operand in unary expression");
3422 return true;
3423 }
3424
3425 /* For general unary expressions we have the operations type
3426 as the effective type the operation is carried out on. So all
3427 we need to require is that the operand is trivially convertible
3428 to that type. */
3429 if (!useless_type_conversion_p (type, TREE_TYPE (op)))
3430 {
3431 error ("type mismatch in unary expression");
3432 debug_generic_expr (type);
3433 debug_generic_expr (TREE_TYPE (op));
3434 return true;
3435 }
3436
3437 return false;
3438}
3439
3440/* Verifies if EXPR is a valid GIMPLE binary expression. Returns true
3441 if there is an error, otherwise false. */
3442
3443static bool
ed7a4b4b 3444verify_gimple_binary_expr (const_tree expr)
7e98624c
RG
3445{
3446 tree op0 = TREE_OPERAND (expr, 0);
3447 tree op1 = TREE_OPERAND (expr, 1);
3448 tree type = TREE_TYPE (expr);
3449
3450 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3451 {
3452 error ("invalid operands in binary expression");
3453 return true;
3454 }
3455
3456 /* For general binary expressions we have the operations type
3457 as the effective type the operation is carried out on. So all
3458 we need to require is that both operands are trivially convertible
3459 to that type. */
3460 if (!useless_type_conversion_p (type, TREE_TYPE (op0))
3461 || !useless_type_conversion_p (type, TREE_TYPE (op1)))
3462 {
3463 error ("type mismatch in binary expression");
3464 debug_generic_stmt (type);
3465 debug_generic_stmt (TREE_TYPE (op0));
3466 debug_generic_stmt (TREE_TYPE (op1));
3467 return true;
3468 }
3469
3470 return false;
3471}
3472
3473/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3474 Returns true if there is an error, otherwise false. */
3475
3476static bool
3477verify_gimple_min_lval (tree expr)
3478{
3479 tree op;
3480
3481 if (is_gimple_id (expr))
3482 return false;
3483
3484 if (TREE_CODE (expr) != INDIRECT_REF
3485 && TREE_CODE (expr) != ALIGN_INDIRECT_REF
3486 && TREE_CODE (expr) != MISALIGNED_INDIRECT_REF)
3487 {
3488 error ("invalid expression for min lvalue");
3489 return true;
3490 }
3491
3492 op = TREE_OPERAND (expr, 0);
3493 if (!is_gimple_val (op))
3494 {
3495 error ("invalid operand in indirect reference");
3496 debug_generic_stmt (op);
3497 return true;
3498 }
3499 if (!useless_type_conversion_p (TREE_TYPE (expr),
3500 TREE_TYPE (TREE_TYPE (op))))
3501 {
3502 error ("type mismatch in indirect reference");
3503 debug_generic_stmt (TREE_TYPE (expr));
3504 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3505 return true;
3506 }
3507
3508 return false;
3509}
3510
3511/* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3512 if there is an error, otherwise false. */
3513
3514static bool
3515verify_gimple_reference (tree expr)
3516{
3517 while (handled_component_p (expr))
3518 {
3519 tree op = TREE_OPERAND (expr, 0);
3520
3521 if (TREE_CODE (expr) == ARRAY_REF
3522 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3523 {
3524 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3525 || (TREE_OPERAND (expr, 2)
3526 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3527 || (TREE_OPERAND (expr, 3)
3528 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3529 {
3530 error ("invalid operands to array reference");
3531 debug_generic_stmt (expr);
3532 return true;
3533 }
3534 }
3535
3536 /* Verify if the reference array element types are compatible. */
3537 if (TREE_CODE (expr) == ARRAY_REF
3538 && !useless_type_conversion_p (TREE_TYPE (expr),
3539 TREE_TYPE (TREE_TYPE (op))))
3540 {
3541 error ("type mismatch in array reference");
3542 debug_generic_stmt (TREE_TYPE (expr));
3543 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3544 return true;
3545 }
3546 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3547 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3548 TREE_TYPE (TREE_TYPE (op))))
3549 {
3550 error ("type mismatch in array range reference");
3551 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3552 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3553 return true;
3554 }
3555
3556 if ((TREE_CODE (expr) == REALPART_EXPR
3557 || TREE_CODE (expr) == IMAGPART_EXPR)
3558 && !useless_type_conversion_p (TREE_TYPE (expr),
3559 TREE_TYPE (TREE_TYPE (op))))
3560 {
3561 error ("type mismatch in real/imagpart reference");
3562 debug_generic_stmt (TREE_TYPE (expr));
3563 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3564 return true;
3565 }
3566
3567 if (TREE_CODE (expr) == COMPONENT_REF
3568 && !useless_type_conversion_p (TREE_TYPE (expr),
3569 TREE_TYPE (TREE_OPERAND (expr, 1))))
3570 {
3571 error ("type mismatch in component reference");
3572 debug_generic_stmt (TREE_TYPE (expr));
3573 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3574 return true;
3575 }
3576
3577 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3578 is nothing to verify. Gross mismatches at most invoke
3579 undefined behavior. */
3580
3581 expr = op;
3582 }
3583
3584 return verify_gimple_min_lval (expr);
3585}
3586
20dcff2a
RG
3587/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3588 list of pointer-to types that is trivially convertible to DEST. */
3589
3590static bool
3591one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3592{
3593 tree src;
3594
3595 if (!TYPE_POINTER_TO (src_obj))
3596 return true;
3597
3598 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3599 if (useless_type_conversion_p (dest, src))
3600 return true;
3601
3602 return false;
3603}
3604
7e98624c
RG
3605/* Verify the GIMPLE expression EXPR. Returns true if there is an
3606 error, otherwise false. */
3607
3608static bool
3609verify_gimple_expr (tree expr)
3610{
3611 tree type = TREE_TYPE (expr);
3612
3613 if (is_gimple_val (expr))
3614 return false;
3615
3616 /* Special codes we cannot handle via their class. */
3617 switch (TREE_CODE (expr))
3618 {
3619 case NOP_EXPR:
3620 case CONVERT_EXPR:
3621 {
3622 tree op = TREE_OPERAND (expr, 0);
3623 if (!is_gimple_val (op))
3624 {
3625 error ("invalid operand in conversion");
3626 return true;
3627 }
3628
9822c455
RG
3629 /* Allow conversions between integral types and between
3630 pointer types. */
3631 if ((INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3632 || (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (op))))
7e98624c
RG
3633 return false;
3634
3635 /* Allow conversions between integral types and pointers only if
3636 there is no sign or zero extension involved. */
3637 if (((POINTER_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3638 || (POINTER_TYPE_P (TREE_TYPE (op)) && INTEGRAL_TYPE_P (type)))
3639 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op)))
3640 return false;
3641
3642 /* Allow conversion from integer to offset type and vice versa. */
3643 if ((TREE_CODE (type) == OFFSET_TYPE
3644 && TREE_CODE (TREE_TYPE (op)) == INTEGER_TYPE)
3645 || (TREE_CODE (type) == INTEGER_TYPE
3646 && TREE_CODE (TREE_TYPE (op)) == OFFSET_TYPE))
3647 return false;
3648
3649 /* Otherwise assert we are converting between types of the
3650 same kind. */
3651 if (TREE_CODE (type) != TREE_CODE (TREE_TYPE (op)))
3652 {
3653 error ("invalid types in nop conversion");
3654 debug_generic_expr (type);
3655 debug_generic_expr (TREE_TYPE (op));
3656 return true;
3657 }
3658
3659 return false;
3660 }
3661
3662 case FLOAT_EXPR:
3663 {
3664 tree op = TREE_OPERAND (expr, 0);
3665 if (!is_gimple_val (op))
3666 {
3667 error ("invalid operand in int to float conversion");
3668 return true;
3669 }
3670 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3671 || !SCALAR_FLOAT_TYPE_P (type))
3672 {
3673 error ("invalid types in conversion to floating point");
3674 debug_generic_expr (type);
3675 debug_generic_expr (TREE_TYPE (op));
3676 return true;
3677 }
3678 return false;
3679 }
3680
3681 case FIX_TRUNC_EXPR:
3682 {
3683 tree op = TREE_OPERAND (expr, 0);
3684 if (!is_gimple_val (op))
3685 {
3686 error ("invalid operand in float to int conversion");
3687 return true;
3688 }
3689 if (!INTEGRAL_TYPE_P (type)
3690 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (op)))
3691 {
3692 error ("invalid types in conversion to integer");
3693 debug_generic_expr (type);
3694 debug_generic_expr (TREE_TYPE (op));
3695 return true;
3696 }
3697 return false;
3698 }
3699
3700 case COMPLEX_EXPR:
3701 {
3702 tree op0 = TREE_OPERAND (expr, 0);
3703 tree op1 = TREE_OPERAND (expr, 1);
3704 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3705 {
3706 error ("invalid operands in complex expression");
3707 return true;
3708 }
3709 if (!TREE_CODE (type) == COMPLEX_TYPE
3710 || !(TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3711 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op0)))
3712 || !(TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
3713 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op1)))
3714 || !useless_type_conversion_p (TREE_TYPE (type),
3715 TREE_TYPE (op0))
3716 || !useless_type_conversion_p (TREE_TYPE (type),
3717 TREE_TYPE (op1)))
3718 {
3719 error ("type mismatch in complex expression");
3720 debug_generic_stmt (TREE_TYPE (expr));
3721 debug_generic_stmt (TREE_TYPE (op0));
3722 debug_generic_stmt (TREE_TYPE (op1));
3723 return true;
3724 }
3725 return false;
3726 }
3727
3728 case CONSTRUCTOR:
3729 {
3730 /* This is used like COMPLEX_EXPR but for vectors. */
3731 if (TREE_CODE (type) != VECTOR_TYPE)
3732 {
3733 error ("constructor not allowed for non-vector types");
3734 debug_generic_stmt (type);
3735 return true;
3736 }
3737 /* FIXME: verify constructor arguments. */
3738 return false;
3739 }
3740
3741 case LSHIFT_EXPR:
3742 case RSHIFT_EXPR:
3743 case LROTATE_EXPR:
3744 case RROTATE_EXPR:
3745 {
3746 tree op0 = TREE_OPERAND (expr, 0);
3747 tree op1 = TREE_OPERAND (expr, 1);
3748 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3749 {
3750 error ("invalid operands in shift expression");
3751 return true;
3752 }
3753 if (!TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
3754 || !useless_type_conversion_p (type, TREE_TYPE (op0)))
3755 {
3756 error ("type mismatch in shift expression");
3757 debug_generic_stmt (TREE_TYPE (expr));
3758 debug_generic_stmt (TREE_TYPE (op0));
3759 debug_generic_stmt (TREE_TYPE (op1));
3760 return true;
3761 }
3762 return false;
3763 }
3764
3765 case PLUS_EXPR:
3766 case MINUS_EXPR:
3767 {
3768 tree op0 = TREE_OPERAND (expr, 0);
3769 tree op1 = TREE_OPERAND (expr, 1);
3770 if (POINTER_TYPE_P (type)
3771 || POINTER_TYPE_P (TREE_TYPE (op0))
3772 || POINTER_TYPE_P (TREE_TYPE (op1)))
3773 {
3774 error ("invalid (pointer) operands to plus/minus");
3775 return true;
3776 }
3777 /* Continue with generic binary expression handling. */
3778 break;
3779 }
3780
3781 case POINTER_PLUS_EXPR:
3782 {
3783 tree op0 = TREE_OPERAND (expr, 0);
3784 tree op1 = TREE_OPERAND (expr, 1);
3785 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3786 {
3787 error ("invalid operands in pointer plus expression");
3788 return true;
3789 }
3790 if (!POINTER_TYPE_P (TREE_TYPE (op0))
7e98624c
RG
3791 || !useless_type_conversion_p (type, TREE_TYPE (op0))
3792 || !useless_type_conversion_p (sizetype, TREE_TYPE (op1)))
3793 {
3794 error ("type mismatch in pointer plus expression");
3795 debug_generic_stmt (type);
3796 debug_generic_stmt (TREE_TYPE (op0));
3797 debug_generic_stmt (TREE_TYPE (op1));
3798 return true;
3799 }
3800 return false;
3801 }
3802
3803 case COND_EXPR:
3804 {
3805 tree op0 = TREE_OPERAND (expr, 0);
3806 tree op1 = TREE_OPERAND (expr, 1);
3807 tree op2 = TREE_OPERAND (expr, 2);
3808 if ((!is_gimple_val (op1)
3809 && TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE)
3810 || (!is_gimple_val (op2)
3811 && TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE))
3812 {
3813 error ("invalid operands in conditional expression");
3814 return true;
3815 }
3816 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3817 || (TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE
3818 && !useless_type_conversion_p (type, TREE_TYPE (op1)))
3819 || (TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE
3820 && !useless_type_conversion_p (type, TREE_TYPE (op2))))
3821 {
3822 error ("type mismatch in conditional expression");
3823 debug_generic_stmt (type);
3824 debug_generic_stmt (TREE_TYPE (op0));
3825 debug_generic_stmt (TREE_TYPE (op1));
3826 debug_generic_stmt (TREE_TYPE (op2));
3827 return true;
3828 }
3829 return verify_gimple_expr (op0);
3830 }
3831
3832 case ADDR_EXPR:
3833 {
3834 tree op = TREE_OPERAND (expr, 0);
7e98624c
RG
3835 if (!is_gimple_addressable (op))
3836 {
3837 error ("invalid operand in unary expression");
3838 return true;
3839 }
20dcff2a 3840 if (!one_pointer_to_useless_type_conversion_p (type, TREE_TYPE (op))
7e98624c
RG
3841 /* FIXME: a longstanding wart, &a == &a[0]. */
3842 && (TREE_CODE (TREE_TYPE (op)) != ARRAY_TYPE
20dcff2a
RG
3843 || !one_pointer_to_useless_type_conversion_p (type,
3844 TREE_TYPE (TREE_TYPE (op)))))
7e98624c
RG
3845 {
3846 error ("type mismatch in address expression");
3847 debug_generic_stmt (TREE_TYPE (expr));
8fc6f12f 3848 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
7e98624c
RG
3849 return true;
3850 }
3851
3852 return verify_gimple_reference (op);
3853 }
3854
3855 case TRUTH_ANDIF_EXPR:
3856 case TRUTH_ORIF_EXPR:
3857 case TRUTH_AND_EXPR:
3858 case TRUTH_OR_EXPR:
3859 case TRUTH_XOR_EXPR:
3860 {
3861 tree op0 = TREE_OPERAND (expr, 0);
3862 tree op1 = TREE_OPERAND (expr, 1);
3863
3864 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3865 {
3866 error ("invalid operands in truth expression");
3867 return true;
3868 }
3869
3870 /* We allow any kind of integral typed argument and result. */
3871 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3872 || !INTEGRAL_TYPE_P (TREE_TYPE (op1))
3873 || !INTEGRAL_TYPE_P (type))
3874 {
3875 error ("type mismatch in binary truth expression");
3876 debug_generic_stmt (type);
3877 debug_generic_stmt (TREE_TYPE (op0));
3878 debug_generic_stmt (TREE_TYPE (op1));
3879 return true;
3880 }
3881
3882 return false;
3883 }
3884
3885 case TRUTH_NOT_EXPR:
3886 {
3887 tree op = TREE_OPERAND (expr, 0);
3888
3889 if (!is_gimple_val (op))
3890 {
3891 error ("invalid operand in unary not");
3892 return true;
3893 }
3894
3895 /* For TRUTH_NOT_EXPR we can have any kind of integral
3896 typed arguments and results. */
3897 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3898 || !INTEGRAL_TYPE_P (type))
3899 {
3900 error ("type mismatch in not expression");
3901 debug_generic_expr (TREE_TYPE (expr));
3902 debug_generic_expr (TREE_TYPE (op));
3903 return true;
3904 }
3905
3906 return false;
3907 }
3908
3909 case CALL_EXPR:
3910 /* FIXME. The C frontend passes unpromoted arguments in case it
3911 didn't see a function declaration before the call. */
3912 return false;
3913
b691d4b0
RG
3914 case OBJ_TYPE_REF:
3915 /* FIXME. */
3916 return false;
3917
7e98624c
RG
3918 default:;
3919 }
3920
3921 /* Generic handling via classes. */
3922 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3923 {
3924 case tcc_unary:
3925 return verify_gimple_unary_expr (expr);
3926
3927 case tcc_binary:
3928 return verify_gimple_binary_expr (expr);
3929
3930 case tcc_reference:
3931 return verify_gimple_reference (expr);
3932
3933 case tcc_comparison:
3934 {
3935 tree op0 = TREE_OPERAND (expr, 0);
3936 tree op1 = TREE_OPERAND (expr, 1);
3937 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3938 {
3939 error ("invalid operands in comparison expression");
3940 return true;
3941 }
3942 /* For comparisons we do not have the operations type as the
3943 effective type the comparison is carried out in. Instead
3944 we require that either the first operand is trivially
3945 convertible into the second, or the other way around.
3946 The resulting type of a comparison may be any integral type.
3947 Because we special-case pointers to void we allow
3948 comparisons of pointers with the same mode as well. */
3949 if ((!useless_type_conversion_p (TREE_TYPE (op0), TREE_TYPE (op1))
3950 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0))
3951 && (!POINTER_TYPE_P (TREE_TYPE (op0))
3952 || !POINTER_TYPE_P (TREE_TYPE (op1))
3953 || TYPE_MODE (TREE_TYPE (op0)) != TYPE_MODE (TREE_TYPE (op1))))
3954 || !INTEGRAL_TYPE_P (type))
3955 {
3956 error ("type mismatch in comparison expression");
3957 debug_generic_stmt (TREE_TYPE (expr));
3958 debug_generic_stmt (TREE_TYPE (op0));
3959 debug_generic_stmt (TREE_TYPE (op1));
3960 return true;
3961 }
3962 break;
3963 }
3964
3965 default:
3966 gcc_unreachable ();
3967 }
3968
3969 return false;
3970}
3971
3972/* Verify the GIMPLE assignment statement STMT. Returns true if there
3973 is an error, otherwise false. */
3974
3975static bool
ed7a4b4b 3976verify_gimple_modify_stmt (const_tree stmt)
7e98624c
RG
3977{
3978 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3979 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3980
3981 gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
3982
3983 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3984 TREE_TYPE (rhs)))
3985 {
3986 error ("non-trivial conversion at assignment");
3987 debug_generic_expr (TREE_TYPE (lhs));
3988 debug_generic_expr (TREE_TYPE (rhs));
3989 return true;
3990 }
3991
3992 /* Loads/stores from/to a variable are ok. */
3993 if ((is_gimple_val (lhs)
3994 && is_gimple_variable (rhs))
3995 || (is_gimple_val (rhs)
3996 && is_gimple_variable (lhs)))
3997 return false;
3998
3999 /* Aggregate copies are ok. */
4000 if (!is_gimple_reg_type (TREE_TYPE (lhs))
4001 && !is_gimple_reg_type (TREE_TYPE (rhs)))
4002 return false;
4003
4004 /* We might get 'loads' from a parameter which is not a gimple value. */
4005 if (TREE_CODE (rhs) == PARM_DECL)
4006 return verify_gimple_expr (lhs);
4007
4008 if (!is_gimple_variable (lhs)
4009 && verify_gimple_expr (lhs))
4010 return true;
4011
4012 if (!is_gimple_variable (rhs)
4013 && verify_gimple_expr (rhs))
4014 return true;
4015
4016 return false;
4017}
4018
4019/* Verify the GIMPLE statement STMT. Returns true if there is an
4020 error, otherwise false. */
4021
4022static bool
4023verify_gimple_stmt (tree stmt)
4024{
4025 if (!is_gimple_stmt (stmt))
4026 {
4027 error ("is not a valid GIMPLE statement");
4028 return true;
4029 }
4030
4031 if (OMP_DIRECTIVE_P (stmt))
4032 {
4033 /* OpenMP directives are validated by the FE and never operated
4034 on by the optimizers. Furthermore, OMP_FOR may contain
4035 non-gimple expressions when the main index variable has had
4036 its address taken. This does not affect the loop itself
4037 because the header of an OMP_FOR is merely used to determine
4038 how to setup the parallel iteration. */
4039 return false;
4040 }
4041
4042 switch (TREE_CODE (stmt))
4043 {
4044 case GIMPLE_MODIFY_STMT:
4045 return verify_gimple_modify_stmt (stmt);
4046
4047 case GOTO_EXPR:
4048 case LABEL_EXPR:
4049 return false;
4050
4051 case SWITCH_EXPR:
4052 if (!is_gimple_val (TREE_OPERAND (stmt, 0)))
4053 {
4054 error ("invalid operand to switch statement");
4055 debug_generic_expr (TREE_OPERAND (stmt, 0));
4056 }
4057 return false;
4058
4059 case RETURN_EXPR:
4060 {
4061 tree op = TREE_OPERAND (stmt, 0);
4062
4063 if (TREE_CODE (TREE_TYPE (stmt)) != VOID_TYPE)
4064 {
4065 error ("type error in return expression");
4066 return true;
4067 }
4068
4069 if (op == NULL_TREE
4070 || TREE_CODE (op) == RESULT_DECL)
4071 return false;
4072
4073 return verify_gimple_modify_stmt (op);
4074 }
4075
4076 case CALL_EXPR:
4077 case COND_EXPR:
4078 return verify_gimple_expr (stmt);
4079
4080 case NOP_EXPR:
4081 case CHANGE_DYNAMIC_TYPE_EXPR:
4082 case ASM_EXPR:
2e28e797 4083 case PREDICT_EXPR:
7e98624c
RG
4084 return false;
4085
4086 default:
4087 gcc_unreachable ();
4088 }
4089}
4090
7dc83ebc
RG
4091/* Verify the GIMPLE statements inside the statement list STMTS.
4092 Returns true if there were any errors. */
7e98624c 4093
7dc83ebc
RG
4094static bool
4095verify_gimple_2 (tree stmts)
7e98624c
RG
4096{
4097 tree_stmt_iterator tsi;
7dc83ebc 4098 bool err = false;
7e98624c
RG
4099
4100 for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
4101 {
4102 tree stmt = tsi_stmt (tsi);
4103
4104 switch (TREE_CODE (stmt))
4105 {
4106 case BIND_EXPR:
7dc83ebc 4107 err |= verify_gimple_2 (BIND_EXPR_BODY (stmt));
7e98624c
RG
4108 break;
4109
4110 case TRY_CATCH_EXPR:
4111 case TRY_FINALLY_EXPR:
7dc83ebc
RG
4112 err |= verify_gimple_2 (TREE_OPERAND (stmt, 0));
4113 err |= verify_gimple_2 (TREE_OPERAND (stmt, 1));
7e98624c
RG
4114 break;
4115
4116 case CATCH_EXPR:
7dc83ebc 4117 err |= verify_gimple_2 (CATCH_BODY (stmt));
7e98624c
RG
4118 break;
4119
4120 case EH_FILTER_EXPR:
7dc83ebc 4121 err |= verify_gimple_2 (EH_FILTER_FAILURE (stmt));
7e98624c
RG
4122 break;
4123
4124 default:
7dc83ebc
RG
4125 {
4126 bool err2 = verify_gimple_stmt (stmt);
4127 if (err2)
4128 debug_generic_expr (stmt);
4129 err |= err2;
4130 }
7e98624c
RG
4131 }
4132 }
7dc83ebc
RG
4133
4134 return err;
4135}
4136
4137
4138/* Verify the GIMPLE statements inside the statement list STMTS. */
4139
4140void
4141verify_gimple_1 (tree stmts)
4142{
4143 if (verify_gimple_2 (stmts))
4144 internal_error ("verify_gimple failed");
7e98624c
RG
4145}
4146
4147/* Verify the GIMPLE statements inside the current function. */
4148
4149void
4150verify_gimple (void)
4151{
4152 verify_gimple_1 (BIND_EXPR_BODY (DECL_SAVED_TREE (cfun->decl)));
4153}
6de9cd9a
DN
4154
4155/* Verify STMT, return true if STMT is not in GIMPLE form.
4156 TODO: Implement type checking. */
4157
4158static bool
1eaba2f2 4159verify_stmt (tree stmt, bool last_in_block)
6de9cd9a
DN
4160{
4161 tree addr;
4162
50674e96
DN
4163 if (OMP_DIRECTIVE_P (stmt))
4164 {
4165 /* OpenMP directives are validated by the FE and never operated
4166 on by the optimizers. Furthermore, OMP_FOR may contain
4167 non-gimple expressions when the main index variable has had
4168 its address taken. This does not affect the loop itself
4169 because the header of an OMP_FOR is merely used to determine
4170 how to setup the parallel iteration. */
4171 return false;
4172 }
4173
6de9cd9a
DN
4174 if (!is_gimple_stmt (stmt))
4175 {
ab532386 4176 error ("is not a valid GIMPLE statement");
1eaba2f2 4177 goto fail;
6de9cd9a
DN
4178 }
4179
4180 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
4181 if (addr)
4182 {
4183 debug_generic_stmt (addr);
2f9ea521
RG
4184 if (addr != stmt)
4185 {
4186 inform ("in statement");
4187 debug_generic_stmt (stmt);
4188 }
6de9cd9a
DN
4189 return true;
4190 }
4191
1eaba2f2
RH
4192 /* If the statement is marked as part of an EH region, then it is
4193 expected that the statement could throw. Verify that when we
4194 have optimizations that simplify statements such that we prove
4195 that they cannot throw, that we update other data structures
4196 to match. */
4197 if (lookup_stmt_eh_region (stmt) >= 0)
4198 {
4199 if (!tree_could_throw_p (stmt))
4200 {
ab532386 4201 error ("statement marked for throw, but doesn%'t");
1eaba2f2
RH
4202 goto fail;
4203 }
4204 if (!last_in_block && tree_can_throw_internal (stmt))
4205 {
ab532386 4206 error ("statement marked for throw in middle of block");
1eaba2f2
RH
4207 goto fail;
4208 }
4209 }
4210
6de9cd9a 4211 return false;
1eaba2f2
RH
4212
4213 fail:
4214 debug_generic_stmt (stmt);
4215 return true;
6de9cd9a
DN
4216}
4217
4218
4219/* Return true when the T can be shared. */
4220
4221static bool
4222tree_node_can_be_shared (tree t)
4223{
6615c446 4224 if (IS_TYPE_OR_DECL_P (t)
6de9cd9a 4225 || is_gimple_min_invariant (t)
5e23162d 4226 || TREE_CODE (t) == SSA_NAME
953ff289
DN
4227 || t == error_mark_node
4228 || TREE_CODE (t) == IDENTIFIER_NODE)
6de9cd9a
DN
4229 return true;
4230
92b6dff3
JL
4231 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4232 return true;
4233
44de5aeb 4234 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
953ff289
DN
4235 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4236 || TREE_CODE (t) == COMPONENT_REF
4237 || TREE_CODE (t) == REALPART_EXPR
4238 || TREE_CODE (t) == IMAGPART_EXPR)
6de9cd9a
DN
4239 t = TREE_OPERAND (t, 0);
4240
4241 if (DECL_P (t))
4242 return true;
4243
4244 return false;
4245}
4246
4247
4248/* Called via walk_trees. Verify tree sharing. */
4249
4250static tree
4251verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
4252{
4437b50d 4253 struct pointer_set_t *visited = (struct pointer_set_t *) data;
6de9cd9a
DN
4254
4255 if (tree_node_can_be_shared (*tp))
4256 {
4257 *walk_subtrees = false;
4258 return NULL;
4259 }
4260
4437b50d
JH
4261 if (pointer_set_insert (visited, *tp))
4262 return *tp;
6de9cd9a
DN
4263
4264 return NULL;
4265}
4266
4267
07beea0d
AH
4268/* Helper function for verify_gimple_tuples. */
4269
4270static tree
4271verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4272 void *data ATTRIBUTE_UNUSED)
4273{
4274 switch (TREE_CODE (*tp))
4275 {
4276 case MODIFY_EXPR:
4277 error ("unexpected non-tuple");
4278 debug_tree (*tp);
4279 gcc_unreachable ();
4280 return NULL_TREE;
4281
4282 default:
4283 return NULL_TREE;
4284 }
4285}
4286
4287/* Verify that there are no trees that should have been converted to
4288 gimple tuples. Return true if T contains a node that should have
4289 been converted to a gimple tuple, but hasn't. */
4290
4291static bool
4292verify_gimple_tuples (tree t)
4293{
4294 return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
4295}
4296
4437b50d
JH
4297static bool eh_error_found;
4298static int
4299verify_eh_throw_stmt_node (void **slot, void *data)
4300{
4301 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4302 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4303
4304 if (!pointer_set_contains (visited, node->stmt))
4305 {
4306 error ("Dead STMT in EH table");
4307 debug_generic_stmt (node->stmt);
4308 eh_error_found = true;
4309 }
4310 return 0;
4311}
4312
6de9cd9a
DN
4313/* Verify the GIMPLE statement chain. */
4314
4315void
4316verify_stmts (void)
4317{
4318 basic_block bb;
4319 block_stmt_iterator bsi;
4320 bool err = false;
4437b50d 4321 struct pointer_set_t *visited, *visited_stmts;
6de9cd9a
DN
4322 tree addr;
4323
4324 timevar_push (TV_TREE_STMT_VERIFY);
4437b50d
JH
4325 visited = pointer_set_create ();
4326 visited_stmts = pointer_set_create ();
6de9cd9a
DN
4327
4328 FOR_EACH_BB (bb)
4329 {
4330 tree phi;
4331 int i;
4332
17192884 4333 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
6de9cd9a
DN
4334 {
4335 int phi_num_args = PHI_NUM_ARGS (phi);
4336
4437b50d 4337 pointer_set_insert (visited_stmts, phi);
8de1fc1b
KH
4338 if (bb_for_stmt (phi) != bb)
4339 {
ab532386 4340 error ("bb_for_stmt (phi) is set to a wrong basic block");
8de1fc1b
KH
4341 err |= true;
4342 }
4343
6de9cd9a
DN
4344 for (i = 0; i < phi_num_args; i++)
4345 {
4346 tree t = PHI_ARG_DEF (phi, i);
4347 tree addr;
4348
e9705dc5
AO
4349 if (!t)
4350 {
4351 error ("missing PHI def");
4352 debug_generic_stmt (phi);
4353 err |= true;
4354 continue;
4355 }
6de9cd9a
DN
4356 /* Addressable variables do have SSA_NAMEs but they
4357 are not considered gimple values. */
e9705dc5
AO
4358 else if (TREE_CODE (t) != SSA_NAME
4359 && TREE_CODE (t) != FUNCTION_DECL
220f1c29 4360 && !is_gimple_min_invariant (t))
6de9cd9a
DN
4361 {
4362 error ("PHI def is not a GIMPLE value");
4363 debug_generic_stmt (phi);
4364 debug_generic_stmt (t);
4365 err |= true;
4366 }
4367
4437b50d 4368 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
6de9cd9a
DN
4369 if (addr)
4370 {
ab532386 4371 error ("incorrect sharing of tree nodes");
6de9cd9a
DN
4372 debug_generic_stmt (phi);
4373 debug_generic_stmt (addr);
4374 err |= true;
4375 }
4376 }
4377 }
4378
1eaba2f2 4379 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
6de9cd9a
DN
4380 {
4381 tree stmt = bsi_stmt (bsi);
8de1fc1b 4382
4437b50d 4383 pointer_set_insert (visited_stmts, stmt);
07beea0d
AH
4384 err |= verify_gimple_tuples (stmt);
4385
8de1fc1b
KH
4386 if (bb_for_stmt (stmt) != bb)
4387 {
ab532386 4388 error ("bb_for_stmt (stmt) is set to a wrong basic block");
8de1fc1b
KH
4389 err |= true;
4390 }
4391
1eaba2f2
RH
4392 bsi_next (&bsi);
4393 err |= verify_stmt (stmt, bsi_end_p (bsi));
4437b50d 4394 addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
6de9cd9a
DN
4395 if (addr)
4396 {
ab532386 4397 error ("incorrect sharing of tree nodes");
6de9cd9a
DN
4398 debug_generic_stmt (stmt);
4399 debug_generic_stmt (addr);
4400 err |= true;
4401 }
4402 }
4403 }
4437b50d
JH
4404 eh_error_found = false;
4405 if (get_eh_throw_stmt_table (cfun))
4406 htab_traverse (get_eh_throw_stmt_table (cfun),
4407 verify_eh_throw_stmt_node,
4408 visited_stmts);
6de9cd9a 4409
4437b50d 4410 if (err | eh_error_found)
ab532386 4411 internal_error ("verify_stmts failed");
6de9cd9a 4412
4437b50d
JH
4413 pointer_set_destroy (visited);
4414 pointer_set_destroy (visited_stmts);
6946b3f7 4415 verify_histograms ();
6de9cd9a
DN
4416 timevar_pop (TV_TREE_STMT_VERIFY);
4417}
4418
4419
4420/* Verifies that the flow information is OK. */
4421
4422static int
4423tree_verify_flow_info (void)
4424{
4425 int err = 0;
4426 basic_block bb;
4427 block_stmt_iterator bsi;
4428 tree stmt;
4429 edge e;
628f6a4e 4430 edge_iterator ei;
6de9cd9a 4431
7506e1cb 4432 if (ENTRY_BLOCK_PTR->il.tree)
6de9cd9a 4433 {
7506e1cb 4434 error ("ENTRY_BLOCK has IL associated with it");
6de9cd9a
DN
4435 err = 1;
4436 }
4437
7506e1cb 4438 if (EXIT_BLOCK_PTR->il.tree)
6de9cd9a 4439 {
7506e1cb 4440 error ("EXIT_BLOCK has IL associated with it");
6de9cd9a
DN
4441 err = 1;
4442 }
4443
628f6a4e 4444 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
4445 if (e->flags & EDGE_FALLTHRU)
4446 {
ab532386 4447 error ("fallthru to exit from bb %d", e->src->index);
6de9cd9a
DN
4448 err = 1;
4449 }
4450
4451 FOR_EACH_BB (bb)
4452 {
4453 bool found_ctrl_stmt = false;
4454
548414c6
KH
4455 stmt = NULL_TREE;
4456
6de9cd9a
DN
4457 /* Skip labels on the start of basic block. */
4458 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4459 {
548414c6
KH
4460 tree prev_stmt = stmt;
4461
4462 stmt = bsi_stmt (bsi);
4463
4464 if (TREE_CODE (stmt) != LABEL_EXPR)
6de9cd9a
DN
4465 break;
4466
548414c6
KH
4467 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4468 {
953ff289
DN
4469 error ("nonlocal label ");
4470 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4471 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4472 bb->index);
548414c6
KH
4473 err = 1;
4474 }
4475
4476 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
6de9cd9a 4477 {
953ff289
DN
4478 error ("label ");
4479 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4480 fprintf (stderr, " to block does not match in bb %d",
4481 bb->index);
6de9cd9a
DN
4482 err = 1;
4483 }
4484
548414c6 4485 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
6de9cd9a
DN
4486 != current_function_decl)
4487 {
953ff289
DN
4488 error ("label ");
4489 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4490 fprintf (stderr, " has incorrect context in bb %d",
4491 bb->index);
6de9cd9a
DN
4492 err = 1;
4493 }
4494 }
4495
4496 /* Verify that body of basic block BB is free of control flow. */
4497 for (; !bsi_end_p (bsi); bsi_next (&bsi))
4498 {
4499 tree stmt = bsi_stmt (bsi);
4500
4501 if (found_ctrl_stmt)
4502 {
ab532386 4503 error ("control flow in the middle of basic block %d",
6de9cd9a
DN
4504 bb->index);
4505 err = 1;
4506 }
4507
4508 if (stmt_ends_bb_p (stmt))
4509 found_ctrl_stmt = true;
4510
4511 if (TREE_CODE (stmt) == LABEL_EXPR)
4512 {
953ff289
DN
4513 error ("label ");
4514 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4515 fprintf (stderr, " in the middle of basic block %d", bb->index);
6de9cd9a
DN
4516 err = 1;
4517 }
4518 }
953ff289 4519
6de9cd9a
DN
4520 bsi = bsi_last (bb);
4521 if (bsi_end_p (bsi))
4522 continue;
4523
4524 stmt = bsi_stmt (bsi);
4525
cc7220fd
JH
4526 err |= verify_eh_edges (stmt);
4527
6de9cd9a
DN
4528 if (is_ctrl_stmt (stmt))
4529 {
628f6a4e 4530 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4531 if (e->flags & EDGE_FALLTHRU)
4532 {
ab532386 4533 error ("fallthru edge after a control statement in bb %d",
6de9cd9a
DN
4534 bb->index);
4535 err = 1;
4536 }
4537 }
4538
36b24193
ZD
4539 if (TREE_CODE (stmt) != COND_EXPR)
4540 {
4541 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4542 after anything else but if statement. */
4543 FOR_EACH_EDGE (e, ei, bb->succs)
4544 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4545 {
4546 error ("true/false edge after a non-COND_EXPR in bb %d",
4547 bb->index);
4548 err = 1;
4549 }
4550 }
4551
6de9cd9a
DN
4552 switch (TREE_CODE (stmt))
4553 {
4554 case COND_EXPR:
4555 {
4556 edge true_edge;
4557 edge false_edge;
a9b77cd1
ZD
4558
4559 if (COND_EXPR_THEN (stmt) != NULL_TREE
4560 || COND_EXPR_ELSE (stmt) != NULL_TREE)
6de9cd9a 4561 {
a9b77cd1
ZD
4562 error ("COND_EXPR with code in branches at the end of bb %d",
4563 bb->index);
6de9cd9a
DN
4564 err = 1;
4565 }
4566
4567 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4568
4569 if (!true_edge || !false_edge
4570 || !(true_edge->flags & EDGE_TRUE_VALUE)
4571 || !(false_edge->flags & EDGE_FALSE_VALUE)
4572 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4573 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
628f6a4e 4574 || EDGE_COUNT (bb->succs) >= 3)
6de9cd9a 4575 {
ab532386 4576 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4577 bb->index);
4578 err = 1;
4579 }
6de9cd9a
DN
4580 }
4581 break;
4582
4583 case GOTO_EXPR:
4584 if (simple_goto_p (stmt))
4585 {
ab532386 4586 error ("explicit goto at end of bb %d", bb->index);
6531d1be 4587 err = 1;
6de9cd9a
DN
4588 }
4589 else
4590 {
6531d1be 4591 /* FIXME. We should double check that the labels in the
6de9cd9a 4592 destination blocks have their address taken. */
628f6a4e 4593 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4594 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4595 | EDGE_FALSE_VALUE))
4596 || !(e->flags & EDGE_ABNORMAL))
4597 {
ab532386 4598 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4599 bb->index);
4600 err = 1;
4601 }
4602 }
4603 break;
4604
4605 case RETURN_EXPR:
c5cbcccf
ZD
4606 if (!single_succ_p (bb)
4607 || (single_succ_edge (bb)->flags
4608 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4609 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
6de9cd9a 4610 {
ab532386 4611 error ("wrong outgoing edge flags at end of bb %d", bb->index);
6de9cd9a
DN
4612 err = 1;
4613 }
c5cbcccf 4614 if (single_succ (bb) != EXIT_BLOCK_PTR)
6de9cd9a 4615 {
ab532386 4616 error ("return edge does not point to exit in bb %d",
6de9cd9a
DN
4617 bb->index);
4618 err = 1;
4619 }
4620 break;
4621
4622 case SWITCH_EXPR:
4623 {
7853504d 4624 tree prev;
6de9cd9a
DN
4625 edge e;
4626 size_t i, n;
4627 tree vec;
4628
4629 vec = SWITCH_LABELS (stmt);
4630 n = TREE_VEC_LENGTH (vec);
4631
4632 /* Mark all the destination basic blocks. */
4633 for (i = 0; i < n; ++i)
4634 {
4635 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4636 basic_block label_bb = label_to_block (lab);
4637
1e128c5f 4638 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
6de9cd9a
DN
4639 label_bb->aux = (void *)1;
4640 }
4641
7853504d
SB
4642 /* Verify that the case labels are sorted. */
4643 prev = TREE_VEC_ELT (vec, 0);
b7814a18 4644 for (i = 1; i < n; ++i)
7853504d
SB
4645 {
4646 tree c = TREE_VEC_ELT (vec, i);
4647 if (! CASE_LOW (c))
4648 {
b7814a18
RG
4649 if (i != n - 1)
4650 {
4651 error ("found default case not at end of case vector");
4652 err = 1;
4653 }
7853504d
SB
4654 continue;
4655 }
4656 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4657 {
953ff289 4658 error ("case labels not sorted: ");
7853504d
SB
4659 print_generic_expr (stderr, prev, 0);
4660 fprintf (stderr," is greater than ");
4661 print_generic_expr (stderr, c, 0);
4662 fprintf (stderr," but comes before it.\n");
4663 err = 1;
4664 }
4665 prev = c;
4666 }
b7814a18
RG
4667 /* VRP will remove the default case if it can prove it will
4668 never be executed. So do not verify there always exists
4669 a default case here. */
7853504d 4670
628f6a4e 4671 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4672 {
4673 if (!e->dest->aux)
4674 {
ab532386 4675 error ("extra outgoing edge %d->%d",
6de9cd9a
DN
4676 bb->index, e->dest->index);
4677 err = 1;
4678 }
4679 e->dest->aux = (void *)2;
4680 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4681 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4682 {
ab532386 4683 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4684 bb->index);
4685 err = 1;
4686 }
4687 }
4688
4689 /* Check that we have all of them. */
4690 for (i = 0; i < n; ++i)
4691 {
4692 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4693 basic_block label_bb = label_to_block (lab);
4694
4695 if (label_bb->aux != (void *)2)
4696 {
ab532386 4697 error ("missing edge %i->%i",
6de9cd9a
DN
4698 bb->index, label_bb->index);
4699 err = 1;
4700 }
4701 }
4702
628f6a4e 4703 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4704 e->dest->aux = (void *)0;
4705 }
4706
4707 default: ;
4708 }
4709 }
4710
2b28c07a 4711 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
6de9cd9a
DN
4712 verify_dominators (CDI_DOMINATORS);
4713
4714 return err;
4715}
4716
4717
f0b698c1 4718/* Updates phi nodes after creating a forwarder block joined
6de9cd9a
DN
4719 by edge FALLTHRU. */
4720
4721static void
4722tree_make_forwarder_block (edge fallthru)
4723{
4724 edge e;
628f6a4e 4725 edge_iterator ei;
6de9cd9a 4726 basic_block dummy, bb;
5ae71719 4727 tree phi, new_phi, var;
6de9cd9a
DN
4728
4729 dummy = fallthru->src;
4730 bb = fallthru->dest;
4731
c5cbcccf 4732 if (single_pred_p (bb))
6de9cd9a
DN
4733 return;
4734
cfaab3a9 4735 /* If we redirected a branch we must create new PHI nodes at the
6de9cd9a 4736 start of BB. */
17192884 4737 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
6de9cd9a
DN
4738 {
4739 var = PHI_RESULT (phi);
4740 new_phi = create_phi_node (var, bb);
4741 SSA_NAME_DEF_STMT (var) = new_phi;
d00ad49b 4742 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
d2e398df 4743 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
6de9cd9a
DN
4744 }
4745
17192884 4746 /* Ensure that the PHI node chain is in the same order. */
5ae71719 4747 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
6de9cd9a
DN
4748
4749 /* Add the arguments we have stored on edges. */
628f6a4e 4750 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a
DN
4751 {
4752 if (e == fallthru)
4753 continue;
4754
71882046 4755 flush_pending_stmts (e);
6de9cd9a
DN
4756 }
4757}
4758
4759
6de9cd9a
DN
4760/* Return a non-special label in the head of basic block BLOCK.
4761 Create one if it doesn't exist. */
4762
d7621d3c 4763tree
6de9cd9a
DN
4764tree_block_label (basic_block bb)
4765{
4766 block_stmt_iterator i, s = bsi_start (bb);
4767 bool first = true;
4768 tree label, stmt;
4769
4770 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4771 {
4772 stmt = bsi_stmt (i);
4773 if (TREE_CODE (stmt) != LABEL_EXPR)
4774 break;
4775 label = LABEL_EXPR_LABEL (stmt);
4776 if (!DECL_NONLOCAL (label))
4777 {
4778 if (!first)
4779 bsi_move_before (&i, &s);
4780 return label;
4781 }
4782 }
4783
4784 label = create_artificial_label ();
4785 stmt = build1 (LABEL_EXPR, void_type_node, label);
4786 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4787 return label;
4788}
4789
4790
4791/* Attempt to perform edge redirection by replacing a possibly complex
4792 jump instruction by a goto or by removing the jump completely.
4793 This can apply only if all edges now point to the same block. The
4794 parameters and return values are equivalent to
4795 redirect_edge_and_branch. */
4796
4797static edge
4798tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4799{
4800 basic_block src = e->src;
6de9cd9a
DN
4801 block_stmt_iterator b;
4802 tree stmt;
6de9cd9a 4803
07b43a87
KH
4804 /* We can replace or remove a complex jump only when we have exactly
4805 two edges. */
4806 if (EDGE_COUNT (src->succs) != 2
4807 /* Verify that all targets will be TARGET. Specifically, the
4808 edge that is not E must also go to TARGET. */
4809 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6de9cd9a
DN
4810 return NULL;
4811
4812 b = bsi_last (src);
4813 if (bsi_end_p (b))
4814 return NULL;
4815 stmt = bsi_stmt (b);
4816
4817 if (TREE_CODE (stmt) == COND_EXPR
4818 || TREE_CODE (stmt) == SWITCH_EXPR)
4819 {
736432ee 4820 bsi_remove (&b, true);
6de9cd9a
DN
4821 e = ssa_redirect_edge (e, target);
4822 e->flags = EDGE_FALLTHRU;
4823 return e;
4824 }
4825
4826 return NULL;
4827}
4828
4829
4830/* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4831 edge representing the redirected branch. */
4832
4833static edge
4834tree_redirect_edge_and_branch (edge e, basic_block dest)
4835{
4836 basic_block bb = e->src;
4837 block_stmt_iterator bsi;
4838 edge ret;
18965703 4839 tree stmt;
6de9cd9a 4840
4f6c2131 4841 if (e->flags & EDGE_ABNORMAL)
6de9cd9a
DN
4842 return NULL;
4843
6531d1be 4844 if (e->src != ENTRY_BLOCK_PTR
6de9cd9a
DN
4845 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4846 return ret;
4847
4848 if (e->dest == dest)
4849 return NULL;
4850
6de9cd9a
DN
4851 bsi = bsi_last (bb);
4852 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4853
4854 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4855 {
4856 case COND_EXPR:
a9b77cd1 4857 /* For COND_EXPR, we only need to redirect the edge. */
6de9cd9a
DN
4858 break;
4859
4860 case GOTO_EXPR:
4861 /* No non-abnormal edges should lead from a non-simple goto, and
4862 simple ones should be represented implicitly. */
1e128c5f 4863 gcc_unreachable ();
6de9cd9a
DN
4864
4865 case SWITCH_EXPR:
4866 {
d6be0d7f 4867 tree cases = get_cases_for_edge (e, stmt);
18965703 4868 tree label = tree_block_label (dest);
6de9cd9a 4869
d6be0d7f
JL
4870 /* If we have a list of cases associated with E, then use it
4871 as it's a lot faster than walking the entire case vector. */
4872 if (cases)
6de9cd9a 4873 {
4edbbd3f 4874 edge e2 = find_edge (e->src, dest);
d6be0d7f
JL
4875 tree last, first;
4876
4877 first = cases;
4878 while (cases)
4879 {
4880 last = cases;
4881 CASE_LABEL (cases) = label;
4882 cases = TREE_CHAIN (cases);
4883 }
4884
4885 /* If there was already an edge in the CFG, then we need
4886 to move all the cases associated with E to E2. */
4887 if (e2)
4888 {
4889 tree cases2 = get_cases_for_edge (e2, stmt);
4890
4891 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4892 TREE_CHAIN (cases2) = first;
4893 }
6de9cd9a 4894 }
92b6dff3
JL
4895 else
4896 {
d6be0d7f
JL
4897 tree vec = SWITCH_LABELS (stmt);
4898 size_t i, n = TREE_VEC_LENGTH (vec);
4899
4900 for (i = 0; i < n; i++)
4901 {
4902 tree elt = TREE_VEC_ELT (vec, i);
4903
4904 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4905 CASE_LABEL (elt) = label;
4906 }
92b6dff3 4907 }
d6be0d7f 4908
92b6dff3 4909 break;
6de9cd9a 4910 }
6de9cd9a
DN
4911
4912 case RETURN_EXPR:
736432ee 4913 bsi_remove (&bsi, true);
6de9cd9a
DN
4914 e->flags |= EDGE_FALLTHRU;
4915 break;
4916
e5c95afe
ZD
4917 case OMP_RETURN:
4918 case OMP_CONTINUE:
4919 case OMP_SECTIONS_SWITCH:
4920 case OMP_FOR:
4921 /* The edges from OMP constructs can be simply redirected. */
4922 break;
4923
6de9cd9a
DN
4924 default:
4925 /* Otherwise it must be a fallthru edge, and we don't need to
4926 do anything besides redirecting it. */
1e128c5f 4927 gcc_assert (e->flags & EDGE_FALLTHRU);
6de9cd9a
DN
4928 break;
4929 }
4930
4931 /* Update/insert PHI nodes as necessary. */
4932
4933 /* Now update the edges in the CFG. */
4934 e = ssa_redirect_edge (e, dest);
4935
4936 return e;
4937}
4938
14fa2cc0
ZD
4939/* Returns true if it is possible to remove edge E by redirecting
4940 it to the destination of the other edge from E->src. */
4941
4942static bool
9678086d 4943tree_can_remove_branch_p (const_edge e)
14fa2cc0
ZD
4944{
4945 if (e->flags & EDGE_ABNORMAL)
4946 return false;
4947
4948 return true;
4949}
6de9cd9a
DN
4950
4951/* Simple wrapper, as we can always redirect fallthru edges. */
4952
4953static basic_block
4954tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4955{
4956 e = tree_redirect_edge_and_branch (e, dest);
1e128c5f 4957 gcc_assert (e);
6de9cd9a
DN
4958
4959 return NULL;
4960}
4961
4962
4963/* Splits basic block BB after statement STMT (but at least after the
4964 labels). If STMT is NULL, BB is split just after the labels. */
4965
4966static basic_block
4967tree_split_block (basic_block bb, void *stmt)
4968{
597ae074
JH
4969 block_stmt_iterator bsi;
4970 tree_stmt_iterator tsi_tgt;
7506e1cb 4971 tree act, list;
6de9cd9a
DN
4972 basic_block new_bb;
4973 edge e;
628f6a4e 4974 edge_iterator ei;
6de9cd9a
DN
4975
4976 new_bb = create_empty_bb (bb);
4977
4978 /* Redirect the outgoing edges. */
628f6a4e
BE
4979 new_bb->succs = bb->succs;
4980 bb->succs = NULL;
4981 FOR_EACH_EDGE (e, ei, new_bb->succs)
6de9cd9a
DN
4982 e->src = new_bb;
4983
4984 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4985 stmt = NULL;
4986
4987 /* Move everything from BSI to the new basic block. */
4988 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4989 {
4990 act = bsi_stmt (bsi);
4991 if (TREE_CODE (act) == LABEL_EXPR)
4992 continue;
4993
4994 if (!stmt)
4995 break;
4996
4997 if (stmt == act)
4998 {
4999 bsi_next (&bsi);
5000 break;
5001 }
5002 }
5003
597ae074
JH
5004 if (bsi_end_p (bsi))
5005 return new_bb;
5006
5007 /* Split the statement list - avoid re-creating new containers as this
5008 brings ugly quadratic memory consumption in the inliner.
5009 (We are still quadratic since we need to update stmt BB pointers,
5010 sadly.) */
7506e1cb
ZD
5011 list = tsi_split_statement_list_before (&bsi.tsi);
5012 set_bb_stmt_list (new_bb, list);
5013 for (tsi_tgt = tsi_start (list);
597ae074 5014 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
0a4fe58f 5015 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
6de9cd9a
DN
5016
5017 return new_bb;
5018}
5019
5020
5021/* Moves basic block BB after block AFTER. */
5022
5023static bool
5024tree_move_block_after (basic_block bb, basic_block after)
5025{
5026 if (bb->prev_bb == after)
5027 return true;
5028
5029 unlink_block (bb);
5030 link_block (bb, after);
5031
5032 return true;
5033}
5034
5035
5036/* Return true if basic_block can be duplicated. */
5037
5038static bool
9678086d 5039tree_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6de9cd9a
DN
5040{
5041 return true;
5042}
5043
84d65814 5044
6de9cd9a
DN
5045/* Create a duplicate of the basic block BB. NOTE: This does not
5046 preserve SSA form. */
5047
5048static basic_block
5049tree_duplicate_bb (basic_block bb)
5050{
5051 basic_block new_bb;
5052 block_stmt_iterator bsi, bsi_tgt;
84d65814 5053 tree phi;
6de9cd9a
DN
5054
5055 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
b0382c67 5056
84d65814
DN
5057 /* Copy the PHI nodes. We ignore PHI node arguments here because
5058 the incoming edges have not been setup yet. */
bb29d951 5059 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
b0382c67 5060 {
84d65814
DN
5061 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
5062 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
b0382c67 5063 }
84d65814
DN
5064
5065 /* Keep the chain of PHI nodes in the same order so that they can be
5066 updated by ssa_redirect_edge. */
5ae71719 5067 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
b0382c67 5068
6de9cd9a
DN
5069 bsi_tgt = bsi_start (new_bb);
5070 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5071 {
84d65814
DN
5072 def_operand_p def_p;
5073 ssa_op_iter op_iter;
5074 tree stmt, copy;
cc7220fd 5075 int region;
6de9cd9a 5076
84d65814 5077 stmt = bsi_stmt (bsi);
6de9cd9a
DN
5078 if (TREE_CODE (stmt) == LABEL_EXPR)
5079 continue;
5080
84d65814
DN
5081 /* Create a new copy of STMT and duplicate STMT's virtual
5082 operands. */
5f240ec4 5083 copy = unshare_expr (stmt);
5f240ec4 5084 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
84d65814 5085 copy_virtual_operands (copy, stmt);
cc7220fd
JH
5086 region = lookup_stmt_eh_region (stmt);
5087 if (region >= 0)
5088 add_stmt_to_eh_region (copy, region);
6946b3f7 5089 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
84d65814
DN
5090
5091 /* Create new names for all the definitions created by COPY and
5092 add replacement mappings for each new name. */
5093 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5094 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6de9cd9a
DN
5095 }
5096
5097 return new_bb;
5098}
5099
5f40b3cb
ZD
5100/* Adds phi node arguments for edge E_COPY after basic block duplication. */
5101
5102static void
5103add_phi_args_after_copy_edge (edge e_copy)
5104{
5105 basic_block bb, bb_copy = e_copy->src, dest;
5106 edge e;
5107 edge_iterator ei;
5108 tree phi, phi_copy, phi_next, def;
5109
5110 if (!phi_nodes (e_copy->dest))
5111 return;
5112
5113 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5114
5115 if (e_copy->dest->flags & BB_DUPLICATED)
5116 dest = get_bb_original (e_copy->dest);
5117 else
5118 dest = e_copy->dest;
5119
5120 e = find_edge (bb, dest);
5121 if (!e)
5122 {
5123 /* During loop unrolling the target of the latch edge is copied.
5124 In this case we are not looking for edge to dest, but to
5125 duplicated block whose original was dest. */
5126 FOR_EACH_EDGE (e, ei, bb->succs)
5127 {
5128 if ((e->dest->flags & BB_DUPLICATED)
5129 && get_bb_original (e->dest) == dest)
5130 break;
5131 }
5132
5133 gcc_assert (e != NULL);
5134 }
5135
5136 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
5137 phi;
5138 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
5139 {
5140 phi_next = PHI_CHAIN (phi);
5141 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5142 add_phi_arg (phi_copy, def, e_copy);
5143 }
5144}
5145
84d65814 5146
42759f1e
ZD
5147/* Basic block BB_COPY was created by code duplication. Add phi node
5148 arguments for edges going out of BB_COPY. The blocks that were
6580ee77 5149 duplicated have BB_DUPLICATED set. */
42759f1e
ZD
5150
5151void
5152add_phi_args_after_copy_bb (basic_block bb_copy)
5153{
628f6a4e 5154 edge_iterator ei;
5f40b3cb 5155 edge e_copy;
42759f1e 5156
628f6a4e 5157 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
42759f1e 5158 {
5f40b3cb 5159 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5160 }
5161}
5162
5163/* Blocks in REGION_COPY array of length N_REGION were created by
5164 duplication of basic blocks. Add phi node arguments for edges
5f40b3cb
ZD
5165 going from these blocks. If E_COPY is not NULL, also add
5166 phi node arguments for its destination.*/
42759f1e
ZD
5167
5168void
5f40b3cb
ZD
5169add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5170 edge e_copy)
42759f1e
ZD
5171{
5172 unsigned i;
5173
5174 for (i = 0; i < n_region; i++)
6580ee77 5175 region_copy[i]->flags |= BB_DUPLICATED;
42759f1e
ZD
5176
5177 for (i = 0; i < n_region; i++)
5178 add_phi_args_after_copy_bb (region_copy[i]);
5f40b3cb
ZD
5179 if (e_copy)
5180 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5181
5182 for (i = 0; i < n_region; i++)
6580ee77 5183 region_copy[i]->flags &= ~BB_DUPLICATED;
42759f1e
ZD
5184}
5185
42759f1e
ZD
5186/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5187 important exit edge EXIT. By important we mean that no SSA name defined
5188 inside region is live over the other exit edges of the region. All entry
5189 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5190 to the duplicate of the region. SSA form, dominance and loop information
5191 is updated. The new basic blocks are stored to REGION_COPY in the same
5192 order as they had in REGION, provided that REGION_COPY is not NULL.
5193 The function returns false if it is unable to copy the region,
5194 true otherwise. */
5195
5196bool
5197tree_duplicate_sese_region (edge entry, edge exit,
5198 basic_block *region, unsigned n_region,
5199 basic_block *region_copy)
5200{
66f97d31 5201 unsigned i;
42759f1e
ZD
5202 bool free_region_copy = false, copying_header = false;
5203 struct loop *loop = entry->dest->loop_father;
5204 edge exit_copy;
66f97d31 5205 VEC (basic_block, heap) *doms;
42759f1e 5206 edge redirected;
09bac500
JH
5207 int total_freq = 0, entry_freq = 0;
5208 gcov_type total_count = 0, entry_count = 0;
42759f1e
ZD
5209
5210 if (!can_copy_bbs_p (region, n_region))
5211 return false;
5212
5213 /* Some sanity checking. Note that we do not check for all possible
5214 missuses of the functions. I.e. if you ask to copy something weird,
5215 it will work, but the state of structures probably will not be
5216 correct. */
42759f1e
ZD
5217 for (i = 0; i < n_region; i++)
5218 {
5219 /* We do not handle subloops, i.e. all the blocks must belong to the
5220 same loop. */
5221 if (region[i]->loop_father != loop)
5222 return false;
5223
5224 if (region[i] != entry->dest
5225 && region[i] == loop->header)
5226 return false;
5227 }
5228
561e8a90 5229 set_loop_copy (loop, loop);
42759f1e
ZD
5230
5231 /* In case the function is used for loop header copying (which is the primary
5232 use), ensure that EXIT and its copy will be new latch and entry edges. */
5233 if (loop->header == entry->dest)
5234 {
5235 copying_header = true;
561e8a90 5236 set_loop_copy (loop, loop_outer (loop));
42759f1e
ZD
5237
5238 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5239 return false;
5240
5241 for (i = 0; i < n_region; i++)
5242 if (region[i] != exit->src
5243 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5244 return false;
5245 }
5246
5247 if (!region_copy)
5248 {
858904db 5249 region_copy = XNEWVEC (basic_block, n_region);
42759f1e
ZD
5250 free_region_copy = true;
5251 }
5252
84d65814 5253 gcc_assert (!need_ssa_update_p ());
42759f1e 5254
5deaef19 5255 /* Record blocks outside the region that are dominated by something
42759f1e 5256 inside. */
66f97d31 5257 doms = NULL;
6580ee77
JH
5258 initialize_original_copy_tables ();
5259
66f97d31 5260 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
42759f1e 5261
09bac500
JH
5262 if (entry->dest->count)
5263 {
5264 total_count = entry->dest->count;
5265 entry_count = entry->count;
5266 /* Fix up corner cases, to avoid division by zero or creation of negative
5267 frequencies. */
5268 if (entry_count > total_count)
5269 entry_count = total_count;
5270 }
5271 else
5272 {
5273 total_freq = entry->dest->frequency;
5274 entry_freq = EDGE_FREQUENCY (entry);
5275 /* Fix up corner cases, to avoid division by zero or creation of negative
5276 frequencies. */
5277 if (total_freq == 0)
5278 total_freq = 1;
5279 else if (entry_freq > total_freq)
5280 entry_freq = total_freq;
5281 }
5deaef19 5282
b9a66240
ZD
5283 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5284 split_edge_bb_loc (entry));
09bac500
JH
5285 if (total_count)
5286 {
5287 scale_bbs_frequencies_gcov_type (region, n_region,
5288 total_count - entry_count,
5289 total_count);
5290 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6531d1be 5291 total_count);
09bac500
JH
5292 }
5293 else
5294 {
5295 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5296 total_freq);
5297 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5298 }
42759f1e
ZD
5299
5300 if (copying_header)
5301 {
5302 loop->header = exit->dest;
5303 loop->latch = exit->src;
5304 }
5305
5306 /* Redirect the entry and add the phi node arguments. */
6580ee77 5307 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
42759f1e 5308 gcc_assert (redirected != NULL);
71882046 5309 flush_pending_stmts (entry);
42759f1e
ZD
5310
5311 /* Concerning updating of dominators: We must recount dominators
84d65814
DN
5312 for entry block and its copy. Anything that is outside of the
5313 region, but was dominated by something inside needs recounting as
5314 well. */
42759f1e 5315 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
66f97d31
ZD
5316 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5317 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5f40b3cb 5318 VEC_free (basic_block, heap, doms);
42759f1e 5319
84d65814 5320 /* Add the other PHI node arguments. */
5f40b3cb
ZD
5321 add_phi_args_after_copy (region_copy, n_region, NULL);
5322
5323 /* Update the SSA web. */
5324 update_ssa (TODO_update_ssa);
5325
5326 if (free_region_copy)
5327 free (region_copy);
5328
5329 free_original_copy_tables ();
5330 return true;
5331}
5332
5333/* Duplicates REGION consisting of N_REGION blocks. The new blocks
5334 are stored to REGION_COPY in the same order in that they appear
5335 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5336 the region, EXIT an exit from it. The condition guarding EXIT
5337 is moved to ENTRY. Returns true if duplication succeeds, false
5338 otherwise.
5339
5340 For example,
5341
5342 some_code;
5343 if (cond)
5344 A;
5345 else
5346 B;
5347
5348 is transformed to
5349
5350 if (cond)
5351 {
5352 some_code;
5353 A;
5354 }
5355 else
5356 {
5357 some_code;
5358 B;
5359 }
5360*/
5361
5362bool
5363tree_duplicate_sese_tail (edge entry, edge exit,
5364 basic_block *region, unsigned n_region,
5365 basic_block *region_copy)
5366{
5367 unsigned i;
5368 bool free_region_copy = false;
5369 struct loop *loop = exit->dest->loop_father;
5370 struct loop *orig_loop = entry->dest->loop_father;
5371 basic_block switch_bb, entry_bb, nentry_bb;
5372 VEC (basic_block, heap) *doms;
5373 int total_freq = 0, exit_freq = 0;
5374 gcov_type total_count = 0, exit_count = 0;
5375 edge exits[2], nexits[2], e;
5376 block_stmt_iterator bsi;
5377 tree cond;
5378 edge sorig, snew;
5379
5380 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5381 exits[0] = exit;
5382 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5383
5384 if (!can_copy_bbs_p (region, n_region))
5385 return false;
5386
5387 /* Some sanity checking. Note that we do not check for all possible
5388 missuses of the functions. I.e. if you ask to copy something weird
5389 (e.g., in the example, if there is a jump from inside to the middle
5390 of some_code, or come_code defines some of the values used in cond)
5391 it will work, but the resulting code will not be correct. */
5392 for (i = 0; i < n_region; i++)
5393 {
5394 /* We do not handle subloops, i.e. all the blocks must belong to the
5395 same loop. */
5396 if (region[i]->loop_father != orig_loop)
5397 return false;
5398
5399 if (region[i] == orig_loop->latch)
5400 return false;
5401 }
5402
5403 initialize_original_copy_tables ();
5404 set_loop_copy (orig_loop, loop);
5405
5406 if (!region_copy)
5407 {
5408 region_copy = XNEWVEC (basic_block, n_region);
5409 free_region_copy = true;
5410 }
5411
5412 gcc_assert (!need_ssa_update_p ());
5413
5414 /* Record blocks outside the region that are dominated by something
5415 inside. */
5416 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5417
5418 if (exit->src->count)
5419 {
5420 total_count = exit->src->count;
5421 exit_count = exit->count;
5422 /* Fix up corner cases, to avoid division by zero or creation of negative
5423 frequencies. */
5424 if (exit_count > total_count)
5425 exit_count = total_count;
5426 }
5427 else
5428 {
5429 total_freq = exit->src->frequency;
5430 exit_freq = EDGE_FREQUENCY (exit);
5431 /* Fix up corner cases, to avoid division by zero or creation of negative
5432 frequencies. */
5433 if (total_freq == 0)
5434 total_freq = 1;
5435 if (exit_freq > total_freq)
5436 exit_freq = total_freq;
5437 }
5438
5439 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5440 split_edge_bb_loc (exit));
5441 if (total_count)
5442 {
5443 scale_bbs_frequencies_gcov_type (region, n_region,
5444 total_count - exit_count,
5445 total_count);
5446 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5447 total_count);
5448 }
5449 else
5450 {
5451 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5452 total_freq);
5453 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5454 }
5455
5456 /* Create the switch block, and put the exit condition to it. */
5457 entry_bb = entry->dest;
5458 nentry_bb = get_bb_copy (entry_bb);
5459 if (!last_stmt (entry->src)
5460 || !stmt_ends_bb_p (last_stmt (entry->src)))
5461 switch_bb = entry->src;
5462 else
5463 switch_bb = split_edge (entry);
5464 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5465
5466 bsi = bsi_last (switch_bb);
5467 cond = last_stmt (exit->src);
5468 gcc_assert (TREE_CODE (cond) == COND_EXPR);
5469 bsi_insert_after (&bsi, unshare_expr (cond), BSI_NEW_STMT);
5470
5471 sorig = single_succ_edge (switch_bb);
5472 sorig->flags = exits[1]->flags;
5473 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5474
5475 /* Register the new edge from SWITCH_BB in loop exit lists. */
5476 rescan_loop_exit (snew, true, false);
5477
5478 /* Add the PHI node arguments. */
5479 add_phi_args_after_copy (region_copy, n_region, snew);
5480
5481 /* Get rid of now superfluous conditions and associated edges (and phi node
5482 arguments). */
5483 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5484 PENDING_STMT (e) = NULL_TREE;
5485 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5486 PENDING_STMT (e) = NULL_TREE;
5487
5488 /* Anything that is outside of the region, but was dominated by something
5489 inside needs to update dominance info. */
5490 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5491 VEC_free (basic_block, heap, doms);
42759f1e 5492
84d65814
DN
5493 /* Update the SSA web. */
5494 update_ssa (TODO_update_ssa);
42759f1e
ZD
5495
5496 if (free_region_copy)
5497 free (region_copy);
5498
6580ee77 5499 free_original_copy_tables ();
42759f1e
ZD
5500 return true;
5501}
6de9cd9a 5502
50674e96
DN
5503/*
5504DEF_VEC_P(basic_block);
5505DEF_VEC_ALLOC_P(basic_block,heap);
5506*/
5507
5508/* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5509 adding blocks when the dominator traversal reaches EXIT. This
5510 function silently assumes that ENTRY strictly dominates EXIT. */
5511
5512static void
5513gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5514 VEC(basic_block,heap) **bbs_p)
5515{
5516 basic_block son;
5517
5518 for (son = first_dom_son (CDI_DOMINATORS, entry);
5519 son;
5520 son = next_dom_son (CDI_DOMINATORS, son))
5521 {
5522 VEC_safe_push (basic_block, heap, *bbs_p, son);
5523 if (son != exit)
5524 gather_blocks_in_sese_region (son, exit, bbs_p);
5525 }
5526}
5527
917948d3
ZD
5528/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5529 The duplicates are recorded in VARS_MAP. */
5530
5531static void
5532replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5533 tree to_context)
5534{
5535 tree t = *tp, new_t;
5536 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5537 void **loc;
5538
5539 if (DECL_CONTEXT (t) == to_context)
5540 return;
5541
5542 loc = pointer_map_contains (vars_map, t);
5543
5544 if (!loc)
5545 {
5546 loc = pointer_map_insert (vars_map, t);
5547
5548 if (SSA_VAR_P (t))
5549 {
5550 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5551 f->unexpanded_var_list
5552 = tree_cons (NULL_TREE, new_t, f->unexpanded_var_list);
5553 }
5554 else
5555 {
5556 gcc_assert (TREE_CODE (t) == CONST_DECL);
5557 new_t = copy_node (t);
5558 }
5559 DECL_CONTEXT (new_t) = to_context;
5560
5561 *loc = new_t;
5562 }
5563 else
5564 new_t = *loc;
5565
5566 *tp = new_t;
5567}
5568
5569/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5570 VARS_MAP maps old ssa names and var_decls to the new ones. */
5571
5572static tree
5573replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5574 tree to_context)
5575{
5576 void **loc;
5577 tree new_name, decl = SSA_NAME_VAR (name);
5578
5579 gcc_assert (is_gimple_reg (name));
5580
5581 loc = pointer_map_contains (vars_map, name);
5582
5583 if (!loc)
5584 {
5585 replace_by_duplicate_decl (&decl, vars_map, to_context);
5586
5587 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5588 if (gimple_in_ssa_p (cfun))
5589 add_referenced_var (decl);
5590
5591 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5592 if (SSA_NAME_IS_DEFAULT_DEF (name))
5593 set_default_def (decl, new_name);
5594 pop_cfun ();
5595
5596 loc = pointer_map_insert (vars_map, name);
5597 *loc = new_name;
5598 }
5599 else
5600 new_name = *loc;
5601
5602 return new_name;
5603}
50674e96
DN
5604
5605struct move_stmt_d
5606{
5607 tree block;
5608 tree from_context;
5609 tree to_context;
917948d3 5610 struct pointer_map_t *vars_map;
fad41cd7 5611 htab_t new_label_map;
50674e96
DN
5612 bool remap_decls_p;
5613};
5614
5615/* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5616 contained in *TP and change the DECL_CONTEXT of every local
5617 variable referenced in *TP. */
5618
5619static tree
fad41cd7 5620move_stmt_r (tree *tp, int *walk_subtrees, void *data)
50674e96
DN
5621{
5622 struct move_stmt_d *p = (struct move_stmt_d *) data;
fad41cd7 5623 tree t = *tp;
50674e96 5624
07beea0d
AH
5625 if (p->block
5626 && (EXPR_P (t) || GIMPLE_STMT_P (t)))
fad41cd7 5627 TREE_BLOCK (t) = p->block;
50674e96 5628
bed575d5
RS
5629 if (OMP_DIRECTIVE_P (t)
5630 && TREE_CODE (t) != OMP_RETURN
5631 && TREE_CODE (t) != OMP_CONTINUE)
50674e96
DN
5632 {
5633 /* Do not remap variables inside OMP directives. Variables
5634 referenced in clauses and directive header belong to the
5635 parent function and should not be moved into the child
5636 function. */
fad41cd7 5637 bool save_remap_decls_p = p->remap_decls_p;
50674e96 5638 p->remap_decls_p = false;
fad41cd7
RH
5639 *walk_subtrees = 0;
5640
5641 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
50674e96 5642
fad41cd7
RH
5643 p->remap_decls_p = save_remap_decls_p;
5644 }
917948d3 5645 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
50674e96 5646 {
917948d3
ZD
5647 if (TREE_CODE (t) == SSA_NAME)
5648 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5649 else if (TREE_CODE (t) == LABEL_DECL)
fad41cd7
RH
5650 {
5651 if (p->new_label_map)
5652 {
5653 struct tree_map in, *out;
fc8600f9 5654 in.base.from = t;
fad41cd7
RH
5655 out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5656 if (out)
5657 *tp = t = out->to;
5658 }
50674e96 5659
fad41cd7
RH
5660 DECL_CONTEXT (t) = p->to_context;
5661 }
5662 else if (p->remap_decls_p)
50674e96 5663 {
917948d3
ZD
5664 /* Replace T with its duplicate. T should no longer appear in the
5665 parent function, so this looks wasteful; however, it may appear
5666 in referenced_vars, and more importantly, as virtual operands of
5667 statements, and in alias lists of other variables. It would be
5668 quite difficult to expunge it from all those places. ??? It might
5669 suffice to do this for addressable variables. */
5670 if ((TREE_CODE (t) == VAR_DECL
5671 && !is_global_var (t))
5672 || TREE_CODE (t) == CONST_DECL)
5673 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5674
5675 if (SSA_VAR_P (t)
5676 && gimple_in_ssa_p (cfun))
fad41cd7 5677 {
917948d3
ZD
5678 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5679 add_referenced_var (*tp);
5680 pop_cfun ();
fad41cd7 5681 }
50674e96 5682 }
917948d3 5683 *walk_subtrees = 0;
50674e96 5684 }
fad41cd7
RH
5685 else if (TYPE_P (t))
5686 *walk_subtrees = 0;
50674e96
DN
5687
5688 return NULL_TREE;
5689}
5690
917948d3
ZD
5691/* Marks virtual operands of all statements in basic blocks BBS for
5692 renaming. */
5693
dea61d92
SP
5694void
5695mark_virtual_ops_in_bb (basic_block bb)
917948d3
ZD
5696{
5697 tree phi;
5698 block_stmt_iterator bsi;
dea61d92
SP
5699
5700 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5701 mark_virtual_ops_for_renaming (phi);
5702
5703 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5704 mark_virtual_ops_for_renaming (bsi_stmt (bsi));
5705}
5706
5707/* Marks virtual operands of all statements in basic blocks BBS for
5708 renaming. */
5709
5710static void
5711mark_virtual_ops_in_region (VEC (basic_block,heap) *bbs)
5712{
917948d3
ZD
5713 basic_block bb;
5714 unsigned i;
5715
5716 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
dea61d92 5717 mark_virtual_ops_in_bb (bb);
917948d3 5718}
50674e96
DN
5719
5720/* Move basic block BB from function CFUN to function DEST_FN. The
5721 block is moved out of the original linked list and placed after
5722 block AFTER in the new list. Also, the block is removed from the
5723 original array of blocks and placed in DEST_FN's array of blocks.
5724 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5725 updated to reflect the moved edges.
6531d1be 5726
917948d3
ZD
5727 The local variables are remapped to new instances, VARS_MAP is used
5728 to record the mapping. */
50674e96
DN
5729
5730static void
5731move_block_to_fn (struct function *dest_cfun, basic_block bb,
5732 basic_block after, bool update_edge_count_p,
917948d3
ZD
5733 struct pointer_map_t *vars_map, htab_t new_label_map,
5734 int eh_offset)
50674e96
DN
5735{
5736 struct control_flow_graph *cfg;
5737 edge_iterator ei;
5738 edge e;
5739 block_stmt_iterator si;
5740 struct move_stmt_d d;
728b26bb 5741 unsigned old_len, new_len;
5f40b3cb 5742 tree phi, next_phi;
50674e96 5743
3722506a
ZD
5744 /* Remove BB from dominance structures. */
5745 delete_from_dominance_info (CDI_DOMINATORS, bb);
5f40b3cb
ZD
5746 if (current_loops)
5747 remove_bb_from_loops (bb);
3722506a 5748
50674e96
DN
5749 /* Link BB to the new linked list. */
5750 move_block_after (bb, after);
5751
5752 /* Update the edge count in the corresponding flowgraphs. */
5753 if (update_edge_count_p)
5754 FOR_EACH_EDGE (e, ei, bb->succs)
5755 {
5756 cfun->cfg->x_n_edges--;
5757 dest_cfun->cfg->x_n_edges++;
5758 }
5759
5760 /* Remove BB from the original basic block array. */
5761 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5762 cfun->cfg->x_n_basic_blocks--;
5763
5764 /* Grow DEST_CFUN's basic block array if needed. */
5765 cfg = dest_cfun->cfg;
5766 cfg->x_n_basic_blocks++;
3722506a
ZD
5767 if (bb->index >= cfg->x_last_basic_block)
5768 cfg->x_last_basic_block = bb->index + 1;
50674e96 5769
728b26bb
DN
5770 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5771 if ((unsigned) cfg->x_last_basic_block >= old_len)
50674e96 5772 {
728b26bb 5773 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
a590ac65
KH
5774 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5775 new_len);
50674e96
DN
5776 }
5777
5778 VEC_replace (basic_block, cfg->x_basic_block_info,
e0310afb 5779 bb->index, bb);
50674e96 5780
917948d3 5781 /* Remap the variables in phi nodes. */
5f40b3cb 5782 for (phi = phi_nodes (bb); phi; phi = next_phi)
917948d3
ZD
5783 {
5784 use_operand_p use;
5785 tree op = PHI_RESULT (phi);
5786 ssa_op_iter oi;
5787
5f40b3cb 5788 next_phi = PHI_CHAIN (phi);
917948d3 5789 if (!is_gimple_reg (op))
5f40b3cb
ZD
5790 {
5791 /* Remove the phi nodes for virtual operands (alias analysis will be
5792 run for the new function, anyway). */
5793 remove_phi_node (phi, NULL, true);
5794 continue;
5795 }
917948d3
ZD
5796
5797 SET_PHI_RESULT (phi, replace_ssa_name (op, vars_map, dest_cfun->decl));
5798 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5799 {
5800 op = USE_FROM_PTR (use);
5801 if (TREE_CODE (op) == SSA_NAME)
5802 SET_USE (use, replace_ssa_name (op, vars_map, dest_cfun->decl));
5803 }
5804 }
5805
50674e96
DN
5806 /* The statements in BB need to be associated with a new TREE_BLOCK.
5807 Labels need to be associated with a new label-to-block map. */
5808 memset (&d, 0, sizeof (d));
917948d3
ZD
5809 d.vars_map = vars_map;
5810 d.from_context = cfun->decl;
5811 d.to_context = dest_cfun->decl;
5812 d.new_label_map = new_label_map;
50674e96
DN
5813
5814 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
5815 {
5816 tree stmt = bsi_stmt (si);
fad41cd7 5817 int region;
50674e96 5818
50674e96
DN
5819 d.remap_decls_p = true;
5820 if (TREE_BLOCK (stmt))
5821 d.block = DECL_INITIAL (dest_cfun->decl);
5822
5823 walk_tree (&stmt, move_stmt_r, &d, NULL);
5824
5825 if (TREE_CODE (stmt) == LABEL_EXPR)
5826 {
50674e96
DN
5827 tree label = LABEL_EXPR_LABEL (stmt);
5828 int uid = LABEL_DECL_UID (label);
5829
5830 gcc_assert (uid > -1);
5831
5832 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5833 if (old_len <= (unsigned) uid)
5834 {
728b26bb 5835 new_len = 3 * uid / 2;
a590ac65
KH
5836 VEC_safe_grow_cleared (basic_block, gc,
5837 cfg->x_label_to_block_map, new_len);
50674e96
DN
5838 }
5839
5840 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5841 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5842
5843 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5844
5845 if (uid >= dest_cfun->last_label_uid)
5846 dest_cfun->last_label_uid = uid + 1;
5847 }
fad41cd7
RH
5848 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
5849 TREE_OPERAND (stmt, 0) =
5850 build_int_cst (NULL_TREE,
5851 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
5852 + eh_offset);
5853
5854 region = lookup_stmt_eh_region (stmt);
5855 if (region >= 0)
5856 {
5857 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5858 remove_stmt_from_eh_region (stmt);
6946b3f7
JH
5859 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5860 gimple_remove_stmt_histograms (cfun, stmt);
fad41cd7 5861 }
917948d3 5862
5f40b3cb
ZD
5863 /* We cannot leave any operands allocated from the operand caches of
5864 the current function. */
5865 free_stmt_operands (stmt);
5866 push_cfun (dest_cfun);
917948d3 5867 update_stmt (stmt);
5f40b3cb 5868 pop_cfun ();
fad41cd7
RH
5869 }
5870}
5871
5872/* Examine the statements in BB (which is in SRC_CFUN); find and return
5873 the outermost EH region. Use REGION as the incoming base EH region. */
5874
5875static int
5876find_outermost_region_in_block (struct function *src_cfun,
5877 basic_block bb, int region)
5878{
5879 block_stmt_iterator si;
6531d1be 5880
fad41cd7
RH
5881 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
5882 {
5883 tree stmt = bsi_stmt (si);
5884 int stmt_region;
1799e5d5 5885
07ed51c9
JJ
5886 if (TREE_CODE (stmt) == RESX_EXPR)
5887 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
5888 else
5889 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
7e2df4a1
JJ
5890 if (stmt_region > 0)
5891 {
5892 if (region < 0)
5893 region = stmt_region;
5894 else if (stmt_region != region)
5895 {
5896 region = eh_region_outermost (src_cfun, stmt_region, region);
5897 gcc_assert (region != -1);
5898 }
5899 }
50674e96 5900 }
fad41cd7
RH
5901
5902 return region;
50674e96
DN
5903}
5904
fad41cd7
RH
5905static tree
5906new_label_mapper (tree decl, void *data)
5907{
5908 htab_t hash = (htab_t) data;
5909 struct tree_map *m;
5910 void **slot;
5911
5912 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5913
5914 m = xmalloc (sizeof (struct tree_map));
5915 m->hash = DECL_UID (decl);
fc8600f9 5916 m->base.from = decl;
fad41cd7
RH
5917 m->to = create_artificial_label ();
5918 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
8b46837c
JJ
5919 if (LABEL_DECL_UID (m->to) >= cfun->last_label_uid)
5920 cfun->last_label_uid = LABEL_DECL_UID (m->to) + 1;
fad41cd7
RH
5921
5922 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5923 gcc_assert (*slot == NULL);
5924
5925 *slot = m;
5926
5927 return m->to;
5928}
50674e96
DN
5929
5930/* Move a single-entry, single-exit region delimited by ENTRY_BB and
5931 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
5932 single basic block in the original CFG and the new basic block is
5933 returned. DEST_CFUN must not have a CFG yet.
5934
5935 Note that the region need not be a pure SESE region. Blocks inside
5936 the region may contain calls to abort/exit. The only restriction
5937 is that ENTRY_BB should be the only entry point and it must
5938 dominate EXIT_BB.
5939
5940 All local variables referenced in the region are assumed to be in
5941 the corresponding BLOCK_VARS and unexpanded variable lists
5942 associated with DEST_CFUN. */
5943
5944basic_block
5945move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
5946 basic_block exit_bb)
5947{
917948d3
ZD
5948 VEC(basic_block,heap) *bbs, *dom_bbs;
5949 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
5950 basic_block after, bb, *entry_pred, *exit_succ, abb;
5951 struct function *saved_cfun = cfun;
fad41cd7 5952 int *entry_flag, *exit_flag, eh_offset;
917948d3 5953 unsigned *entry_prob, *exit_prob;
50674e96
DN
5954 unsigned i, num_entry_edges, num_exit_edges;
5955 edge e;
5956 edge_iterator ei;
fad41cd7 5957 htab_t new_label_map;
917948d3 5958 struct pointer_map_t *vars_map;
5f40b3cb 5959 struct loop *loop = entry_bb->loop_father;
50674e96
DN
5960
5961 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
5962 region. */
5963 gcc_assert (entry_bb != exit_bb
2aee3e57
JJ
5964 && (!exit_bb
5965 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
50674e96 5966
917948d3
ZD
5967 /* Collect all the blocks in the region. Manually add ENTRY_BB
5968 because it won't be added by dfs_enumerate_from. */
50674e96
DN
5969 bbs = NULL;
5970 VEC_safe_push (basic_block, heap, bbs, entry_bb);
5971 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
5972
917948d3
ZD
5973 /* The blocks that used to be dominated by something in BBS will now be
5974 dominated by the new block. */
5975 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
5976 VEC_address (basic_block, bbs),
5977 VEC_length (basic_block, bbs));
5978
50674e96
DN
5979 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
5980 the predecessor edges to ENTRY_BB and the successor edges to
5981 EXIT_BB so that we can re-attach them to the new basic block that
5982 will replace the region. */
5983 num_entry_edges = EDGE_COUNT (entry_bb->preds);
5984 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
5985 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
917948d3 5986 entry_prob = XNEWVEC (unsigned, num_entry_edges);
50674e96
DN
5987 i = 0;
5988 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
5989 {
917948d3 5990 entry_prob[i] = e->probability;
50674e96
DN
5991 entry_flag[i] = e->flags;
5992 entry_pred[i++] = e->src;
5993 remove_edge (e);
5994 }
5995
2aee3e57 5996 if (exit_bb)
50674e96 5997 {
2aee3e57
JJ
5998 num_exit_edges = EDGE_COUNT (exit_bb->succs);
5999 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6000 sizeof (basic_block));
6001 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
917948d3 6002 exit_prob = XNEWVEC (unsigned, num_exit_edges);
2aee3e57
JJ
6003 i = 0;
6004 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6005 {
917948d3 6006 exit_prob[i] = e->probability;
2aee3e57
JJ
6007 exit_flag[i] = e->flags;
6008 exit_succ[i++] = e->dest;
6009 remove_edge (e);
6010 }
6011 }
6012 else
6013 {
6014 num_exit_edges = 0;
6015 exit_succ = NULL;
6016 exit_flag = NULL;
917948d3 6017 exit_prob = NULL;
50674e96
DN
6018 }
6019
6020 /* Switch context to the child function to initialize DEST_FN's CFG. */
6021 gcc_assert (dest_cfun->cfg == NULL);
917948d3 6022 push_cfun (dest_cfun);
fad41cd7 6023
50674e96 6024 init_empty_tree_cfg ();
fad41cd7
RH
6025
6026 /* Initialize EH information for the new function. */
6027 eh_offset = 0;
6028 new_label_map = NULL;
6029 if (saved_cfun->eh)
6030 {
6031 int region = -1;
6032
6033 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6034 region = find_outermost_region_in_block (saved_cfun, bb, region);
6035
6036 init_eh_for_function ();
6037 if (region != -1)
6038 {
6039 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6040 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
6041 new_label_map, region, 0);
6042 }
6043 }
6044
917948d3
ZD
6045 pop_cfun ();
6046
6047 /* The ssa form for virtual operands in the source function will have to
6048 be repaired. We do not care for the real operands -- the sese region
6049 must be closed with respect to those. */
6050 mark_virtual_ops_in_region (bbs);
50674e96
DN
6051
6052 /* Move blocks from BBS into DEST_CFUN. */
6053 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6054 after = dest_cfun->cfg->x_entry_block_ptr;
917948d3 6055 vars_map = pointer_map_create ();
50674e96
DN
6056 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6057 {
6058 /* No need to update edge counts on the last block. It has
6059 already been updated earlier when we detached the region from
6060 the original CFG. */
917948d3 6061 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_map,
fad41cd7 6062 new_label_map, eh_offset);
50674e96
DN
6063 after = bb;
6064 }
6065
fad41cd7
RH
6066 if (new_label_map)
6067 htab_delete (new_label_map);
917948d3 6068 pointer_map_destroy (vars_map);
50674e96
DN
6069
6070 /* Rewire the entry and exit blocks. The successor to the entry
6071 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6072 the child function. Similarly, the predecessor of DEST_FN's
6073 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6074 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6075 various CFG manipulation function get to the right CFG.
6076
6077 FIXME, this is silly. The CFG ought to become a parameter to
6078 these helpers. */
917948d3 6079 push_cfun (dest_cfun);
50674e96 6080 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
2aee3e57
JJ
6081 if (exit_bb)
6082 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
917948d3 6083 pop_cfun ();
50674e96
DN
6084
6085 /* Back in the original function, the SESE region has disappeared,
6086 create a new basic block in its place. */
6087 bb = create_empty_bb (entry_pred[0]);
5f40b3cb
ZD
6088 if (current_loops)
6089 add_bb_to_loop (bb, loop);
50674e96 6090 for (i = 0; i < num_entry_edges; i++)
917948d3
ZD
6091 {
6092 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6093 e->probability = entry_prob[i];
6094 }
50674e96
DN
6095
6096 for (i = 0; i < num_exit_edges; i++)
917948d3
ZD
6097 {
6098 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6099 e->probability = exit_prob[i];
6100 }
6101
6102 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6103 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6104 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6105 VEC_free (basic_block, heap, dom_bbs);
50674e96 6106
2aee3e57
JJ
6107 if (exit_bb)
6108 {
917948d3 6109 free (exit_prob);
2aee3e57
JJ
6110 free (exit_flag);
6111 free (exit_succ);
6112 }
917948d3 6113 free (entry_prob);
50674e96
DN
6114 free (entry_flag);
6115 free (entry_pred);
50674e96
DN
6116 VEC_free (basic_block, heap, bbs);
6117
6118 return bb;
6119}
6120
84d65814 6121
6de9cd9a
DN
6122/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
6123
6124void
6125dump_function_to_file (tree fn, FILE *file, int flags)
6126{
6127 tree arg, vars, var;
459ffad3 6128 struct function *dsf;
6de9cd9a
DN
6129 bool ignore_topmost_bind = false, any_var = false;
6130 basic_block bb;
6131 tree chain;
6531d1be 6132
673fda6b 6133 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6de9cd9a
DN
6134
6135 arg = DECL_ARGUMENTS (fn);
6136 while (arg)
6137 {
2f9ea521
RG
6138 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6139 fprintf (file, " ");
6de9cd9a
DN
6140 print_generic_expr (file, arg, dump_flags);
6141 if (TREE_CHAIN (arg))
6142 fprintf (file, ", ");
6143 arg = TREE_CHAIN (arg);
6144 }
6145 fprintf (file, ")\n");
6146
459ffad3
EB
6147 dsf = DECL_STRUCT_FUNCTION (fn);
6148 if (dsf && (flags & TDF_DETAILS))
6149 dump_eh_tree (file, dsf);
6150
6de9cd9a
DN
6151 if (flags & TDF_RAW)
6152 {
6153 dump_node (fn, TDF_SLIM | flags, file);
6154 return;
6155 }
6156
953ff289 6157 /* Switch CFUN to point to FN. */
db2960f4 6158 push_cfun (DECL_STRUCT_FUNCTION (fn));
953ff289 6159
6de9cd9a
DN
6160 /* When GIMPLE is lowered, the variables are no longer available in
6161 BIND_EXPRs, so display them separately. */
32a87d45 6162 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
6de9cd9a
DN
6163 {
6164 ignore_topmost_bind = true;
6165
6166 fprintf (file, "{\n");
6167 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
6168 {
6169 var = TREE_VALUE (vars);
6170
6171 print_generic_decl (file, var, flags);
6172 fprintf (file, "\n");
6173
6174 any_var = true;
6175 }
6176 }
6177
32a87d45 6178 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6de9cd9a
DN
6179 {
6180 /* Make a CFG based dump. */
878f99d2 6181 check_bb_profile (ENTRY_BLOCK_PTR, file);
6de9cd9a
DN
6182 if (!ignore_topmost_bind)
6183 fprintf (file, "{\n");
6184
6185 if (any_var && n_basic_blocks)
6186 fprintf (file, "\n");
6187
6188 FOR_EACH_BB (bb)
6189 dump_generic_bb (file, bb, 2, flags);
6531d1be 6190
6de9cd9a 6191 fprintf (file, "}\n");
878f99d2 6192 check_bb_profile (EXIT_BLOCK_PTR, file);
6de9cd9a
DN
6193 }
6194 else
6195 {
6196 int indent;
6197
6198 /* Make a tree based dump. */
6199 chain = DECL_SAVED_TREE (fn);
6200
953ff289 6201 if (chain && TREE_CODE (chain) == BIND_EXPR)
6de9cd9a
DN
6202 {
6203 if (ignore_topmost_bind)
6204 {
6205 chain = BIND_EXPR_BODY (chain);
6206 indent = 2;
6207 }
6208 else
6209 indent = 0;
6210 }
6211 else
6212 {
6213 if (!ignore_topmost_bind)
6214 fprintf (file, "{\n");
6215 indent = 2;
6216 }
6217
6218 if (any_var)
6219 fprintf (file, "\n");
6220
6221 print_generic_stmt_indented (file, chain, flags, indent);
6222 if (ignore_topmost_bind)
6223 fprintf (file, "}\n");
6224 }
6225
6226 fprintf (file, "\n\n");
953ff289
DN
6227
6228 /* Restore CFUN. */
db2960f4 6229 pop_cfun ();
953ff289
DN
6230}
6231
6232
6233/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6234
6235void
6236debug_function (tree fn, int flags)
6237{
6238 dump_function_to_file (fn, stderr, flags);
6de9cd9a
DN
6239}
6240
6241
d7770457 6242/* Print on FILE the indexes for the predecessors of basic_block BB. */
6de9cd9a
DN
6243
6244static void
628f6a4e 6245print_pred_bbs (FILE *file, basic_block bb)
6de9cd9a 6246{
628f6a4e
BE
6247 edge e;
6248 edge_iterator ei;
6249
6250 FOR_EACH_EDGE (e, ei, bb->preds)
d7770457 6251 fprintf (file, "bb_%d ", e->src->index);
6de9cd9a
DN
6252}
6253
6254
d7770457 6255/* Print on FILE the indexes for the successors of basic_block BB. */
6de9cd9a
DN
6256
6257static void
628f6a4e 6258print_succ_bbs (FILE *file, basic_block bb)
6de9cd9a 6259{
628f6a4e
BE
6260 edge e;
6261 edge_iterator ei;
6262
6263 FOR_EACH_EDGE (e, ei, bb->succs)
d7770457 6264 fprintf (file, "bb_%d ", e->dest->index);
6de9cd9a
DN
6265}
6266
0c8efed8
SP
6267/* Print to FILE the basic block BB following the VERBOSITY level. */
6268
6269void
6270print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6271{
6272 char *s_indent = (char *) alloca ((size_t) indent + 1);
6273 memset ((void *) s_indent, ' ', (size_t) indent);
6274 s_indent[indent] = '\0';
6275
6276 /* Print basic_block's header. */
6277 if (verbosity >= 2)
6278 {
6279 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6280 print_pred_bbs (file, bb);
6281 fprintf (file, "}, succs = {");
6282 print_succ_bbs (file, bb);
6283 fprintf (file, "})\n");
6284 }
6285
6286 /* Print basic_block's body. */
6287 if (verbosity >= 3)
6288 {
6289 fprintf (file, "%s {\n", s_indent);
6290 tree_dump_bb (bb, file, indent + 4);
6291 fprintf (file, "%s }\n", s_indent);
6292 }
6293}
6294
6295static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6de9cd9a 6296
0c8efed8
SP
6297/* Pretty print LOOP on FILE, indented INDENT spaces. Following
6298 VERBOSITY level this outputs the contents of the loop, or just its
6299 structure. */
6de9cd9a
DN
6300
6301static void
0c8efed8 6302print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6de9cd9a
DN
6303{
6304 char *s_indent;
6305 basic_block bb;
6531d1be 6306
6de9cd9a
DN
6307 if (loop == NULL)
6308 return;
6309
6310 s_indent = (char *) alloca ((size_t) indent + 1);
6311 memset ((void *) s_indent, ' ', (size_t) indent);
6312 s_indent[indent] = '\0';
6313
0c8efed8
SP
6314 /* Print loop's header. */
6315 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6316 loop->num, loop->header->index, loop->latch->index);
6317 fprintf (file, ", niter = ");
6318 print_generic_expr (file, loop->nb_iterations, 0);
6531d1be 6319
0c8efed8
SP
6320 if (loop->any_upper_bound)
6321 {
6322 fprintf (file, ", upper_bound = ");
6323 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6324 }
6531d1be 6325
0c8efed8
SP
6326 if (loop->any_estimate)
6327 {
6328 fprintf (file, ", estimate = ");
6329 dump_double_int (file, loop->nb_iterations_estimate, true);
6330 }
6331 fprintf (file, ")\n");
6332
6333 /* Print loop's body. */
6334 if (verbosity >= 1)
6335 {
6336 fprintf (file, "%s{\n", s_indent);
6337 FOR_EACH_BB (bb)
6338 if (bb->loop_father == loop)
6339 print_loops_bb (file, bb, indent, verbosity);
6340
6341 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6342 fprintf (file, "%s}\n", s_indent);
6343 }
6de9cd9a
DN
6344}
6345
0c8efed8
SP
6346/* Print the LOOP and its sibling loops on FILE, indented INDENT
6347 spaces. Following VERBOSITY level this outputs the contents of the
6348 loop, or just its structure. */
6349
6350static void
6351print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6352{
6353 if (loop == NULL)
6354 return;
6355
6356 print_loop (file, loop, indent, verbosity);
6357 print_loop_and_siblings (file, loop->next, indent, verbosity);
6358}
6de9cd9a
DN
6359
6360/* Follow a CFG edge from the entry point of the program, and on entry
6361 of a loop, pretty print the loop structure on FILE. */
6362
6531d1be 6363void
0c8efed8 6364print_loops (FILE *file, int verbosity)
6de9cd9a
DN
6365{
6366 basic_block bb;
6531d1be 6367
24bd1a0b 6368 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
6de9cd9a 6369 if (bb && bb->loop_father)
0c8efed8 6370 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6de9cd9a
DN
6371}
6372
6373
0c8efed8
SP
6374/* Debugging loops structure at tree level, at some VERBOSITY level. */
6375
6376void
6377debug_loops (int verbosity)
6378{
6379 print_loops (stderr, verbosity);
6380}
6381
6382/* Print on stderr the code of LOOP, at some VERBOSITY level. */
6de9cd9a 6383
6531d1be 6384void
0c8efed8 6385debug_loop (struct loop *loop, int verbosity)
6de9cd9a 6386{
0c8efed8 6387 print_loop (stderr, loop, 0, verbosity);
6de9cd9a
DN
6388}
6389
0c8efed8
SP
6390/* Print on stderr the code of loop number NUM, at some VERBOSITY
6391 level. */
6392
6393void
6394debug_loop_num (unsigned num, int verbosity)
6395{
6396 debug_loop (get_loop (num), verbosity);
6397}
6de9cd9a
DN
6398
6399/* Return true if BB ends with a call, possibly followed by some
6400 instructions that must stay with the call. Return false,
6401 otherwise. */
6402
6403static bool
b48d0358 6404tree_block_ends_with_call_p (basic_block bb)
6de9cd9a 6405{
b48d0358 6406 block_stmt_iterator bsi = bsi_last (bb);
0e014996 6407 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
6de9cd9a
DN
6408}
6409
6410
6411/* Return true if BB ends with a conditional branch. Return false,
6412 otherwise. */
6413
6414static bool
9678086d 6415tree_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a 6416{
75547801
KG
6417 /* This CONST_CAST is okay because last_stmt doesn't modify its
6418 argument and the return value is not modified. */
b1d5455a 6419 const_tree stmt = last_stmt (CONST_CAST_BB(bb));
9885da8e 6420 return (stmt && TREE_CODE (stmt) == COND_EXPR);
6de9cd9a
DN
6421}
6422
6423
6424/* Return true if we need to add fake edge to exit at statement T.
6425 Helper function for tree_flow_call_edges_add. */
6426
6427static bool
6428need_fake_edge_p (tree t)
6429{
23ef6d21
BE
6430 tree call, fndecl = NULL_TREE;
6431 int call_flags;
6de9cd9a
DN
6432
6433 /* NORETURN and LONGJMP calls already have an edge to exit.
321cf1f2 6434 CONST and PURE calls do not need one.
6de9cd9a
DN
6435 We don't currently check for CONST and PURE here, although
6436 it would be a good idea, because those attributes are
6437 figured out from the RTL in mark_constant_function, and
6438 the counter incrementation code from -fprofile-arcs
6439 leads to different results from -fbranch-probabilities. */
cd709752 6440 call = get_call_expr_in (t);
23ef6d21
BE
6441 if (call)
6442 {
6443 fndecl = get_callee_fndecl (call);
6444 call_flags = call_expr_flags (call);
6445 }
6446
6447 if (call && fndecl && DECL_BUILT_IN (fndecl)
6448 && (call_flags & ECF_NOTHROW)
6449 && !(call_flags & ECF_NORETURN)
6450 && !(call_flags & ECF_RETURNS_TWICE))
6451 return false;
6452
6453 if (call && !(call_flags & ECF_NORETURN))
6de9cd9a
DN
6454 return true;
6455
6456 if (TREE_CODE (t) == ASM_EXPR
6457 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
6458 return true;
6459
6460 return false;
6461}
6462
6463
6464/* Add fake edges to the function exit for any non constant and non
6465 noreturn calls, volatile inline assembly in the bitmap of blocks
6466 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6467 the number of blocks that were split.
6468
6469 The goal is to expose cases in which entering a basic block does
6470 not imply that all subsequent instructions must be executed. */
6471
6472static int
6473tree_flow_call_edges_add (sbitmap blocks)
6474{
6475 int i;
6476 int blocks_split = 0;
6477 int last_bb = last_basic_block;
6478 bool check_last_block = false;
6479
24bd1a0b 6480 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
6481 return 0;
6482
6483 if (! blocks)
6484 check_last_block = true;
6485 else
6486 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6487
6488 /* In the last basic block, before epilogue generation, there will be
6489 a fallthru edge to EXIT. Special care is required if the last insn
6490 of the last basic block is a call because make_edge folds duplicate
6491 edges, which would result in the fallthru edge also being marked
6492 fake, which would result in the fallthru edge being removed by
6493 remove_fake_edges, which would result in an invalid CFG.
6494
6495 Moreover, we can't elide the outgoing fake edge, since the block
6496 profiler needs to take this into account in order to solve the minimal
6497 spanning tree in the case that the call doesn't return.
6498
6499 Handle this by adding a dummy instruction in a new last basic block. */
6500 if (check_last_block)
6501 {
6502 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6503 block_stmt_iterator bsi = bsi_last (bb);
6504 tree t = NULL_TREE;
6505 if (!bsi_end_p (bsi))
6506 t = bsi_stmt (bsi);
6507
6a60530d 6508 if (t && need_fake_edge_p (t))
6de9cd9a
DN
6509 {
6510 edge e;
6511
9ff3d2de
JL
6512 e = find_edge (bb, EXIT_BLOCK_PTR);
6513 if (e)
6514 {
6515 bsi_insert_on_edge (e, build_empty_stmt ());
6516 bsi_commit_edge_inserts ();
6517 }
6de9cd9a
DN
6518 }
6519 }
6520
6521 /* Now add fake edges to the function exit for any non constant
6522 calls since there is no way that we can determine if they will
6523 return or not... */
6524 for (i = 0; i < last_bb; i++)
6525 {
6526 basic_block bb = BASIC_BLOCK (i);
6527 block_stmt_iterator bsi;
6528 tree stmt, last_stmt;
6529
6530 if (!bb)
6531 continue;
6532
6533 if (blocks && !TEST_BIT (blocks, i))
6534 continue;
6535
6536 bsi = bsi_last (bb);
6537 if (!bsi_end_p (bsi))
6538 {
6539 last_stmt = bsi_stmt (bsi);
6540 do
6541 {
6542 stmt = bsi_stmt (bsi);
6543 if (need_fake_edge_p (stmt))
6544 {
6545 edge e;
6546 /* The handling above of the final block before the
6547 epilogue should be enough to verify that there is
6548 no edge to the exit block in CFG already.
6549 Calling make_edge in such case would cause us to
6550 mark that edge as fake and remove it later. */
6551#ifdef ENABLE_CHECKING
6552 if (stmt == last_stmt)
628f6a4e 6553 {
9ff3d2de
JL
6554 e = find_edge (bb, EXIT_BLOCK_PTR);
6555 gcc_assert (e == NULL);
628f6a4e 6556 }
6de9cd9a
DN
6557#endif
6558
6559 /* Note that the following may create a new basic block
6560 and renumber the existing basic blocks. */
6561 if (stmt != last_stmt)
6562 {
6563 e = split_block (bb, stmt);
6564 if (e)
6565 blocks_split++;
6566 }
6567 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6568 }
6569 bsi_prev (&bsi);
6570 }
6571 while (!bsi_end_p (bsi));
6572 }
6573 }
6574
6575 if (blocks_split)
6576 verify_flow_info ();
6577
6578 return blocks_split;
6579}
6580
4f6c2131
EB
6581/* Purge dead abnormal call edges from basic block BB. */
6582
6583bool
6584tree_purge_dead_abnormal_call_edges (basic_block bb)
6585{
6586 bool changed = tree_purge_dead_eh_edges (bb);
6587
6588 if (current_function_has_nonlocal_label)
6589 {
6590 tree stmt = last_stmt (bb);
6591 edge_iterator ei;
6592 edge e;
6593
6594 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
6595 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6596 {
6597 if (e->flags & EDGE_ABNORMAL)
6598 {
6599 remove_edge (e);
6600 changed = true;
6601 }
6602 else
6603 ei_next (&ei);
6604 }
6605
6606 /* See tree_purge_dead_eh_edges below. */
6607 if (changed)
6608 free_dominance_info (CDI_DOMINATORS);
6609 }
6610
6611 return changed;
6612}
6613
672987e8
ZD
6614/* Stores all basic blocks dominated by BB to DOM_BBS. */
6615
6616static void
6617get_all_dominated_blocks (basic_block bb, VEC (basic_block, heap) **dom_bbs)
6618{
6619 basic_block son;
6620
6621 VEC_safe_push (basic_block, heap, *dom_bbs, bb);
6622 for (son = first_dom_son (CDI_DOMINATORS, bb);
6623 son;
6624 son = next_dom_son (CDI_DOMINATORS, son))
6625 get_all_dominated_blocks (son, dom_bbs);
6626}
6627
6628/* Removes edge E and all the blocks dominated by it, and updates dominance
6629 information. The IL in E->src needs to be updated separately.
6630 If dominance info is not available, only the edge E is removed.*/
6631
6632void
6633remove_edge_and_dominated_blocks (edge e)
6634{
6635 VEC (basic_block, heap) *bbs_to_remove = NULL;
6636 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6637 bitmap df, df_idom;
6638 edge f;
6639 edge_iterator ei;
6640 bool none_removed = false;
6641 unsigned i;
6642 basic_block bb, dbb;
6643 bitmap_iterator bi;
6644
2b28c07a 6645 if (!dom_info_available_p (CDI_DOMINATORS))
672987e8
ZD
6646 {
6647 remove_edge (e);
6648 return;
6649 }
6650
6651 /* No updating is needed for edges to exit. */
6652 if (e->dest == EXIT_BLOCK_PTR)
6653 {
6654 if (cfgcleanup_altered_bbs)
6655 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6656 remove_edge (e);
6657 return;
6658 }
6659
6660 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6661 that is not dominated by E->dest, then this set is empty. Otherwise,
6662 all the basic blocks dominated by E->dest are removed.
6663
6664 Also, to DF_IDOM we store the immediate dominators of the blocks in
6665 the dominance frontier of E (i.e., of the successors of the
6666 removed blocks, if there are any, and of E->dest otherwise). */
6667 FOR_EACH_EDGE (f, ei, e->dest->preds)
6668 {
6669 if (f == e)
6670 continue;
6671
6672 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6673 {
6674 none_removed = true;
6675 break;
6676 }
6677 }
6678
6679 df = BITMAP_ALLOC (NULL);
6680 df_idom = BITMAP_ALLOC (NULL);
6681
6682 if (none_removed)
6683 bitmap_set_bit (df_idom,
6684 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6685 else
6686 {
6687 get_all_dominated_blocks (e->dest, &bbs_to_remove);
6688 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6689 {
6690 FOR_EACH_EDGE (f, ei, bb->succs)
6691 {
6692 if (f->dest != EXIT_BLOCK_PTR)
6693 bitmap_set_bit (df, f->dest->index);
6694 }
6695 }
6696 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6697 bitmap_clear_bit (df, bb->index);
6698
6699 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6700 {
6701 bb = BASIC_BLOCK (i);
6702 bitmap_set_bit (df_idom,
6703 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6704 }
6705 }
6706
6707 if (cfgcleanup_altered_bbs)
6708 {
6709 /* Record the set of the altered basic blocks. */
6710 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6711 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6712 }
6713
6714 /* Remove E and the cancelled blocks. */
6715 if (none_removed)
6716 remove_edge (e);
6717 else
6718 {
6719 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6720 delete_basic_block (bb);
6721 }
6722
6723 /* Update the dominance information. The immediate dominator may change only
6724 for blocks whose immediate dominator belongs to DF_IDOM:
6725
6726 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6727 removal. Let Z the arbitrary block such that idom(Z) = Y and
6728 Z dominates X after the removal. Before removal, there exists a path P
6729 from Y to X that avoids Z. Let F be the last edge on P that is
6730 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6731 dominates W, and because of P, Z does not dominate W), and W belongs to
6732 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6733 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6734 {
6735 bb = BASIC_BLOCK (i);
6736 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6737 dbb;
6738 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6739 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6740 }
6741
66f97d31 6742 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
672987e8
ZD
6743
6744 BITMAP_FREE (df);
6745 BITMAP_FREE (df_idom);
6746 VEC_free (basic_block, heap, bbs_to_remove);
6747 VEC_free (basic_block, heap, bbs_to_fix_dom);
6748}
6749
4f6c2131
EB
6750/* Purge dead EH edges from basic block BB. */
6751
1eaba2f2
RH
6752bool
6753tree_purge_dead_eh_edges (basic_block bb)
6754{
6755 bool changed = false;
628f6a4e
BE
6756 edge e;
6757 edge_iterator ei;
1eaba2f2
RH
6758 tree stmt = last_stmt (bb);
6759
6760 if (stmt && tree_can_throw_internal (stmt))
6761 return false;
6762
628f6a4e 6763 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1eaba2f2 6764 {
1eaba2f2
RH
6765 if (e->flags & EDGE_EH)
6766 {
672987e8 6767 remove_edge_and_dominated_blocks (e);
1eaba2f2
RH
6768 changed = true;
6769 }
628f6a4e
BE
6770 else
6771 ei_next (&ei);
1eaba2f2
RH
6772 }
6773
6774 return changed;
6775}
6776
6777bool
6ea2b70d 6778tree_purge_all_dead_eh_edges (const_bitmap blocks)
1eaba2f2
RH
6779{
6780 bool changed = false;
3cd8c58a 6781 unsigned i;
87c476a2 6782 bitmap_iterator bi;
1eaba2f2 6783
87c476a2
ZD
6784 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6785 {
6786 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
6787 }
1eaba2f2
RH
6788
6789 return changed;
6790}
6de9cd9a 6791
a100ac1e
KH
6792/* This function is called whenever a new edge is created or
6793 redirected. */
6794
6795static void
6796tree_execute_on_growing_pred (edge e)
6797{
6798 basic_block bb = e->dest;
6799
6800 if (phi_nodes (bb))
6801 reserve_phi_args_for_new_edge (bb);
6802}
6803
e51546f8
KH
6804/* This function is called immediately before edge E is removed from
6805 the edge vector E->dest->preds. */
6806
6807static void
6808tree_execute_on_shrinking_pred (edge e)
6809{
6810 if (phi_nodes (e->dest))
6811 remove_phi_args (e);
6812}
6813
1cb7dfc3
MH
6814/*---------------------------------------------------------------------------
6815 Helper functions for Loop versioning
6816 ---------------------------------------------------------------------------*/
6817
6818/* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
6819 of 'first'. Both of them are dominated by 'new_head' basic block. When
6820 'new_head' was created by 'second's incoming edge it received phi arguments
6821 on the edge by split_edge(). Later, additional edge 'e' was created to
6531d1be
BF
6822 connect 'new_head' and 'first'. Now this routine adds phi args on this
6823 additional edge 'e' that new_head to second edge received as part of edge
1cb7dfc3
MH
6824 splitting.
6825*/
6826
6827static void
6828tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6829 basic_block new_head, edge e)
6830{
6831 tree phi1, phi2;
d0e12fc6
KH
6832 edge e2 = find_edge (new_head, second);
6833
6834 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6835 edge, we should always have an edge from NEW_HEAD to SECOND. */
6836 gcc_assert (e2 != NULL);
1cb7dfc3
MH
6837
6838 /* Browse all 'second' basic block phi nodes and add phi args to
6839 edge 'e' for 'first' head. PHI args are always in correct order. */
6840
6531d1be
BF
6841 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
6842 phi2 && phi1;
1cb7dfc3
MH
6843 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
6844 {
d0e12fc6
KH
6845 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
6846 add_phi_arg (phi1, def, e);
1cb7dfc3
MH
6847 }
6848}
6849
6531d1be
BF
6850/* Adds a if else statement to COND_BB with condition COND_EXPR.
6851 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
1cb7dfc3
MH
6852 the destination of the ELSE part. */
6853static void
a9b77cd1
ZD
6854tree_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6855 basic_block second_head ATTRIBUTE_UNUSED,
6856 basic_block cond_bb, void *cond_e)
1cb7dfc3
MH
6857{
6858 block_stmt_iterator bsi;
1cb7dfc3
MH
6859 tree new_cond_expr = NULL_TREE;
6860 tree cond_expr = (tree) cond_e;
6861 edge e0;
6862
6863 /* Build new conditional expr */
a9b77cd1
ZD
6864 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr,
6865 NULL_TREE, NULL_TREE);
1cb7dfc3 6866
6531d1be
BF
6867 /* Add new cond in cond_bb. */
6868 bsi = bsi_start (cond_bb);
1cb7dfc3
MH
6869 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
6870 /* Adjust edges appropriately to connect new head with first head
6871 as well as second head. */
6872 e0 = single_succ_edge (cond_bb);
6873 e0->flags &= ~EDGE_FALLTHRU;
6874 e0->flags |= EDGE_FALSE_VALUE;
6875}
6876
6de9cd9a
DN
6877struct cfg_hooks tree_cfg_hooks = {
6878 "tree",
6879 tree_verify_flow_info,
6880 tree_dump_bb, /* dump_bb */
6881 create_bb, /* create_basic_block */
6882 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
6883 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
14fa2cc0 6884 tree_can_remove_branch_p, /* can_remove_branch_p */
6de9cd9a
DN
6885 remove_bb, /* delete_basic_block */
6886 tree_split_block, /* split_block */
6887 tree_move_block_after, /* move_block_after */
6888 tree_can_merge_blocks_p, /* can_merge_blocks_p */
6889 tree_merge_blocks, /* merge_blocks */
6890 tree_predict_edge, /* predict_edge */
6891 tree_predicted_by_p, /* predicted_by_p */
6892 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
6893 tree_duplicate_bb, /* duplicate_block */
6894 tree_split_edge, /* split_edge */
6895 tree_make_forwarder_block, /* make_forward_block */
6896 NULL, /* tidy_fallthru_edge */
6897 tree_block_ends_with_call_p, /* block_ends_with_call_p */
6898 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
d9d4706f 6899 tree_flow_call_edges_add, /* flow_call_edges_add */
a100ac1e 6900 tree_execute_on_growing_pred, /* execute_on_growing_pred */
e51546f8 6901 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
1cb7dfc3
MH
6902 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
6903 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
6904 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
6905 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
6531d1be 6906 flush_pending_stmts /* flush_pending_stmts */
6de9cd9a
DN
6907};
6908
6909
6910/* Split all critical edges. */
6911
c2924966 6912static unsigned int
6de9cd9a
DN
6913split_critical_edges (void)
6914{
6915 basic_block bb;
6916 edge e;
628f6a4e 6917 edge_iterator ei;
6de9cd9a 6918
d6be0d7f
JL
6919 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
6920 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
6921 mappings around the calls to split_edge. */
6922 start_recording_case_labels ();
6de9cd9a
DN
6923 FOR_ALL_BB (bb)
6924 {
628f6a4e 6925 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
6926 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
6927 {
6928 split_edge (e);
6929 }
6930 }
d6be0d7f 6931 end_recording_case_labels ();
c2924966 6932 return 0;
6de9cd9a
DN
6933}
6934
8ddbbcae 6935struct gimple_opt_pass pass_split_crit_edges =
6de9cd9a 6936{
8ddbbcae
JH
6937 {
6938 GIMPLE_PASS,
5d44aeed 6939 "crited", /* name */
6de9cd9a
DN
6940 NULL, /* gate */
6941 split_critical_edges, /* execute */
6942 NULL, /* sub */
6943 NULL, /* next */
6944 0, /* static_pass_number */
6945 TV_TREE_SPLIT_EDGES, /* tv_id */
6946 PROP_cfg, /* properties required */
6947 PROP_no_crit_edges, /* properties_provided */
6948 0, /* properties_destroyed */
6949 0, /* todo_flags_start */
8ddbbcae
JH
6950 TODO_dump_func /* todo_flags_finish */
6951 }
6de9cd9a 6952};
26277d41
PB
6953
6954\f
6955/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
6956 a temporary, make sure and register it to be renamed if necessary,
6957 and finally return the temporary. Put the statements to compute
6958 EXP before the current statement in BSI. */
6959
6960tree
6961gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
6962{
6963 tree t, new_stmt, orig_stmt;
6964
6965 if (is_gimple_val (exp))
6966 return exp;
6967
6968 t = make_rename_temp (type, NULL);
939409af 6969 new_stmt = build_gimple_modify_stmt (t, exp);
26277d41
PB
6970
6971 orig_stmt = bsi_stmt (*bsi);
6972 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
6973 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
6974
6975 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5cd4ec7f 6976 if (gimple_in_ssa_p (cfun))
cfaab3a9 6977 mark_symbols_for_renaming (new_stmt);
26277d41
PB
6978
6979 return t;
6980}
6981
6982/* Build a ternary operation and gimplify it. Emit code before BSI.
6983 Return the gimple_val holding the result. */
6984
6985tree
6986gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
6987 tree type, tree a, tree b, tree c)
6988{
6989 tree ret;
6990
987b67bc 6991 ret = fold_build3 (code, type, a, b, c);
26277d41
PB
6992 STRIP_NOPS (ret);
6993
6994 return gimplify_val (bsi, type, ret);
6995}
6996
6997/* Build a binary operation and gimplify it. Emit code before BSI.
6998 Return the gimple_val holding the result. */
6999
7000tree
7001gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
7002 tree type, tree a, tree b)
7003{
7004 tree ret;
7005
987b67bc 7006 ret = fold_build2 (code, type, a, b);
26277d41
PB
7007 STRIP_NOPS (ret);
7008
7009 return gimplify_val (bsi, type, ret);
7010}
7011
7012/* Build a unary operation and gimplify it. Emit code before BSI.
7013 Return the gimple_val holding the result. */
7014
7015tree
7016gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
7017 tree a)
7018{
7019 tree ret;
7020
987b67bc 7021 ret = fold_build1 (code, type, a);
26277d41
PB
7022 STRIP_NOPS (ret);
7023
7024 return gimplify_val (bsi, type, ret);
7025}
7026
7027
6de9cd9a
DN
7028\f
7029/* Emit return warnings. */
7030
c2924966 7031static unsigned int
6de9cd9a
DN
7032execute_warn_function_return (void)
7033{
9506ac2b 7034 source_location location;
6de9cd9a
DN
7035 tree last;
7036 edge e;
628f6a4e 7037 edge_iterator ei;
6de9cd9a 7038
6de9cd9a
DN
7039 /* If we have a path to EXIT, then we do return. */
7040 if (TREE_THIS_VOLATILE (cfun->decl)
628f6a4e 7041 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
6de9cd9a 7042 {
9506ac2b 7043 location = UNKNOWN_LOCATION;
628f6a4e 7044 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
7045 {
7046 last = last_stmt (e->src);
7047 if (TREE_CODE (last) == RETURN_EXPR
9506ac2b 7048 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
6de9cd9a
DN
7049 break;
7050 }
9506ac2b
PB
7051 if (location == UNKNOWN_LOCATION)
7052 location = cfun->function_end_locus;
d4ee4d25 7053 warning (0, "%H%<noreturn%> function does return", &location);
6de9cd9a
DN
7054 }
7055
7056 /* If we see "return;" in some basic block, then we do reach the end
7057 without returning a value. */
7058 else if (warn_return_type
089efaa4 7059 && !TREE_NO_WARNING (cfun->decl)
628f6a4e 7060 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
6de9cd9a
DN
7061 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7062 {
628f6a4e 7063 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
7064 {
7065 tree last = last_stmt (e->src);
7066 if (TREE_CODE (last) == RETURN_EXPR
0c9b182b
JJ
7067 && TREE_OPERAND (last, 0) == NULL
7068 && !TREE_NO_WARNING (last))
6de9cd9a 7069 {
9506ac2b
PB
7070 location = EXPR_LOCATION (last);
7071 if (location == UNKNOWN_LOCATION)
7072 location = cfun->function_end_locus;
c5409249 7073 warning (OPT_Wreturn_type, "%Hcontrol reaches end of non-void function", &location);
089efaa4 7074 TREE_NO_WARNING (cfun->decl) = 1;
6de9cd9a
DN
7075 break;
7076 }
7077 }
7078 }
c2924966 7079 return 0;
6de9cd9a
DN
7080}
7081
7082
7083/* Given a basic block B which ends with a conditional and has
7084 precisely two successors, determine which of the edges is taken if
7085 the conditional is true and which is taken if the conditional is
7086 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7087
7088void
7089extract_true_false_edges_from_block (basic_block b,
7090 edge *true_edge,
7091 edge *false_edge)
7092{
628f6a4e 7093 edge e = EDGE_SUCC (b, 0);
6de9cd9a
DN
7094
7095 if (e->flags & EDGE_TRUE_VALUE)
7096 {
7097 *true_edge = e;
628f6a4e 7098 *false_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7099 }
7100 else
7101 {
7102 *false_edge = e;
628f6a4e 7103 *true_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7104 }
7105}
7106
8ddbbcae 7107struct gimple_opt_pass pass_warn_function_return =
6de9cd9a 7108{
8ddbbcae
JH
7109 {
7110 GIMPLE_PASS,
6de9cd9a
DN
7111 NULL, /* name */
7112 NULL, /* gate */
7113 execute_warn_function_return, /* execute */
7114 NULL, /* sub */
7115 NULL, /* next */
7116 0, /* static_pass_number */
7117 0, /* tv_id */
00bfee6f 7118 PROP_cfg, /* properties_required */
6de9cd9a
DN
7119 0, /* properties_provided */
7120 0, /* properties_destroyed */
7121 0, /* todo_flags_start */
8ddbbcae
JH
7122 0 /* todo_flags_finish */
7123 }
6de9cd9a 7124};
aa313ed4
JH
7125
7126/* Emit noreturn warnings. */
7127
c2924966 7128static unsigned int
aa313ed4
JH
7129execute_warn_function_noreturn (void)
7130{
7131 if (warn_missing_noreturn
7132 && !TREE_THIS_VOLATILE (cfun->decl)
7133 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
e8924938 7134 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
3176a0c2
DD
7135 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
7136 "for attribute %<noreturn%>",
aa313ed4 7137 cfun->decl);
c2924966 7138 return 0;
aa313ed4
JH
7139}
7140
8ddbbcae 7141struct gimple_opt_pass pass_warn_function_noreturn =
aa313ed4 7142{
8ddbbcae
JH
7143 {
7144 GIMPLE_PASS,
aa313ed4
JH
7145 NULL, /* name */
7146 NULL, /* gate */
7147 execute_warn_function_noreturn, /* execute */
7148 NULL, /* sub */
7149 NULL, /* next */
7150 0, /* static_pass_number */
7151 0, /* tv_id */
7152 PROP_cfg, /* properties_required */
7153 0, /* properties_provided */
7154 0, /* properties_destroyed */
7155 0, /* todo_flags_start */
8ddbbcae
JH
7156 0 /* todo_flags_finish */
7157 }
aa313ed4 7158};