]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-cfg.c
cfgloopanal.c (check_irred): Move into ...
[thirdparty/gcc.git] / gcc / tree-cfg.c
CommitLineData
6de9cd9a 1/* Control flow functions for trees.
66647d44 2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
56e84019 3 Free Software Foundation, Inc.
6de9cd9a
DN
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
9dcd6f09 10the Free Software Foundation; either version 3, or (at your option)
6de9cd9a
DN
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "output.h"
6de9cd9a
DN
32#include "flags.h"
33#include "function.h"
34#include "expr.h"
35#include "ggc.h"
36#include "langhooks.h"
37#include "diagnostic.h"
38#include "tree-flow.h"
39#include "timevar.h"
40#include "tree-dump.h"
41#include "tree-pass.h"
42#include "toplev.h"
43#include "except.h"
44#include "cfgloop.h"
42759f1e 45#include "cfglayout.h"
9af0df6b 46#include "tree-ssa-propagate.h"
6946b3f7 47#include "value-prof.h"
4437b50d 48#include "pointer-set.h"
917948d3 49#include "tree-inline.h"
6de9cd9a
DN
50
51/* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
53
54/* Local declarations. */
55
56/* Initial capacity for the basic block array. */
57static const int initial_cfg_capacity = 20;
58
d6be0d7f
JL
59/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
726a989a 62 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
92b6dff3 63
d6be0d7f
JL
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
92b6dff3 66
d6be0d7f
JL
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
71
15814ba0 72static struct pointer_map_t *edge_to_cases;
92b6dff3 73
6de9cd9a
DN
74/* CFG statistics. */
75struct cfg_stats_d
76{
77 long num_merged_labels;
78};
79
80static struct cfg_stats_d cfg_stats;
81
82/* Nonzero if we found a computed goto while building basic blocks. */
83static bool found_computed_goto;
84
6c52e687
CC
85/* Hash table to store last discriminator assigned for each locus. */
86struct locus_discrim_map
87{
88 location_t locus;
89 int discriminator;
90};
91static htab_t discriminator_per_locus;
92
6de9cd9a 93/* Basic blocks and flowgraphs. */
726a989a 94static void make_blocks (gimple_seq);
6de9cd9a 95static void factor_computed_gotos (void);
6de9cd9a
DN
96
97/* Edges. */
98static void make_edges (void);
6de9cd9a 99static void make_cond_expr_edges (basic_block);
726a989a 100static void make_gimple_switch_edges (basic_block);
6de9cd9a 101static void make_goto_expr_edges (basic_block);
6c52e687
CC
102static unsigned int locus_map_hash (const void *);
103static int locus_map_eq (const void *, const void *);
104static void assign_discriminator (location_t, basic_block);
726a989a
RB
105static edge gimple_redirect_edge_and_branch (edge, basic_block);
106static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
c2924966 107static unsigned int split_critical_edges (void);
6de9cd9a
DN
108
109/* Various helpers. */
726a989a
RB
110static inline bool stmt_starts_bb_p (gimple, gimple);
111static int gimple_verify_flow_info (void);
112static void gimple_make_forwarder_block (edge);
113static void gimple_cfg2vcg (FILE *);
6c52e687 114static gimple first_non_label_stmt (basic_block);
6de9cd9a
DN
115
116/* Flowgraph optimization and cleanup. */
726a989a
RB
117static void gimple_merge_blocks (basic_block, basic_block);
118static bool gimple_can_merge_blocks_p (basic_block, basic_block);
6de9cd9a 119static void remove_bb (basic_block);
be477406 120static edge find_taken_edge_computed_goto (basic_block, tree);
6de9cd9a
DN
121static edge find_taken_edge_cond_expr (basic_block, tree);
122static edge find_taken_edge_switch_expr (basic_block, tree);
726a989a 123static tree find_case_label_for_value (gimple, tree);
6de9cd9a 124
a930a4ef 125void
9defb1fe 126init_empty_tree_cfg_for_function (struct function *fn)
a930a4ef
JH
127{
128 /* Initialize the basic block array. */
9defb1fe
DN
129 init_flow (fn);
130 profile_status_for_function (fn) = PROFILE_ABSENT;
131 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
132 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
133 basic_block_info_for_function (fn)
134 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
135 VEC_safe_grow_cleared (basic_block, gc,
136 basic_block_info_for_function (fn),
a590ac65 137 initial_cfg_capacity);
a930a4ef
JH
138
139 /* Build a mapping of labels to their associated blocks. */
9defb1fe
DN
140 label_to_block_map_for_function (fn)
141 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
142 VEC_safe_grow_cleared (basic_block, gc,
143 label_to_block_map_for_function (fn),
a590ac65 144 initial_cfg_capacity);
a930a4ef 145
9defb1fe
DN
146 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
147 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
148 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
149 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
150
151 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
152 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
153 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
154 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
155}
156
/* Set up an empty CFG for the current function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
6de9cd9a
DN
162
163/*---------------------------------------------------------------------------
164 Create basic blocks
165---------------------------------------------------------------------------*/
166
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  /* make_blocks sets this flag as a side effect.  */
  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator table only
     lives for the duration of edge creation.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
	gimple_cfg2vcg (vcg_file);
	dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}
231
/* Pass execute function: build the CFG from the current function's
   flat gimple body, then release the body (the statements now live in
   the basic blocks).  Always returns 0 (no extra TODO flags).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}
246
/* Pass descriptor for CFG construction ("cfg").  Requires lowered
   gimple with EH (PROP_gimple_leh) and provides PROP_cfg.  */

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func			/* todo_flags_finish */
 }
};
266
726a989a
RB
267
268/* Return true if T is a computed goto. */
269
270static bool
271computed_goto_p (gimple t)
272{
273 return (gimple_code (t) == GIMPLE_GOTO
274 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
275}
276
277
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  /* Shared variable holding the destination of each factored goto.  */
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
	continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  gimple assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (!factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
	      factored_computed_goto_label
		= gimple_build_label (factored_label_decl);
	      gsi_insert_after (&new_gsi, factored_computed_goto_label,
				GSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = gimple_build_goto (var);
	      gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = gimple_build_assign (var, gimple_goto_dest (last));
	  gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  gimple_goto_set_dest (last, factored_label_decl);
	}
    }
}
352
353
/* Build a flowgraph for the sequence of stmts SEQ: walk the statements
   in order, splitting the sequence into basic blocks at statements that
   start or end a block.  Sets FOUND_COMPUTED_GOTO as a side effect.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    seq = gsi_split_seq_before (&i);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      /* Complex and vector temporaries need the GIMPLE-register
		 marker to stay in SSA form.  */
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
421
422
/* Create and return a new empty basic block after bb AFTER.  H is the
   statement sequence to seed the block with (may be NULL); E must be
   NULL (this hook does not accept an edge argument).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed (by ~25%, like a vector).  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}
460
461
462/*---------------------------------------------------------------------------
463 Edge creation
464---------------------------------------------------------------------------*/
465
fca01525
KH
/* Fold COND_EXPR_COND of each COND_EXPR: if the condition of a
   GIMPLE_COND folds to a constant, rewrite the statement as an
   unconditionally-true or -false condition.  Overflow warnings are
   deferred so they are only emitted when the fold actually simplified
   the condition.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold_binary (gimple_cond_code (stmt), boolean_type_node,
			      gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (stmt);
	  else if (onep)
	    gimple_cond_make_true (stmt);
	}
    }
}
503
6de9cd9a
DN
/* Join all the blocks in the flowgraph: walk every basic block and add
   the outgoing edges implied by its last statement.  OpenMP constructs
   are tracked with a stack of omp_regions (CUR_REGION) so that the
   region entry/exit/continue blocks can be wired up correctly.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	      fallthru = false;
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* Some calls are known not to return.  */
	      fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		{
		  make_eh_edges (last);
		}
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_PARALLEL:
	    case GIMPLE_OMP_TASK:
	    case GIMPLE_OMP_FOR:
	    case GIMPLE_OMP_SINGLE:
	    case GIMPLE_OMP_MASTER:
	    case GIMPLE_OMP_ORDERED:
	    case GIMPLE_OMP_CRITICAL:
	    case GIMPLE_OMP_SECTION:
	      /* Open a new OMP region; its exit block is filled in when we
		 reach the matching GIMPLE_OMP_RETURN below.  */
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS_SWITCH:
	      /* Edges out of the sections switch are made when the
		 enclosing GIMPLE_OMP_CONTINUE is processed.  */
	      fallthru = false;
	      break;

	    case GIMPLE_OMP_ATOMIC_LOAD:
	    case GIMPLE_OMP_ATOMIC_STORE:
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_RETURN:
	      /* In the case of a GIMPLE_OMP_SECTION, the edge will go
		 somewhere other than the next block.  This will be
		 created later.  */
	      cur_region->exit = bb;
	      fallthru = cur_region->type != GIMPLE_OMP_SECTION;
	      cur_region = cur_region->outer;
	      break;

	    case GIMPLE_OMP_CONTINUE:
	      cur_region->cont = bb;
	      switch (cur_region->type)
		{
		case GIMPLE_OMP_FOR:
		  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
		     succs edges as abnormal to prevent splitting
		     them.  */
		  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
		  /* Make the loopback edge.  */
		  make_edge (bb, single_succ (cur_region->entry),
			     EDGE_ABNORMAL);

		  /* Create an edge from GIMPLE_OMP_FOR to exit, which
		     corresponds to the case that the body of the loop
		     is not executed at all.  */
		  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
		  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
		  fallthru = false;
		  break;

		case GIMPLE_OMP_SECTIONS:
		  /* Wire up the edges into and out of the nested sections.  */
		  {
		    basic_block switch_bb = single_succ (cur_region->entry);

		    struct omp_region *i;
		    for (i = cur_region->inner; i ; i = i->next)
		      {
			gcc_assert (i->type == GIMPLE_OMP_SECTION);
			make_edge (switch_bb, i->entry, 0);
			make_edge (i->exit, bb, EDGE_FALLTHRU);
		      }

		    /* Make the loopback edge to the block with
		       GIMPLE_OMP_SECTIONS_SWITCH.  */
		    make_edge (bb, switch_bb, 0);

		    /* Make the edge from the switch to exit.  */
		    make_edge (switch_bb, bb->next_bb, 0);
		    fallthru = false;
		  }
		  break;

		default:
		  gcc_unreachable ();
		}
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	{
	  make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	  if (last)
	    assign_discriminator (gimple_location (last), bb->next_bb);
	}
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
681
6c52e687
CC
682/* Trivial hash function for a location_t. ITEM is a pointer to
683 a hash table entry that maps a location_t to a discriminator. */
684
685static unsigned int
686locus_map_hash (const void *item)
687{
688 return ((const struct locus_discrim_map *) item)->locus;
689}
690
691/* Equality function for the locus-to-discriminator map. VA and VB
692 point to the two hash table entries to compare. */
693
694static int
695locus_map_eq (const void *va, const void *vb)
696{
697 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
698 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
699 return a->locus == b->locus;
700}
701
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  The first value returned for a given locus is 1.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  /* Look up (or reserve a slot for) the entry keyed by LOCUS.  */
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
			      (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      /* First time we see this locus: create its map entry.  */
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
729
730/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
731
732static bool
733same_line_p (location_t locus1, location_t locus2)
734{
735 expanded_location from, to;
736
737 if (locus1 == locus2)
738 return true;
739
740 from = expand_location (locus1);
741 to = expand_location (locus2);
742
743 if (from.line != to.line)
744 return false;
745 if (from.file == to.file)
746 return true;
747 return (from.file != NULL
748 && to.file != NULL
749 && strcmp (from.file, to.file) == 0);
750}
751
/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple to_stmt;

  /* Nothing to do for an unknown location, or if BB already has a
     discriminator assigned.  */
  if (locus == 0 || bb->discriminator != 0)
    return;

  to_stmt = first_non_label_stmt (bb);
  if (to_stmt && same_line_p (locus, gimple_location (to_stmt)))
    bb->discriminator = next_discriminator_for_locus (locus);
}
6de9cd9a 767
/* Create the edges for a GIMPLE_COND starting at block BB: a TRUE edge
   to the then-block and a FALSE edge to the else-block, recording goto
   locations and discriminators.  The condition's labels are cleared
   afterwards since the edges now carry that information.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  /* make_edge may return NULL if the edge already exists (e.g. both
     arms branch to the same block).  */
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
	e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
811
92b6dff3 812
d6be0d7f
JL
813/* Called for each element in the hash table (P) as we delete the
814 edge to cases hash table.
815
6531d1be 816 Clear all the TREE_CHAINs to prevent problems with copying of
d6be0d7f
JL
817 SWITCH_EXPRs and structure sharing rules, then free the hash table
818 element. */
819
15814ba0 820static bool
ac7d7749 821edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
15814ba0 822 void *data ATTRIBUTE_UNUSED)
d6be0d7f 823{
d6be0d7f
JL
824 tree t, next;
825
15814ba0 826 for (t = (tree) *value; t; t = next)
d6be0d7f
JL
827 {
828 next = TREE_CHAIN (t);
829 TREE_CHAIN (t) = NULL;
830 }
15814ba0
PB
831
832 *value = NULL;
833 return false;
d6be0d7f
JL
834}
835
/* Start recording information mapping edges to case labels.
   Must not be called while a recording session is already active.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
}
844
845/* Return nonzero if we are recording information for case labels. */
846
847static bool
848recording_case_labels_p (void)
849{
850 return (edge_to_cases != NULL);
851}
852
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  Clears the TREE_CHAINs
   of the recorded CASE_LABEL_EXPRs before destroying the map.  */

void
end_recording_case_labels (void)
{
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
}
862
d6be0d7f
JL
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  /* E is now guaranteed to be present since the loop above populated
     every outgoing edge of E->src.  */
  return (tree) *pointer_map_contains (edge_to_cases, e);
}
6de9cd9a 904
/* Create the edges for a GIMPLE_SWITCH starting at block BB: one edge
   per case label (including the default), each assigned a discriminator
   keyed on the switch statement's location.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}
926
927
/* Return the basic block holding label DEST in function IFUN, or NULL
   if DEST's uid is out of range of the label-to-block map.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Inserting the label assigned DEST a valid uid.  */
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}
952
4f6c2131
EB
953/* Create edges for an abnormal goto statement at block BB. If FOR_CALL
954 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
955
956void
957make_abnormal_goto_edges (basic_block bb, bool for_call)
958{
959 basic_block target_bb;
726a989a 960 gimple_stmt_iterator gsi;
4f6c2131
EB
961
962 FOR_EACH_BB (target_bb)
726a989a 963 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
4f6c2131 964 {
726a989a
RB
965 gimple label_stmt = gsi_stmt (gsi);
966 tree target;
4f6c2131 967
726a989a 968 if (gimple_code (label_stmt) != GIMPLE_LABEL)
4f6c2131
EB
969 break;
970
726a989a 971 target = gimple_label_label (label_stmt);
4f6c2131
EB
972
973 /* Make an edge to every label block that has been marked as a
974 potential target for a computed goto or a non-local goto. */
975 if ((FORCED_LABEL (target) && !for_call)
976 || (DECL_NONLOCAL (target) && for_call))
977 {
978 make_edge (bb, target_bb, EDGE_ABNORMAL);
979 break;
980 }
981 }
982}
983
6de9cd9a
DN
984/* Create edges for a goto statement at block BB. */
985
986static void
987make_goto_expr_edges (basic_block bb)
988{
726a989a
RB
989 gimple_stmt_iterator last = gsi_last_bb (bb);
990 gimple goto_t = gsi_stmt (last);
6de9cd9a 991
4f6c2131
EB
992 /* A simple GOTO creates normal edges. */
993 if (simple_goto_p (goto_t))
6de9cd9a 994 {
726a989a 995 tree dest = gimple_goto_dest (goto_t);
6c52e687
CC
996 basic_block label_bb = label_to_block (dest);
997 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
726a989a 998 e->goto_locus = gimple_location (goto_t);
6c52e687 999 assign_discriminator (e->goto_locus, label_bb);
cc2a64dd
JJ
1000 if (e->goto_locus)
1001 e->goto_block = gimple_block (goto_t);
726a989a 1002 gsi_remove (&last, true);
4f6c2131 1003 return;
6de9cd9a
DN
1004 }
1005
4f6c2131
EB
1006 /* A computed GOTO creates abnormal edges. */
1007 make_abnormal_goto_edges (bb, false);
6de9cd9a
DN
1008}
1009
1010
1011/*---------------------------------------------------------------------------
1012 Flowgraph analysis
1013---------------------------------------------------------------------------*/
1014
f698d217
SB
1015/* Cleanup useless labels in basic blocks. This is something we wish
1016 to do early because it allows us to group case labels before creating
1017 the edges for the CFG, and it speeds up block statement iterators in
1018 all passes later on.
8b11009b
ZD
1019 We rerun this pass after CFG is created, to get rid of the labels that
1020 are no longer referenced. After then we do not run it any more, since
1021 (almost) no new labels should be created. */
f698d217
SB
1022
1023/* A map from basic block index to the leading label of that block. */
8b11009b
ZD
1024static struct label_record
1025{
1026 /* The label. */
1027 tree label;
1028
1029 /* True if the label is referenced from somewhere. */
1030 bool used;
1031} *label_for_bb;
f698d217
SB
1032
1033/* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
1034static void
7e5487a2 1035update_eh_label (struct eh_region_d *region)
f698d217
SB
1036{
1037 tree old_label = get_eh_region_tree_label (region);
1038 if (old_label)
1039 {
165b54c3
SB
1040 tree new_label;
1041 basic_block bb = label_to_block (old_label);
1042
1043 /* ??? After optimizing, there may be EH regions with labels
1044 that have already been removed from the function body, so
1045 there is no basic block for them. */
1046 if (! bb)
1047 return;
1048
8b11009b
ZD
1049 new_label = label_for_bb[bb->index].label;
1050 label_for_bb[bb->index].used = true;
f698d217
SB
1051 set_eh_region_tree_label (region, new_label);
1052 }
1053}
1054
726a989a 1055
242229bb 1056/* Given LABEL return the first label in the same basic block. */
726a989a 1057
242229bb
JH
1058static tree
1059main_block_label (tree label)
1060{
1061 basic_block bb = label_to_block (label);
8b11009b 1062 tree main_label = label_for_bb[bb->index].label;
242229bb
JH
1063
1064 /* label_to_block possibly inserted undefined label into the chain. */
8b11009b
ZD
1065 if (!main_label)
1066 {
1067 label_for_bb[bb->index].label = label;
1068 main_label = label;
1069 }
1070
1071 label_for_bb[bb->index].used = true;
1072 return main_label;
242229bb
JH
1073}
1074
b986ebf3 1075/* Cleanup redundant labels. This is a three-step process:
f698d217
SB
1076 1) Find the leading label for each block.
1077 2) Redirect all references to labels to the leading labels.
1078 3) Cleanup all useless labels. */
6de9cd9a 1079
165b54c3 1080void
6de9cd9a
DN
1081cleanup_dead_labels (void)
1082{
1083 basic_block bb;
8b11009b 1084 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
6de9cd9a
DN
1085
1086 /* Find a suitable label for each block. We use the first user-defined
f0b698c1 1087 label if there is one, or otherwise just the first label we see. */
6de9cd9a
DN
1088 FOR_EACH_BB (bb)
1089 {
726a989a 1090 gimple_stmt_iterator i;
6de9cd9a 1091
726a989a 1092 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
6de9cd9a 1093 {
726a989a
RB
1094 tree label;
1095 gimple stmt = gsi_stmt (i);
6de9cd9a 1096
726a989a 1097 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
1098 break;
1099
726a989a 1100 label = gimple_label_label (stmt);
6de9cd9a
DN
1101
1102 /* If we have not yet seen a label for the current block,
1103 remember this one and see if there are more labels. */
8b11009b 1104 if (!label_for_bb[bb->index].label)
6de9cd9a 1105 {
8b11009b 1106 label_for_bb[bb->index].label = label;
6de9cd9a
DN
1107 continue;
1108 }
1109
1110 /* If we did see a label for the current block already, but it
1111 is an artificially created label, replace it if the current
1112 label is a user defined label. */
8b11009b
ZD
1113 if (!DECL_ARTIFICIAL (label)
1114 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
6de9cd9a 1115 {
8b11009b 1116 label_for_bb[bb->index].label = label;
6de9cd9a
DN
1117 break;
1118 }
1119 }
1120 }
1121
f698d217
SB
1122 /* Now redirect all jumps/branches to the selected label.
1123 First do so for each block ending in a control statement. */
6de9cd9a
DN
1124 FOR_EACH_BB (bb)
1125 {
726a989a 1126 gimple stmt = last_stmt (bb);
6de9cd9a
DN
1127 if (!stmt)
1128 continue;
1129
726a989a 1130 switch (gimple_code (stmt))
6de9cd9a 1131 {
726a989a 1132 case GIMPLE_COND:
6de9cd9a 1133 {
726a989a
RB
1134 tree true_label = gimple_cond_true_label (stmt);
1135 tree false_label = gimple_cond_false_label (stmt);
6de9cd9a 1136
726a989a
RB
1137 if (true_label)
1138 gimple_cond_set_true_label (stmt, main_block_label (true_label));
1139 if (false_label)
1140 gimple_cond_set_false_label (stmt, main_block_label (false_label));
6de9cd9a
DN
1141 break;
1142 }
6531d1be 1143
726a989a 1144 case GIMPLE_SWITCH:
6de9cd9a 1145 {
726a989a 1146 size_t i, n = gimple_switch_num_labels (stmt);
6531d1be 1147
6de9cd9a
DN
1148 /* Replace all destination labels. */
1149 for (i = 0; i < n; ++i)
92b6dff3 1150 {
726a989a
RB
1151 tree case_label = gimple_switch_label (stmt, i);
1152 tree label = main_block_label (CASE_LABEL (case_label));
1153 CASE_LABEL (case_label) = label;
92b6dff3 1154 }
6de9cd9a
DN
1155 break;
1156 }
1157
726a989a 1158 /* We have to handle gotos until they're removed, and we don't
f667741c 1159 remove them until after we've created the CFG edges. */
726a989a
RB
1160 case GIMPLE_GOTO:
1161 if (!computed_goto_p (stmt))
242229bb 1162 {
726a989a
RB
1163 tree new_dest = main_block_label (gimple_goto_dest (stmt));
1164 gimple_goto_set_dest (stmt, new_dest);
242229bb
JH
1165 break;
1166 }
f667741c 1167
6de9cd9a
DN
1168 default:
1169 break;
1170 }
1171 }
1172
f698d217
SB
1173 for_each_eh_region (update_eh_label);
1174
6de9cd9a 1175 /* Finally, purge dead labels. All user-defined labels and labels that
cea0f4f1
AP
1176 can be the target of non-local gotos and labels which have their
1177 address taken are preserved. */
6de9cd9a
DN
1178 FOR_EACH_BB (bb)
1179 {
726a989a 1180 gimple_stmt_iterator i;
8b11009b 1181 tree label_for_this_bb = label_for_bb[bb->index].label;
6de9cd9a 1182
8b11009b 1183 if (!label_for_this_bb)
6de9cd9a
DN
1184 continue;
1185
8b11009b
ZD
1186 /* If the main label of the block is unused, we may still remove it. */
1187 if (!label_for_bb[bb->index].used)
1188 label_for_this_bb = NULL;
1189
726a989a 1190 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
6de9cd9a 1191 {
726a989a
RB
1192 tree label;
1193 gimple stmt = gsi_stmt (i);
6de9cd9a 1194
726a989a 1195 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
1196 break;
1197
726a989a 1198 label = gimple_label_label (stmt);
6de9cd9a
DN
1199
1200 if (label == label_for_this_bb
726a989a 1201 || !DECL_ARTIFICIAL (label)
cea0f4f1
AP
1202 || DECL_NONLOCAL (label)
1203 || FORCED_LABEL (label))
726a989a 1204 gsi_next (&i);
6de9cd9a 1205 else
726a989a 1206 gsi_remove (&i, true);
6de9cd9a
DN
1207 }
1208 }
1209
1210 free (label_for_bb);
1211}
1212
f667741c
SB
1213/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1214 and scan the sorted vector of cases. Combine the ones jumping to the
1215 same label.
1216 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1217
165b54c3 1218void
f667741c
SB
1219group_case_labels (void)
1220{
1221 basic_block bb;
1222
1223 FOR_EACH_BB (bb)
1224 {
726a989a
RB
1225 gimple stmt = last_stmt (bb);
1226 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
f667741c 1227 {
726a989a 1228 int old_size = gimple_switch_num_labels (stmt);
f667741c 1229 int i, j, new_size = old_size;
b7814a18
RG
1230 tree default_case = NULL_TREE;
1231 tree default_label = NULL_TREE;
726a989a 1232 bool has_default;
29c4d22b 1233
726a989a 1234 /* The default label is always the first case in a switch
b7814a18 1235 statement after gimplification if it was not optimized
726a989a
RB
1236 away */
1237 if (!CASE_LOW (gimple_switch_default_label (stmt))
1238 && !CASE_HIGH (gimple_switch_default_label (stmt)))
b7814a18 1239 {
726a989a 1240 default_case = gimple_switch_default_label (stmt);
b7814a18 1241 default_label = CASE_LABEL (default_case);
726a989a 1242 has_default = true;
b7814a18 1243 }
726a989a
RB
1244 else
1245 has_default = false;
f667741c 1246
b7814a18 1247 /* Look for possible opportunities to merge cases. */
726a989a
RB
1248 if (has_default)
1249 i = 1;
1250 else
1251 i = 0;
b7814a18 1252 while (i < old_size)
f667741c 1253 {
ed9cef22 1254 tree base_case, base_label, base_high;
726a989a 1255 base_case = gimple_switch_label (stmt, i);
f667741c 1256
1e128c5f 1257 gcc_assert (base_case);
f667741c 1258 base_label = CASE_LABEL (base_case);
31e9eea2
SB
1259
1260 /* Discard cases that have the same destination as the
1261 default case. */
1262 if (base_label == default_label)
1263 {
726a989a 1264 gimple_switch_set_label (stmt, i, NULL_TREE);
31e9eea2 1265 i++;
29c4d22b 1266 new_size--;
31e9eea2
SB
1267 continue;
1268 }
1269
726a989a
RB
1270 base_high = CASE_HIGH (base_case)
1271 ? CASE_HIGH (base_case)
1272 : CASE_LOW (base_case);
d717e500 1273 i++;
726a989a 1274
f667741c
SB
1275 /* Try to merge case labels. Break out when we reach the end
1276 of the label vector or when we cannot merge the next case
1277 label with the current one. */
b7814a18 1278 while (i < old_size)
f667741c 1279 {
726a989a 1280 tree merge_case = gimple_switch_label (stmt, i);
f667741c
SB
1281 tree merge_label = CASE_LABEL (merge_case);
1282 tree t = int_const_binop (PLUS_EXPR, base_high,
1283 integer_one_node, 1);
1284
1285 /* Merge the cases if they jump to the same place,
1286 and their ranges are consecutive. */
1287 if (merge_label == base_label
1288 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1289 {
1290 base_high = CASE_HIGH (merge_case) ?
1291 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1292 CASE_HIGH (base_case) = base_high;
726a989a 1293 gimple_switch_set_label (stmt, i, NULL_TREE);
f667741c 1294 new_size--;
d717e500 1295 i++;
f667741c
SB
1296 }
1297 else
1298 break;
1299 }
1300 }
1301
1302 /* Compress the case labels in the label vector, and adjust the
1303 length of the vector. */
1304 for (i = 0, j = 0; i < new_size; i++)
1305 {
726a989a 1306 while (! gimple_switch_label (stmt, j))
f667741c 1307 j++;
726a989a
RB
1308 gimple_switch_set_label (stmt, i,
1309 gimple_switch_label (stmt, j++));
f667741c 1310 }
726a989a
RB
1311
1312 gcc_assert (new_size <= old_size);
1313 gimple_switch_set_num_labels (stmt, new_size);
f667741c
SB
1314 }
1315 }
1316}
6de9cd9a
DN
1317
1318/* Checks whether we can merge block B into block A. */
1319
1320static bool
726a989a 1321gimple_can_merge_blocks_p (basic_block a, basic_block b)
6de9cd9a 1322{
726a989a
RB
1323 gimple stmt;
1324 gimple_stmt_iterator gsi;
1325 gimple_seq phis;
6de9cd9a 1326
c5cbcccf 1327 if (!single_succ_p (a))
6de9cd9a
DN
1328 return false;
1329
496a4ef5 1330 if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
6de9cd9a
DN
1331 return false;
1332
c5cbcccf 1333 if (single_succ (a) != b)
6de9cd9a
DN
1334 return false;
1335
c5cbcccf 1336 if (!single_pred_p (b))
6de9cd9a
DN
1337 return false;
1338
26e75214
KH
1339 if (b == EXIT_BLOCK_PTR)
1340 return false;
6531d1be 1341
6de9cd9a
DN
1342 /* If A ends by a statement causing exceptions or something similar, we
1343 cannot merge the blocks. */
726a989a 1344 stmt = last_stmt (a);
6de9cd9a
DN
1345 if (stmt && stmt_ends_bb_p (stmt))
1346 return false;
1347
1348 /* Do not allow a block with only a non-local label to be merged. */
726a989a
RB
1349 if (stmt
1350 && gimple_code (stmt) == GIMPLE_LABEL
1351 && DECL_NONLOCAL (gimple_label_label (stmt)))
6de9cd9a
DN
1352 return false;
1353
38965eb2 1354 /* It must be possible to eliminate all phi nodes in B. If ssa form
8f8bb1d2
ZD
1355 is not up-to-date, we cannot eliminate any phis; however, if only
1356 some symbols as whole are marked for renaming, this is not a problem,
1357 as phi nodes for those symbols are irrelevant in updating anyway. */
726a989a
RB
1358 phis = phi_nodes (b);
1359 if (!gimple_seq_empty_p (phis))
38965eb2 1360 {
726a989a
RB
1361 gimple_stmt_iterator i;
1362
8f8bb1d2 1363 if (name_mappings_registered_p ())
38965eb2
ZD
1364 return false;
1365
726a989a
RB
1366 for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
1367 {
1368 gimple phi = gsi_stmt (i);
1369
1370 if (!is_gimple_reg (gimple_phi_result (phi))
1371 && !may_propagate_copy (gimple_phi_result (phi),
1372 gimple_phi_arg_def (phi, 0)))
1373 return false;
1374 }
38965eb2 1375 }
6de9cd9a
DN
1376
1377 /* Do not remove user labels. */
726a989a 1378 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 1379 {
726a989a
RB
1380 stmt = gsi_stmt (gsi);
1381 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a 1382 break;
726a989a 1383 if (!DECL_ARTIFICIAL (gimple_label_label (stmt)))
6de9cd9a
DN
1384 return false;
1385 }
1386
2b271002
ZD
1387 /* Protect the loop latches. */
1388 if (current_loops
1389 && b->loop_father->latch == b)
1390 return false;
1391
6de9cd9a
DN
1392 return true;
1393}
1394
38965eb2
ZD
1395/* Replaces all uses of NAME by VAL. */
1396
684aaf29 1397void
38965eb2
ZD
1398replace_uses_by (tree name, tree val)
1399{
1400 imm_use_iterator imm_iter;
1401 use_operand_p use;
726a989a 1402 gimple stmt;
38965eb2 1403 edge e;
38965eb2 1404
6c00f606 1405 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
38965eb2 1406 {
6c00f606
AM
1407 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1408 {
1409 replace_exp (use, val);
38965eb2 1410
726a989a 1411 if (gimple_code (stmt) == GIMPLE_PHI)
38965eb2 1412 {
726a989a 1413 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
6c00f606
AM
1414 if (e->flags & EDGE_ABNORMAL)
1415 {
1416 /* This can only occur for virtual operands, since
1417 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1418 would prevent replacement. */
1419 gcc_assert (!is_gimple_reg (name));
1420 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1421 }
38965eb2
ZD
1422 }
1423 }
cfaab3a9 1424
726a989a 1425 if (gimple_code (stmt) != GIMPLE_PHI)
6c00f606 1426 {
726a989a 1427 size_t i;
9af0df6b 1428
6c00f606 1429 fold_stmt_inplace (stmt);
672987e8 1430 if (cfgcleanup_altered_bbs)
726a989a 1431 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
cfaab3a9 1432
cff4e50d 1433 /* FIXME. This should go in update_stmt. */
726a989a
RB
1434 for (i = 0; i < gimple_num_ops (stmt); i++)
1435 {
1436 tree op = gimple_op (stmt, i);
1437 /* Operands may be empty here. For example, the labels
1438 of a GIMPLE_COND are nulled out following the creation
1439 of the corresponding CFG edges. */
1440 if (op && TREE_CODE (op) == ADDR_EXPR)
1441 recompute_tree_invariant_for_addr_expr (op);
1442 }
9af0df6b 1443
6c00f606 1444 maybe_clean_or_replace_eh_stmt (stmt, stmt);
cff4e50d 1445 update_stmt (stmt);
6c00f606 1446 }
38965eb2 1447 }
6531d1be 1448
40b448ef 1449 gcc_assert (has_zero_uses (name));
d5ab5675
ZD
1450
1451 /* Also update the trees stored in loop structures. */
1452 if (current_loops)
1453 {
1454 struct loop *loop;
42fd6772 1455 loop_iterator li;
d5ab5675 1456
42fd6772 1457 FOR_EACH_LOOP (li, loop, 0)
d5ab5675 1458 {
42fd6772 1459 substitute_in_loop_info (loop, name, val);
d5ab5675
ZD
1460 }
1461 }
38965eb2 1462}
6de9cd9a
DN
1463
1464/* Merge block B into block A. */
1465
1466static void
726a989a 1467gimple_merge_blocks (basic_block a, basic_block b)
6de9cd9a 1468{
726a989a
RB
1469 gimple_stmt_iterator last, gsi, psi;
1470 gimple_seq phis = phi_nodes (b);
6de9cd9a
DN
1471
1472 if (dump_file)
1473 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1474
c4f548b8
DN
1475 /* Remove all single-valued PHI nodes from block B of the form
1476 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
726a989a
RB
1477 gsi = gsi_last_bb (a);
1478 for (psi = gsi_start (phis); !gsi_end_p (psi); )
38965eb2 1479 {
726a989a
RB
1480 gimple phi = gsi_stmt (psi);
1481 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1482 gimple copy;
1483 bool may_replace_uses = !is_gimple_reg (def)
1484 || may_propagate_copy (def, use);
d7f0e25c 1485
7c8eb293
ZD
1486 /* In case we maintain loop closed ssa form, do not propagate arguments
1487 of loop exit phi nodes. */
d7f0e25c 1488 if (current_loops
f87000d0 1489 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
d7f0e25c
ZD
1490 && is_gimple_reg (def)
1491 && TREE_CODE (use) == SSA_NAME
1492 && a->loop_father != b->loop_father)
1493 may_replace_uses = false;
1494
1495 if (!may_replace_uses)
38965eb2
ZD
1496 {
1497 gcc_assert (is_gimple_reg (def));
1498
128a79fb 1499 /* Note that just emitting the copies is fine -- there is no problem
38965eb2
ZD
1500 with ordering of phi nodes. This is because A is the single
1501 predecessor of B, therefore results of the phi nodes cannot
1502 appear as arguments of the phi nodes. */
726a989a
RB
1503 copy = gimple_build_assign (def, use);
1504 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1505 remove_phi_node (&psi, false);
38965eb2
ZD
1506 }
1507 else
611021e1 1508 {
d0f76c4b
RG
1509 /* If we deal with a PHI for virtual operands, we can simply
1510 propagate these without fussing with folding or updating
1511 the stmt. */
1512 if (!is_gimple_reg (def))
1513 {
1514 imm_use_iterator iter;
1515 use_operand_p use_p;
726a989a 1516 gimple stmt;
d0f76c4b
RG
1517
1518 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1519 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1520 SET_USE (use_p, use);
1521 }
1522 else
1523 replace_uses_by (def, use);
726a989a
RB
1524
1525 remove_phi_node (&psi, true);
611021e1 1526 }
38965eb2
ZD
1527 }
1528
6de9cd9a
DN
1529 /* Ensure that B follows A. */
1530 move_block_after (b, a);
1531
c5cbcccf 1532 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1e128c5f 1533 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
6de9cd9a 1534
726a989a
RB
1535 /* Remove labels from B and set gimple_bb to A for other statements. */
1536 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
6de9cd9a 1537 {
726a989a 1538 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
be477406 1539 {
726a989a
RB
1540 gimple label = gsi_stmt (gsi);
1541
1542 gsi_remove (&gsi, false);
be477406 1543
be477406
JL
1544 /* Now that we can thread computed gotos, we might have
1545 a situation where we have a forced label in block B
1546 However, the label at the start of block B might still be
1547 used in other ways (think about the runtime checking for
1548 Fortran assigned gotos). So we can not just delete the
1549 label. Instead we move the label to the start of block A. */
726a989a 1550 if (FORCED_LABEL (gimple_label_label (label)))
be477406 1551 {
726a989a
RB
1552 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1553 gsi_insert_before (&dest_gsi, label, GSI_NEW_STMT);
be477406
JL
1554 }
1555 }
6de9cd9a
DN
1556 else
1557 {
726a989a
RB
1558 gimple_set_bb (gsi_stmt (gsi), a);
1559 gsi_next (&gsi);
6de9cd9a
DN
1560 }
1561 }
1562
726a989a
RB
1563 /* Merge the sequences. */
1564 last = gsi_last_bb (a);
1565 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1566 set_bb_seq (b, NULL);
672987e8
ZD
1567
1568 if (cfgcleanup_altered_bbs)
1569 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
6de9cd9a
DN
1570}
1571
1572
bc23502b 1573/* Return the one of two successors of BB that is not reachable by a
2cd713a0 1574 complex edge, if there is one. Else, return BB. We use
bc23502b
PB
1575 this in optimizations that use post-dominators for their heuristics,
1576 to catch the cases in C++ where function calls are involved. */
6531d1be 1577
bc23502b 1578basic_block
6531d1be 1579single_noncomplex_succ (basic_block bb)
bc23502b
PB
1580{
1581 edge e0, e1;
1582 if (EDGE_COUNT (bb->succs) != 2)
1583 return bb;
6531d1be 1584
bc23502b
PB
1585 e0 = EDGE_SUCC (bb, 0);
1586 e1 = EDGE_SUCC (bb, 1);
1587 if (e0->flags & EDGE_COMPLEX)
1588 return e1->dest;
1589 if (e1->flags & EDGE_COMPLEX)
1590 return e0->dest;
6531d1be 1591
bc23502b 1592 return bb;
6531d1be 1593}
bc23502b
PB
1594
1595
6de9cd9a
DN
1596/* Walk the function tree removing unnecessary statements.
1597
1598 * Empty statement nodes are removed
1599
1600 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1601
1602 * Unnecessary COND_EXPRs are removed
1603
1604 * Some unnecessary BIND_EXPRs are removed
1605
726a989a
RB
1606 * GOTO_EXPRs immediately preceding destination are removed.
1607
6de9cd9a
DN
1608 Clearly more work could be done. The trick is doing the analysis
1609 and removal fast enough to be a net improvement in compile times.
1610
1611 Note that when we remove a control structure such as a COND_EXPR
1612 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1613 to ensure we eliminate all the useless code. */
1614
1615struct rus_data
1616{
6de9cd9a
DN
1617 bool repeat;
1618 bool may_throw;
1619 bool may_branch;
1620 bool has_label;
726a989a
RB
1621 bool last_was_goto;
1622 gimple_stmt_iterator last_goto_gsi;
6de9cd9a
DN
1623};
1624
726a989a
RB
1625
1626static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);
1627
1628/* Given a statement sequence, find the first executable statement with
1629 location information, and warn that it is unreachable. When searching,
1630 descend into containers in execution order. */
6de9cd9a
DN
1631
1632static bool
726a989a 1633remove_useless_stmts_warn_notreached (gimple_seq stmts)
6de9cd9a 1634{
726a989a 1635 gimple_stmt_iterator gsi;
6de9cd9a 1636
726a989a 1637 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 1638 {
726a989a 1639 gimple stmt = gsi_stmt (gsi);
6de9cd9a 1640
726a989a
RB
1641 if (gimple_has_location (stmt))
1642 {
1643 location_t loc = gimple_location (stmt);
1644 if (LOCATION_LINE (loc) > 0)
1645 {
1646 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
1647 return true;
1648 }
1649 }
6de9cd9a 1650
726a989a
RB
1651 switch (gimple_code (stmt))
1652 {
1653 /* Unfortunately, we need the CFG now to detect unreachable
1654 branches in a conditional, so conditionals are not handled here. */
6de9cd9a 1655
726a989a
RB
1656 case GIMPLE_TRY:
1657 if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
1658 return true;
1659 if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
1660 return true;
1661 break;
6de9cd9a 1662
726a989a
RB
1663 case GIMPLE_CATCH:
1664 return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));
1665
1666 case GIMPLE_EH_FILTER:
1667 return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));
1668
1669 case GIMPLE_BIND:
1670 return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));
1671
1672 default:
1673 break;
1674 }
6de9cd9a
DN
1675 }
1676
1677 return false;
1678}
1679
726a989a
RB
1680/* Helper for remove_useless_stmts_1. Handle GIMPLE_COND statements. */
1681
6de9cd9a 1682static void
726a989a 1683remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a 1684{
726a989a 1685 gimple stmt = gsi_stmt (*gsi);
6de9cd9a 1686
726a989a 1687 /* The folded result must still be a conditional statement. */
2586ba4b
RG
1688 fold_stmt (gsi);
1689 gcc_assert (gsi_stmt (*gsi) == stmt);
6de9cd9a 1690
726a989a 1691 data->may_branch = true;
6de9cd9a 1692
726a989a
RB
1693 /* Replace trivial conditionals with gotos. */
1694 if (gimple_cond_true_p (stmt))
6de9cd9a 1695 {
726a989a
RB
1696 /* Goto THEN label. */
1697 tree then_label = gimple_cond_true_label (stmt);
6de9cd9a 1698
726a989a
RB
1699 gsi_replace (gsi, gimple_build_goto (then_label), false);
1700 data->last_goto_gsi = *gsi;
1701 data->last_was_goto = true;
6de9cd9a
DN
1702 data->repeat = true;
1703 }
726a989a 1704 else if (gimple_cond_false_p (stmt))
6de9cd9a 1705 {
726a989a
RB
1706 /* Goto ELSE label. */
1707 tree else_label = gimple_cond_false_label (stmt);
1708
1709 gsi_replace (gsi, gimple_build_goto (else_label), false);
1710 data->last_goto_gsi = *gsi;
1711 data->last_was_goto = true;
6de9cd9a
DN
1712 data->repeat = true;
1713 }
6de9cd9a
DN
1714 else
1715 {
726a989a
RB
1716 tree then_label = gimple_cond_true_label (stmt);
1717 tree else_label = gimple_cond_false_label (stmt);
6de9cd9a 1718
726a989a
RB
1719 if (then_label == else_label)
1720 {
1721 /* Goto common destination. */
1722 gsi_replace (gsi, gimple_build_goto (then_label), false);
1723 data->last_goto_gsi = *gsi;
1724 data->last_was_goto = true;
6de9cd9a
DN
1725 data->repeat = true;
1726 }
6de9cd9a
DN
1727 }
1728
726a989a
RB
1729 gsi_next (gsi);
1730
1731 data->last_was_goto = false;
6de9cd9a
DN
1732}
1733
726a989a
RB
1734/* Helper for remove_useless_stmts_1.
1735 Handle the try-finally case for GIMPLE_TRY statements. */
6de9cd9a
DN
1736
1737static void
726a989a 1738remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a
DN
1739{
1740 bool save_may_branch, save_may_throw;
1741 bool this_may_branch, this_may_throw;
1742
726a989a
RB
1743 gimple_seq eval_seq, cleanup_seq;
1744 gimple_stmt_iterator eval_gsi, cleanup_gsi;
1745
1746 gimple stmt = gsi_stmt (*gsi);
1747
6de9cd9a
DN
1748 /* Collect may_branch and may_throw information for the body only. */
1749 save_may_branch = data->may_branch;
1750 save_may_throw = data->may_throw;
1751 data->may_branch = false;
1752 data->may_throw = false;
726a989a 1753 data->last_was_goto = false;
6de9cd9a 1754
726a989a
RB
1755 eval_seq = gimple_try_eval (stmt);
1756 eval_gsi = gsi_start (eval_seq);
1757 remove_useless_stmts_1 (&eval_gsi, data);
6de9cd9a
DN
1758
1759 this_may_branch = data->may_branch;
1760 this_may_throw = data->may_throw;
1761 data->may_branch |= save_may_branch;
1762 data->may_throw |= save_may_throw;
726a989a 1763 data->last_was_goto = false;
6de9cd9a 1764
726a989a
RB
1765 cleanup_seq = gimple_try_cleanup (stmt);
1766 cleanup_gsi = gsi_start (cleanup_seq);
1767 remove_useless_stmts_1 (&cleanup_gsi, data);
6de9cd9a
DN
1768
1769 /* If the body is empty, then we can emit the FINALLY block without
1770 the enclosing TRY_FINALLY_EXPR. */
726a989a 1771 if (gimple_seq_empty_p (eval_seq))
6de9cd9a 1772 {
726a989a
RB
1773 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1774 gsi_remove (gsi, false);
6de9cd9a
DN
1775 data->repeat = true;
1776 }
1777
1778 /* If the handler is empty, then we can emit the TRY block without
1779 the enclosing TRY_FINALLY_EXPR. */
726a989a 1780 else if (gimple_seq_empty_p (cleanup_seq))
6de9cd9a 1781 {
726a989a
RB
1782 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1783 gsi_remove (gsi, false);
6de9cd9a
DN
1784 data->repeat = true;
1785 }
1786
1787 /* If the body neither throws, nor branches, then we can safely
1788 string the TRY and FINALLY blocks together. */
1789 else if (!this_may_branch && !this_may_throw)
1790 {
726a989a
RB
1791 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1792 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1793 gsi_remove (gsi, false);
6de9cd9a
DN
1794 data->repeat = true;
1795 }
726a989a
RB
1796 else
1797 gsi_next (gsi);
6de9cd9a
DN
1798}
1799
726a989a
RB
1800/* Helper for remove_useless_stmts_1.
1801 Handle the try-catch case for GIMPLE_TRY statements. */
6de9cd9a
DN
1802
1803static void
726a989a 1804remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a
DN
1805{
1806 bool save_may_throw, this_may_throw;
726a989a
RB
1807
1808 gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
1809 gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;
1810
1811 gimple stmt = gsi_stmt (*gsi);
6de9cd9a
DN
1812
1813 /* Collect may_throw information for the body only. */
1814 save_may_throw = data->may_throw;
1815 data->may_throw = false;
726a989a 1816 data->last_was_goto = false;
6de9cd9a 1817
726a989a
RB
1818 eval_seq = gimple_try_eval (stmt);
1819 eval_gsi = gsi_start (eval_seq);
1820 remove_useless_stmts_1 (&eval_gsi, data);
6de9cd9a
DN
1821
1822 this_may_throw = data->may_throw;
1823 data->may_throw = save_may_throw;
1824
726a989a
RB
1825 cleanup_seq = gimple_try_cleanup (stmt);
1826
6de9cd9a
DN
1827 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1828 if (!this_may_throw)
1829 {
1830 if (warn_notreached)
726a989a
RB
1831 {
1832 remove_useless_stmts_warn_notreached (cleanup_seq);
1833 }
1834 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1835 gsi_remove (gsi, false);
6de9cd9a
DN
1836 data->repeat = true;
1837 return;
1838 }
1839
1840 /* Process the catch clause specially. We may be able to tell that
1841 no exceptions propagate past this point. */
1842
1843 this_may_throw = true;
726a989a
RB
1844 cleanup_gsi = gsi_start (cleanup_seq);
1845 stmt = gsi_stmt (cleanup_gsi);
1846 data->last_was_goto = false;
6de9cd9a 1847
726a989a 1848 switch (gimple_code (stmt))
6de9cd9a 1849 {
726a989a
RB
1850 case GIMPLE_CATCH:
1851 /* If the first element is a catch, they all must be. */
1852 while (!gsi_end_p (cleanup_gsi))
1853 {
1854 stmt = gsi_stmt (cleanup_gsi);
6de9cd9a
DN
1855 /* If we catch all exceptions, then the body does not
1856 propagate exceptions past this point. */
726a989a 1857 if (gimple_catch_types (stmt) == NULL)
6de9cd9a 1858 this_may_throw = false;
726a989a
RB
1859 data->last_was_goto = false;
1860 handler_seq = gimple_catch_handler (stmt);
1861 handler_gsi = gsi_start (handler_seq);
1862 remove_useless_stmts_1 (&handler_gsi, data);
1863 gsi_next (&cleanup_gsi);
6de9cd9a 1864 }
726a989a 1865 gsi_next (gsi);
6de9cd9a
DN
1866 break;
1867
726a989a
RB
1868 case GIMPLE_EH_FILTER:
1869 /* If the first element is an eh_filter, it should stand alone. */
1870 if (gimple_eh_filter_must_not_throw (stmt))
6de9cd9a 1871 this_may_throw = false;
726a989a 1872 else if (gimple_eh_filter_types (stmt) == NULL)
6de9cd9a 1873 this_may_throw = false;
726a989a
RB
1874 failure_seq = gimple_eh_filter_failure (stmt);
1875 failure_gsi = gsi_start (failure_seq);
1876 remove_useless_stmts_1 (&failure_gsi, data);
1877 gsi_next (gsi);
6de9cd9a
DN
1878 break;
1879
1880 default:
726a989a
RB
1881 /* Otherwise this is a list of cleanup statements. */
1882 remove_useless_stmts_1 (&cleanup_gsi, data);
6de9cd9a
DN
1883
1884 /* If the cleanup is empty, then we can emit the TRY block without
1885 the enclosing TRY_CATCH_EXPR. */
726a989a 1886 if (gimple_seq_empty_p (cleanup_seq))
6de9cd9a 1887 {
726a989a
RB
1888 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1889 gsi_remove(gsi, false);
6de9cd9a
DN
1890 data->repeat = true;
1891 }
726a989a
RB
1892 else
1893 gsi_next (gsi);
6de9cd9a
DN
1894 break;
1895 }
726a989a 1896
6de9cd9a
DN
1897 data->may_throw |= this_may_throw;
1898}
1899
726a989a 1900/* Helper for remove_useless_stmts_1. Handle GIMPLE_BIND statements. */
6de9cd9a
DN
1901
1902static void
726a989a 1903remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data ATTRIBUTE_UNUSED)
6de9cd9a
DN
1904{
1905 tree block;
726a989a
RB
1906 gimple_seq body_seq, fn_body_seq;
1907 gimple_stmt_iterator body_gsi;
1908
1909 gimple stmt = gsi_stmt (*gsi);
6de9cd9a
DN
1910
1911 /* First remove anything underneath the BIND_EXPR. */
726a989a
RB
1912
1913 body_seq = gimple_bind_body (stmt);
1914 body_gsi = gsi_start (body_seq);
1915 remove_useless_stmts_1 (&body_gsi, data);
6de9cd9a 1916
726a989a
RB
1917 /* If the GIMPLE_BIND has no variables, then we can pull everything
1918 up one level and remove the GIMPLE_BIND, unless this is the toplevel
1919 GIMPLE_BIND for the current function or an inlined function.
6de9cd9a
DN
1920
1921 When this situation occurs we will want to apply this
1922 optimization again. */
726a989a
RB
1923 block = gimple_bind_block (stmt);
1924 fn_body_seq = gimple_body (current_function_decl);
1925 if (gimple_bind_vars (stmt) == NULL_TREE
1926 && (gimple_seq_empty_p (fn_body_seq)
1927 || stmt != gimple_seq_first_stmt (fn_body_seq))
6de9cd9a
DN
1928 && (! block
1929 || ! BLOCK_ABSTRACT_ORIGIN (block)
1930 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1931 != FUNCTION_DECL)))
1932 {
ee0ee7e2
JJ
1933 tree var = NULL_TREE;
1934 /* Even if there are no gimple_bind_vars, there might be other
1935 decls in BLOCK_VARS rendering the GIMPLE_BIND not useless. */
9f0e7885 1936 if (block && !BLOCK_NUM_NONLOCALIZED_VARS (block))
ee0ee7e2
JJ
1937 for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
1938 if (TREE_CODE (var) == IMPORTED_DECL)
1939 break;
9f0e7885 1940 if (var || (block && BLOCK_NUM_NONLOCALIZED_VARS (block)))
ee0ee7e2
JJ
1941 gsi_next (gsi);
1942 else
1943 {
1944 gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
1945 gsi_remove (gsi, false);
1946 data->repeat = true;
1947 }
6de9cd9a 1948 }
726a989a
RB
1949 else
1950 gsi_next (gsi);
6de9cd9a
DN
1951}
1952
726a989a 1953/* Helper for remove_useless_stmts_1. Handle GIMPLE_GOTO statements. */
6de9cd9a
DN
1954
1955static void
726a989a 1956remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a 1957{
726a989a
RB
1958 gimple stmt = gsi_stmt (*gsi);
1959
1960 tree dest = gimple_goto_dest (stmt);
6de9cd9a
DN
1961
1962 data->may_branch = true;
726a989a 1963 data->last_was_goto = false;
6de9cd9a 1964
726a989a 1965 /* Record iterator for last goto expr, so that we can delete it if unnecessary. */
6de9cd9a 1966 if (TREE_CODE (dest) == LABEL_DECL)
726a989a
RB
1967 {
1968 data->last_goto_gsi = *gsi;
1969 data->last_was_goto = true;
1970 }
1971
1972 gsi_next(gsi);
6de9cd9a
DN
1973}
1974
/* Helper for remove_useless_stmts_1.  Handle GIMPLE_LABEL statements.

   If the immediately preceding statement (recorded in
   DATA->last_goto_gsi) is a goto whose destination is exactly this
   label, the goto is redundant and is neutralized.  Always advances
   GSI past the label.  */

static void
remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  tree label = gimple_label_label (stmt);

  data->has_label = true;

  /* We do want to jump across non-local label receiver code.  */
  if (DECL_NONLOCAL (label))
    data->last_was_goto = false;

  else if (data->last_was_goto
           && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
    {
      /* Replace the preceding GIMPLE_GOTO statement with
         a GIMPLE_NOP, which will be subsequently removed.
         In this way, we avoid invalidating other iterators
         active on the statement sequence.  */
      gsi_replace(&data->last_goto_gsi, gimple_build_nop(), false);
      data->last_was_goto = false;
      /* Removing the goto may expose further simplifications; ask the
         driver loop to run again.  */
      data->repeat = true;
    }

  /* ??? Add something here to delete unused labels.  */

  gsi_next (gsi);
}
2006
2007
/* CALL is a GIMPLE_CALL.  Set the cfun->calls_alloca / cfun->calls_setjmp
   flags according to the call's ECF_* flags.  (The old comment referred
   to CALL_EXPR and the pre-tuples current_function_calls_* globals.)  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}
2020
2021
2022/* Clear flags set by notice_special_calls. Used by dead code removal
2023 to update the flags. */
2024
2025void
2026clear_special_calls (void)
2027{
e3b5732b
JH
2028 cfun->calls_alloca = false;
2029 cfun->calls_setjmp = false;
6de9cd9a
DN
2030}
2031
/* Remove useless statements from a statement sequence, and perform
   some preliminary simplifications.

   Walks the sequence at GSI to its end, dispatching per statement code.
   Each case is responsible for advancing GSI itself (either directly or
   via its helper); GIMPLE_NOP is the only case that removes without
   advancing, which is correct because gsi_remove leaves the iterator on
   the following statement.  DATA accumulates may_throw / may_branch /
   has_label / repeat across the walk.  */

static void
remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  while (!gsi_end_p (*gsi))
    {
      gimple stmt = gsi_stmt (*gsi);

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          remove_useless_stmts_cond (gsi, data);
          break;

        case GIMPLE_GOTO:
          remove_useless_stmts_goto (gsi, data);
          break;

        case GIMPLE_LABEL:
          remove_useless_stmts_label (gsi, data);
          break;

        case GIMPLE_ASSIGN:
          fold_stmt (gsi);
          /* fold_stmt may have replaced the statement; re-fetch it.  */
          stmt = gsi_stmt (*gsi);
          data->last_was_goto = false;
          if (stmt_could_throw_p (stmt))
            data->may_throw = true;
          gsi_next (gsi);
          break;

        case GIMPLE_ASM:
          fold_stmt (gsi);
          data->last_was_goto = false;
          gsi_next (gsi);
          break;

        case GIMPLE_CALL:
          fold_stmt (gsi);
          stmt = gsi_stmt (*gsi);
          data->last_was_goto = false;
          /* Folding may have turned the call into something else, so
             check before updating the cfun->calls_* flags.  */
          if (is_gimple_call (stmt))
            notice_special_calls (stmt);

          /* We used to call update_gimple_call_flags here,
             which copied side-effects and nothrows status
             from the function decl to the call.  In the new
             tuplified GIMPLE, the accessors for this information
             always consult the function decl, so this copying
             is no longer necessary.  */
          if (stmt_could_throw_p (stmt))
            data->may_throw = true;
          gsi_next (gsi);
          break;

        case GIMPLE_RETURN:
          fold_stmt (gsi);
          data->last_was_goto = false;
          data->may_branch = true;
          gsi_next (gsi);
          break;

        case GIMPLE_BIND:
          remove_useless_stmts_bind (gsi, data);
          break;

        case GIMPLE_TRY:
          if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
            remove_useless_stmts_tc (gsi, data);
          else if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
            remove_useless_stmts_tf (gsi, data);
          else
            gcc_unreachable ();
          break;

        case GIMPLE_CATCH:
          /* Catches only appear inside a GIMPLE_TRY cleanup and are
             processed there, never at this level.  */
          gcc_unreachable ();
          break;

        case GIMPLE_NOP:
          gsi_remove (gsi, false);
          break;

        case GIMPLE_OMP_FOR:
          {
            gimple_seq pre_body_seq = gimple_omp_for_pre_body (stmt);
            gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);

            remove_useless_stmts_1 (&pre_body_gsi, data);
            data->last_was_goto = false;
          }
          /* FALLTHROUGH */
        case GIMPLE_OMP_CRITICAL:
        case GIMPLE_OMP_CONTINUE:
        case GIMPLE_OMP_MASTER:
        case GIMPLE_OMP_ORDERED:
        case GIMPLE_OMP_SECTION:
        case GIMPLE_OMP_SECTIONS:
        case GIMPLE_OMP_SINGLE:
          {
            gimple_seq body_seq = gimple_omp_body (stmt);
            gimple_stmt_iterator body_gsi = gsi_start (body_seq);

            remove_useless_stmts_1 (&body_gsi, data);
            data->last_was_goto = false;
            gsi_next (gsi);
          }
          break;

        case GIMPLE_OMP_PARALLEL:
        case GIMPLE_OMP_TASK:
          {
            /* Make sure the outermost GIMPLE_BIND isn't removed
               as useless.  */
            gimple_seq body_seq = gimple_omp_body (stmt);
            gimple bind = gimple_seq_first_stmt (body_seq);
            gimple_seq bind_seq = gimple_bind_body (bind);
            gimple_stmt_iterator bind_gsi = gsi_start (bind_seq);

            remove_useless_stmts_1 (&bind_gsi, data);
            data->last_was_goto = false;
            gsi_next (gsi);
          }
          break;

        default:
          data->last_was_goto = false;
          gsi_next (gsi);
          break;
        }
    }
}
2166
726a989a
RB
2167/* Walk the function tree, removing useless statements and performing
2168 some preliminary simplifications. */
2169
c2924966 2170static unsigned int
6de9cd9a
DN
2171remove_useless_stmts (void)
2172{
2173 struct rus_data data;
2174
2175 clear_special_calls ();
2176
2177 do
2178 {
726a989a
RB
2179 gimple_stmt_iterator gsi;
2180
2181 gsi = gsi_start (gimple_body (current_function_decl));
6de9cd9a 2182 memset (&data, 0, sizeof (data));
726a989a 2183 remove_useless_stmts_1 (&gsi, &data);
6de9cd9a
DN
2184 }
2185 while (data.repeat);
211ca15c
RG
2186
2187#ifdef ENABLE_TYPES_CHECKING
2188 verify_types_in_gimple_seq (gimple_body (current_function_decl));
2189#endif
2190
c2924966 2191 return 0;
6de9cd9a
DN
2192}
2193
2194
/* Pass descriptor for the "useless" pass: the pre-CFG cleanup
   implemented by remove_useless_stmts.  Runs unconditionally (no gate)
   on any GIMPLE body and dumps the function afterwards.  */

struct gimple_opt_pass pass_remove_useless_stmts =
{
 {
  GIMPLE_PASS,
  "useless",				/* name */
  NULL,					/* gate */
  remove_useless_stmts,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
2213
6de9cd9a
DN
2214/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2215
2216static void
2217remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2218{
6de9cd9a
DN
2219 /* Since this block is no longer reachable, we can just delete all
2220 of its PHI nodes. */
81b822d5 2221 remove_phi_nodes (bb);
6de9cd9a
DN
2222
2223 /* Remove edges to BB's successors. */
628f6a4e 2224 while (EDGE_COUNT (bb->succs) > 0)
d0d2cc21 2225 remove_edge (EDGE_SUCC (bb, 0));
6de9cd9a
DN
2226}
2227
2228
/* Remove statements of basic block BB.

   Forced/non-local labels that may still be referenced are rescued
   into the previous block instead of being deleted; everything else is
   removed (releasing SSA defs when in SSA form).  May emit a
   -Wunreachable-code warning for the block.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;
  /* Location of a removed non-goto statement, used for the
     unreachable-code warning below.  */
  source_location loc = UNKNOWN_LOCATION;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (bb, dump_file, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              /* Move the still-referenced label to the previous block.
                 (gsi_remove advances I to the next statement.)  */
              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          /* Don't warn for removed gotos.  Gotos are often removed due to
             jump threading, thus resulting in bogus warnings.  Not great,
             since this way we lose warnings for gotos in the original
             program that are indeed unreachable.  */
          if (gimple_code (stmt) != GIMPLE_GOTO
              && gimple_has_location (stmt)
              && !loc)
            loc = gimple_location (stmt);
        }
    }

  /* If requested, give a warning that the first statement in the
     block is unreachable.  We walk statements backwards in the
     loop above, so the last statement we process is the first statement
     in the block.
     NOTE(review): the loop above actually iterates forward and LOC
     keeps the FIRST recorded location (the "&& !loc" guard), so the
     result matches the warning's intent, but the "backwards" wording
     looks stale — confirm against history before relying on it.  */
  if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
    warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple = NULL;
}
2318
6de9cd9a 2319
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  /* BB must end in a control statement for the question to make
     sense.  */
  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* Only a constant predicate can select a unique edge.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
         not a label then we can not construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  /* Any other control statement (return, resx) has no selectable
     outgoing edge here.  */
  gcc_unreachable ();
}
2362
be477406
JL
2363/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2364 statement, determine which of the outgoing edges will be taken out of the
2365 block. Return NULL if either edge may be taken. */
2366
2367static edge
2368find_taken_edge_computed_goto (basic_block bb, tree val)
2369{
2370 basic_block dest;
2371 edge e = NULL;
2372
2373 dest = label_to_block (val);
2374 if (dest)
2375 {
2376 e = find_edge (bb, dest);
2377 gcc_assert (e != NULL);
2378 }
2379
2380 return e;
2381}
6de9cd9a
DN
2382
2383/* Given a constant value VAL and the entry block BB to a COND_EXPR
2384 statement, determine which of the two edges will be taken out of the
2385 block. Return NULL if either edge may be taken. */
2386
2387static edge
2388find_taken_edge_cond_expr (basic_block bb, tree val)
2389{
2390 edge true_edge, false_edge;
2391
2392 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
6531d1be 2393
f1b19062 2394 gcc_assert (TREE_CODE (val) == INTEGER_CST);
6e682d7e 2395 return (integer_zerop (val) ? false_edge : true_edge);
6de9cd9a
DN
2396}
2397
fca01525 2398/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
6de9cd9a
DN
2399 statement, determine which edge will be taken out of the block. Return
2400 NULL if any edge may be taken. */
2401
2402static edge
2403find_taken_edge_switch_expr (basic_block bb, tree val)
2404{
6de9cd9a
DN
2405 basic_block dest_bb;
2406 edge e;
726a989a
RB
2407 gimple switch_stmt;
2408 tree taken_case;
6de9cd9a 2409
726a989a
RB
2410 switch_stmt = last_stmt (bb);
2411 taken_case = find_case_label_for_value (switch_stmt, val);
6de9cd9a
DN
2412 dest_bb = label_to_block (CASE_LABEL (taken_case));
2413
2414 e = find_edge (bb, dest_bb);
1e128c5f 2415 gcc_assert (e);
6de9cd9a
DN
2416 return e;
2417}
2418
2419
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.
   Falls back to the default label when no case covers VAL.  */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Invariant: labels below LOW compare <= VAL, labels at or above
     HIGH compare > VAL; the loop narrows [LOW, HIGH) to one slot.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
        high = i;
      else
        low = i;

      if (CASE_HIGH (t) == NULL)
        {
          /* A single-valued case label.  */
          if (cmp == 0)
            return t;
        }
      else
        {
          /* A case range.  We can only handle integer ranges.  */
          if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
            return t;
        }
    }

  return default_case;
}
2460
2461
6de9cd9a
DN
2462/* Dump a basic block on stderr. */
2463
2464void
726a989a 2465gimple_debug_bb (basic_block bb)
6de9cd9a 2466{
726a989a 2467 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
6de9cd9a
DN
2468}
2469
2470
2471/* Dump basic block with index N on stderr. */
2472
2473basic_block
726a989a 2474gimple_debug_bb_n (int n)
6de9cd9a 2475{
726a989a 2476 gimple_debug_bb (BASIC_BLOCK (n));
6de9cd9a 2477 return BASIC_BLOCK (n);
6531d1be 2478}
6de9cd9a
DN
2479
2480
/* Print the CFG of the current function to stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree-pass.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
2491
2492
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  TDF_DETAILS adds a header and a brief CFG summary,
   TDF_STATS appends memory statistics.  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      const char *funcname
        = lang_hooks.decl_printable_name (current_function_decl, 2);

      fputc ('\n', file);
      fprintf (file, ";; Function %s\n\n", funcname);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
               n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2520
2521
/* Dump CFG statistics on FILE: approximate memory used by basic blocks
   and edges for the current function, plus the running maximum of
   coalesced label blocks across functions.  */

void
dump_cfg_stats (FILE *file)
{
  /* Persists across calls so the "Max so far" line is global.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);


  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
           SCALE (size), LABEL (size));

  /* Edges are counted by summing each block's successor count.  */
  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
           LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
           cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2572
2573
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable (callable from a debugger).  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2582
2583
/* Dump the flowgraph of the current function to FILE in .vcg format
   (for the VCG graph visualization tool).  Each block becomes a node
   labeled with its first/last statement codes and line numbers; fake
   edges are drawn dotted, real ones solid.  */

static void
gimple_cfg2vcg (FILE *file)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Write the file header.  */
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");

  /* Write blocks and edges.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
               e->dest->index);

      if (e->flags & EDGE_FAKE)
        fprintf (file, " linestyle: dotted priority: 10");
      else
        fprintf (file, " linestyle: solid priority: 100");

      fprintf (file, " }\n");
    }
  fputc ('\n', file);

  FOR_EACH_BB (bb)
    {
      enum gimple_code head_code, end_code;
      const char *head_name, *end_name;
      int head_line = 0;
      int end_line = 0;
      gimple first = first_stmt (bb);
      gimple last = last_stmt (bb);

      if (first)
        {
          head_code = gimple_code (first);
          head_name = gimple_code_name[head_code];
          head_line = get_lineno (first);
        }
      else
        head_name = "no-statement";

      if (last)
        {
          end_code = gimple_code (last);
          end_name = gimple_code_name[end_code];
          end_line = get_lineno (last);
        }
      else
        end_name = "no-statement";

      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
               bb->index, bb->index, head_name, head_line, end_name,
               end_line);

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (e->dest == EXIT_BLOCK_PTR)
            fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
          else
            fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);

          if (e->flags & EDGE_FAKE)
            fprintf (file, " priority: 10 linestyle: dotted");
          else
            fprintf (file, " priority: 100 linestyle: solid");

          fprintf (file, " }\n");
        }

      if (bb->next_bb != EXIT_BLOCK_PTR)
        fputc ('\n', file);
    }

  fputs ("}\n\n", file);
}
2667
2668
2669
2670/*---------------------------------------------------------------------------
2671 Miscellaneous helpers
2672---------------------------------------------------------------------------*/
2673
2674/* Return true if T represents a stmt that always transfers control. */
2675
2676bool
726a989a 2677is_ctrl_stmt (gimple t)
6de9cd9a 2678{
726a989a
RB
2679 return gimple_code (t) == GIMPLE_COND
2680 || gimple_code (t) == GIMPLE_SWITCH
2681 || gimple_code (t) == GIMPLE_GOTO
2682 || gimple_code (t) == GIMPLE_RETURN
2683 || gimple_code (t) == GIMPLE_RESX;
6de9cd9a
DN
2684}
2685
2686
2687/* Return true if T is a statement that may alter the flow of control
2688 (e.g., a call to a non-returning function). */
2689
2690bool
726a989a 2691is_ctrl_altering_stmt (gimple t)
6de9cd9a 2692{
1e128c5f 2693 gcc_assert (t);
726a989a
RB
2694
2695 if (is_gimple_call (t))
6de9cd9a 2696 {
726a989a
RB
2697 int flags = gimple_call_flags (t);
2698
2699 /* A non-pure/const call alters flow control if the current
6de9cd9a 2700 function has nonlocal labels. */
726a989a
RB
2701 if (!(flags & (ECF_CONST | ECF_PURE))
2702 && cfun->has_nonlocal_label)
6de9cd9a
DN
2703 return true;
2704
726a989a
RB
2705 /* A call also alters control flow if it does not return. */
2706 if (gimple_call_flags (t) & ECF_NORETURN)
6de9cd9a 2707 return true;
6de9cd9a
DN
2708 }
2709
50674e96 2710 /* OpenMP directives alter control flow. */
726a989a 2711 if (is_gimple_omp (t))
50674e96
DN
2712 return true;
2713
6de9cd9a 2714 /* If a statement can throw, it alters control flow. */
726a989a 2715 return stmt_can_throw_internal (t);
6de9cd9a
DN
2716}
2717
2718
4f6c2131 2719/* Return true if T is a simple local goto. */
6de9cd9a
DN
2720
2721bool
726a989a 2722simple_goto_p (gimple t)
6de9cd9a 2723{
726a989a
RB
2724 return (gimple_code (t) == GIMPLE_GOTO
2725 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
4f6c2131
EB
2726}
2727
2728
2729/* Return true if T can make an abnormal transfer of control flow.
2730 Transfers of control flow associated with EH are excluded. */
2731
2732bool
726a989a 2733stmt_can_make_abnormal_goto (gimple t)
4f6c2131
EB
2734{
2735 if (computed_goto_p (t))
2736 return true;
726a989a
RB
2737 if (is_gimple_call (t))
2738 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
4f6c2131 2739 return false;
6de9cd9a
DN
2740}
2741
2742
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.

   Side effect: bumps cfg_stats.num_merged_labels when a label is
   coalesced into the preceding label's block.  */

static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (stmt))
          || FORCED_LABEL (gimple_label_label (stmt)))
        return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
        {
          /* A label cannot share a block with a preceding non-local
             label (the latter is an abnormal receiver).  */
          if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
            return true;

          cfg_stats.num_merged_labels++;
          return false;
        }
      else
        return true;
    }

  return false;
}
2780
2781
2782/* Return true if T should end a basic block. */
2783
2784bool
726a989a 2785stmt_ends_bb_p (gimple t)
6de9cd9a
DN
2786{
2787 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2788}
2789
/* Remove block annotations and other data structures.
   Currently this only drops the label-to-block map; the map itself is
   garbage collected once unreferenced.  */

void
delete_tree_cfg_annotations (void)
{
  label_to_block_map = NULL;
}
2797
2798
2799/* Return the first statement in basic block BB. */
2800
726a989a 2801gimple
6de9cd9a
DN
2802first_stmt (basic_block bb)
2803{
726a989a
RB
2804 gimple_stmt_iterator i = gsi_start_bb (bb);
2805 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
6de9cd9a
DN
2806}
2807
6c52e687
CC
2808/* Return the first non-label statement in basic block BB. */
2809
2810static gimple
2811first_non_label_stmt (basic_block bb)
2812{
2813 gimple_stmt_iterator i = gsi_start_bb (bb);
2814 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2815 gsi_next (&i);
2816 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2817}
2818
6de9cd9a
DN
2819/* Return the last statement in basic block BB. */
2820
726a989a 2821gimple
6de9cd9a
DN
2822last_stmt (basic_block bb)
2823{
726a989a
RB
2824 gimple_stmt_iterator b = gsi_last_bb (bb);
2825 return !gsi_end_p (b) ? gsi_stmt (b) : NULL;
6de9cd9a
DN
2826}
2827
6de9cd9a
DN
2828/* Return the last statement of an otherwise empty block. Return NULL
2829 if the block is totally empty, or if it contains more than one
2830 statement. */
2831
726a989a 2832gimple
6de9cd9a
DN
2833last_and_only_stmt (basic_block bb)
2834{
726a989a
RB
2835 gimple_stmt_iterator i = gsi_last_bb (bb);
2836 gimple last, prev;
6de9cd9a 2837
726a989a
RB
2838 if (gsi_end_p (i))
2839 return NULL;
6de9cd9a 2840
726a989a
RB
2841 last = gsi_stmt (i);
2842 gsi_prev (&i);
2843 if (gsi_end_p (i))
6de9cd9a
DN
2844 return last;
2845
2846 /* Empty statements should no longer appear in the instruction stream.
2847 Everything that might have appeared before should be deleted by
726a989a 2848 remove_useless_stmts, and the optimizers should just gsi_remove
6de9cd9a
DN
2849 instead of smashing with build_empty_stmt.
2850
2851 Thus the only thing that should appear here in a block containing
2852 one executable statement is a label. */
726a989a
RB
2853 prev = gsi_stmt (i);
2854 if (gimple_code (prev) == GIMPLE_LABEL)
6de9cd9a
DN
2855 return last;
2856 else
726a989a 2857 return NULL;
82b85a85 2858}
6de9cd9a 2859
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.

   redirect_edge_and_branch stashes the PHI args of the redirected edge
   in a per-edge var map; this walks that map in lock-step with the PHI
   nodes of NEW_EDGE's destination and re-adds each argument on
   NEW_EDGE, then clears the stash.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator phis;

  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* The queued entries and the destination's PHI list are expected to
     run in parallel; iterate both together.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple phi = gsi_stmt (phis);
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* Each queued entry must correspond to this PHI node.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge);
    }

  redirect_edge_var_map_clear (old_edge);
}
2889
2a8a8292 2890/* Returns the basic block after which the new basic block created
b9a66240
ZD
2891 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2892 near its "logical" location. This is of most help to humans looking
2893 at debugging dumps. */
2894
2895static basic_block
2896split_edge_bb_loc (edge edge_in)
2897{
2898 basic_block dest = edge_in->dest;
2899
2900 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
2901 return edge_in->src;
2902 else
2903 return dest->prev_bb;
2904}
2905
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  /* The new block inherits the execution count and frequency of the
     edge it replaces; the single fallthru edge out of it is taken
     unconditionally, hence probability REG_BR_PROB_BASE.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Redirect EDGE_IN into the new block.  Redirection queues the PHI
     arguments of DEST that flowed along EDGE_IN; reinstall them on the
     new fallthru edge.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2935
6de9cd9a 2936/* Callback for walk_tree, check that all elements with address taken are
7a442a1d
SB
2937 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2938 inside a PHI node. */
6de9cd9a
DN
2939
2940static tree
2fbe90f2 2941verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
6de9cd9a
DN
2942{
2943 tree t = *tp, x;
2944
2945 if (TYPE_P (t))
2946 *walk_subtrees = 0;
6531d1be 2947
e8ca4159 2948 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2fbe90f2 2949#define CHECK_OP(N, MSG) \
e8ca4159 2950 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2fbe90f2 2951 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
6de9cd9a
DN
2952
2953 switch (TREE_CODE (t))
2954 {
2955 case SSA_NAME:
2956 if (SSA_NAME_IN_FREE_LIST (t))
2957 {
2958 error ("SSA name in freelist but still referenced");
2959 return *tp;
2960 }
2961 break;
2962
26de0bcb
AP
2963 case INDIRECT_REF:
2964 x = TREE_OPERAND (t, 0);
2965 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2966 {
2967 error ("Indirect reference's operand is not a register or a constant.");
2968 return x;
2969 }
2970 break;
2971
0bca51f0
DN
2972 case ASSERT_EXPR:
2973 x = fold (ASSERT_EXPR_COND (t));
2974 if (x == boolean_false_node)
2975 {
2976 error ("ASSERT_EXPR with an always-false condition");
2977 return *tp;
2978 }
2979 break;
2980
6de9cd9a 2981 case MODIFY_EXPR:
26de0bcb 2982 error ("MODIFY_EXPR not expected while having tuples.");
e57fcb68 2983 return *tp;
6de9cd9a
DN
2984
2985 case ADDR_EXPR:
81fc3052 2986 {
81fc3052
DB
2987 bool old_constant;
2988 bool old_side_effects;
81fc3052
DB
2989 bool new_constant;
2990 bool new_side_effects;
2991
51eed280
PB
2992 gcc_assert (is_gimple_address (t));
2993
81fc3052
DB
2994 old_constant = TREE_CONSTANT (t);
2995 old_side_effects = TREE_SIDE_EFFECTS (t);
2996
127203ac 2997 recompute_tree_invariant_for_addr_expr (t);
81fc3052
DB
2998 new_side_effects = TREE_SIDE_EFFECTS (t);
2999 new_constant = TREE_CONSTANT (t);
3000
81fc3052
DB
3001 if (old_constant != new_constant)
3002 {
3003 error ("constant not recomputed when ADDR_EXPR changed");
3004 return t;
3005 }
3006 if (old_side_effects != new_side_effects)
3007 {
3008 error ("side effects not recomputed when ADDR_EXPR changed");
3009 return t;
3010 }
3011
3012 /* Skip any references (they will be checked when we recurse down the
3013 tree) and ensure that any variable used as a prefix is marked
3014 addressable. */
3015 for (x = TREE_OPERAND (t, 0);
3016 handled_component_p (x);
3017 x = TREE_OPERAND (x, 0))
3018 ;
3019
5006671f
RG
3020 if (!(TREE_CODE (x) == VAR_DECL
3021 || TREE_CODE (x) == PARM_DECL
3022 || TREE_CODE (x) == RESULT_DECL))
81fc3052
DB
3023 return NULL;
3024 if (!TREE_ADDRESSABLE (x))
3025 {
3026 error ("address taken, but ADDRESSABLE bit not set");
3027 return x;
3028 }
ba4d8f9d
RG
3029 if (DECL_GIMPLE_REG_P (x))
3030 {
3031 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
3032 return x;
3033 }
bdb69bee 3034
81fc3052
DB
3035 break;
3036 }
6de9cd9a
DN
3037
3038 case COND_EXPR:
a6234684 3039 x = COND_EXPR_COND (t);
d40055ab 3040 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
6de9cd9a 3041 {
d40055ab 3042 error ("non-integral used in condition");
6de9cd9a
DN
3043 return x;
3044 }
9c691961
AP
3045 if (!is_gimple_condexpr (x))
3046 {
ab532386 3047 error ("invalid conditional operand");
9c691961
AP
3048 return x;
3049 }
6de9cd9a
DN
3050 break;
3051
a134e5f3
TB
3052 case NON_LVALUE_EXPR:
3053 gcc_unreachable ();
3054
1043771b 3055 CASE_CONVERT:
6de9cd9a 3056 case FIX_TRUNC_EXPR:
6de9cd9a
DN
3057 case FLOAT_EXPR:
3058 case NEGATE_EXPR:
3059 case ABS_EXPR:
3060 case BIT_NOT_EXPR:
6de9cd9a 3061 case TRUTH_NOT_EXPR:
ab532386 3062 CHECK_OP (0, "invalid operand to unary operator");
6de9cd9a
DN
3063 break;
3064
3065 case REALPART_EXPR:
3066 case IMAGPART_EXPR:
2fbe90f2
RK
3067 case COMPONENT_REF:
3068 case ARRAY_REF:
3069 case ARRAY_RANGE_REF:
3070 case BIT_FIELD_REF:
3071 case VIEW_CONVERT_EXPR:
3072 /* We have a nest of references. Verify that each of the operands
3073 that determine where to reference is either a constant or a variable,
3074 verify that the base is valid, and then show we've already checked
3075 the subtrees. */
afe84921 3076 while (handled_component_p (t))
2fbe90f2
RK
3077 {
3078 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
ab532386 3079 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2fbe90f2
RK
3080 else if (TREE_CODE (t) == ARRAY_REF
3081 || TREE_CODE (t) == ARRAY_RANGE_REF)
3082 {
ab532386 3083 CHECK_OP (1, "invalid array index");
2fbe90f2 3084 if (TREE_OPERAND (t, 2))
ab532386 3085 CHECK_OP (2, "invalid array lower bound");
2fbe90f2 3086 if (TREE_OPERAND (t, 3))
ab532386 3087 CHECK_OP (3, "invalid array stride");
2fbe90f2
RK
3088 }
3089 else if (TREE_CODE (t) == BIT_FIELD_REF)
3090 {
e55f42fb
RG
3091 if (!host_integerp (TREE_OPERAND (t, 1), 1)
3092 || !host_integerp (TREE_OPERAND (t, 2), 1))
3093 {
3094 error ("invalid position or size operand to BIT_FIELD_REF");
3095 return t;
3096 }
fc0f49f3
RG
3097 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3098 && (TYPE_PRECISION (TREE_TYPE (t))
3099 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3100 {
3101 error ("integral result type precision does not match "
3102 "field size of BIT_FIELD_REF");
3103 return t;
3104 }
3105 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3106 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
3107 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3108 {
3109 error ("mode precision of non-integral result does not "
3110 "match field size of BIT_FIELD_REF");
3111 return t;
3112 }
2fbe90f2
RK
3113 }
3114
3115 t = TREE_OPERAND (t, 0);
3116 }
3117
bb0c55f6 3118 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2fbe90f2 3119 {
ab532386 3120 error ("invalid reference prefix");
2fbe90f2
RK
3121 return t;
3122 }
3123 *walk_subtrees = 0;
6de9cd9a 3124 break;
5be014d5
AP
3125 case PLUS_EXPR:
3126 case MINUS_EXPR:
3127 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3128 POINTER_PLUS_EXPR. */
3129 if (POINTER_TYPE_P (TREE_TYPE (t)))
3130 {
3131 error ("invalid operand to plus/minus, type is a pointer");
3132 return t;
3133 }
3134 CHECK_OP (0, "invalid operand to binary operator");
3135 CHECK_OP (1, "invalid operand to binary operator");
3136 break;
6de9cd9a 3137
5be014d5
AP
3138 case POINTER_PLUS_EXPR:
3139 /* Check to make sure the first operand is a pointer or reference type. */
3140 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3141 {
3142 error ("invalid operand to pointer plus, first operand is not a pointer");
3143 return t;
3144 }
3145 /* Check to make sure the second operand is an integer with type of
3146 sizetype. */
36618b93
RG
3147 if (!useless_type_conversion_p (sizetype,
3148 TREE_TYPE (TREE_OPERAND (t, 1))))
5be014d5
AP
3149 {
3150 error ("invalid operand to pointer plus, second operand is not an "
3151 "integer with type of sizetype.");
3152 return t;
3153 }
3154 /* FALLTHROUGH */
6de9cd9a
DN
3155 case LT_EXPR:
3156 case LE_EXPR:
3157 case GT_EXPR:
3158 case GE_EXPR:
3159 case EQ_EXPR:
3160 case NE_EXPR:
3161 case UNORDERED_EXPR:
3162 case ORDERED_EXPR:
3163 case UNLT_EXPR:
3164 case UNLE_EXPR:
3165 case UNGT_EXPR:
3166 case UNGE_EXPR:
3167 case UNEQ_EXPR:
d1a7edaf 3168 case LTGT_EXPR:
6de9cd9a
DN
3169 case MULT_EXPR:
3170 case TRUNC_DIV_EXPR:
3171 case CEIL_DIV_EXPR:
3172 case FLOOR_DIV_EXPR:
3173 case ROUND_DIV_EXPR:
3174 case TRUNC_MOD_EXPR:
3175 case CEIL_MOD_EXPR:
3176 case FLOOR_MOD_EXPR:
3177 case ROUND_MOD_EXPR:
3178 case RDIV_EXPR:
3179 case EXACT_DIV_EXPR:
3180 case MIN_EXPR:
3181 case MAX_EXPR:
3182 case LSHIFT_EXPR:
3183 case RSHIFT_EXPR:
3184 case LROTATE_EXPR:
3185 case RROTATE_EXPR:
3186 case BIT_IOR_EXPR:
3187 case BIT_XOR_EXPR:
3188 case BIT_AND_EXPR:
ab532386
JM
3189 CHECK_OP (0, "invalid operand to binary operator");
3190 CHECK_OP (1, "invalid operand to binary operator");
6de9cd9a
DN
3191 break;
3192
84816907
JM
3193 case CONSTRUCTOR:
3194 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3195 *walk_subtrees = 0;
3196 break;
3197
6de9cd9a
DN
3198 default:
3199 break;
3200 }
3201 return NULL;
2fbe90f2
RK
3202
3203#undef CHECK_OP
6de9cd9a
DN
3204}
3205
7e98624c
RG
3206
3207/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3208 Returns true if there is an error, otherwise false. */
3209
3210static bool
726a989a 3211verify_types_in_gimple_min_lval (tree expr)
7e98624c
RG
3212{
3213 tree op;
3214
3215 if (is_gimple_id (expr))
3216 return false;
3217
9f509004
RG
3218 if (!INDIRECT_REF_P (expr)
3219 && TREE_CODE (expr) != TARGET_MEM_REF)
7e98624c
RG
3220 {
3221 error ("invalid expression for min lvalue");
3222 return true;
3223 }
3224
9f509004
RG
3225 /* TARGET_MEM_REFs are strange beasts. */
3226 if (TREE_CODE (expr) == TARGET_MEM_REF)
3227 return false;
3228
7e98624c
RG
3229 op = TREE_OPERAND (expr, 0);
3230 if (!is_gimple_val (op))
3231 {
3232 error ("invalid operand in indirect reference");
3233 debug_generic_stmt (op);
3234 return true;
3235 }
3236 if (!useless_type_conversion_p (TREE_TYPE (expr),
3237 TREE_TYPE (TREE_TYPE (op))))
3238 {
3239 error ("type mismatch in indirect reference");
3240 debug_generic_stmt (TREE_TYPE (expr));
3241 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3242 return true;
3243 }
3244
3245 return false;
3246}
3247
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the chain of handled components outermost-first, checking each
     level's operands and types against the level below.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (operands 1-3) must all be
	     GIMPLE values when present.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* REALPART/IMAGPART yield the component type of the complex
	 operand.  */
      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* A COMPONENT_REF has the type of the FIELD_DECL it references.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
	 is nothing to verify.  Gross mismatches at most invoke
	 undefined behavior.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && !handled_component_p (op))
	return false;

      expr = op;
    }

  /* Finally the innermost base must be a minimal lvalue; constants are
     acceptable bases unless an lvalue is required.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3328
20dcff2a
RG
3329/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3330 list of pointer-to types that is trivially convertible to DEST. */
3331
3332static bool
3333one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3334{
3335 tree src;
3336
3337 if (!TYPE_POINTER_TO (src_obj))
3338 return true;
3339
3340 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3341 if (useless_type_conversion_p (dest, src))
3342 return true;
3343
3344 return false;
3345}
3346
726a989a
RB
3347/* Return true if TYPE1 is a fixed-point type and if conversions to and
3348 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3349
3350static bool
3351valid_fixed_convert_types_p (tree type1, tree type2)
3352{
3353 return (FIXED_POINT_TYPE_P (type1)
3354 && (INTEGRAL_TYPE_P (type2)
3355 || SCALAR_FLOAT_TYPE_P (type2)
3356 || FIXED_POINT_TYPE_P (type2)));
3357}
3358
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gimple stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype;

  /* The callee must be a pointer to a function or method type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (fn))
      || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* A present LHS must be something assignable.  */
  if (gimple_call_lhs (stmt)
      && !is_gimple_lvalue (gimple_call_lhs (stmt)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  /* The LHS type must match the call's return type.  */
  fntype = TREE_TYPE (TREE_TYPE (fn));
  if (gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  return false;
}
3408
b59d3976
RG
3409/* Verifies the gimple comparison with the result type TYPE and
3410 the operands OP0 and OP1. */
17d23165
RS
3411
3412static bool
b59d3976 3413verify_gimple_comparison (tree type, tree op0, tree op1)
17d23165 3414{
b59d3976
RG
3415 tree op0_type = TREE_TYPE (op0);
3416 tree op1_type = TREE_TYPE (op1);
726a989a 3417
b59d3976
RG
3418 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3419 {
3420 error ("invalid operands in gimple comparison");
3421 return true;
3422 }
17d23165 3423
b59d3976
RG
3424 /* For comparisons we do not have the operations type as the
3425 effective type the comparison is carried out in. Instead
3426 we require that either the first operand is trivially
3427 convertible into the second, or the other way around.
3428 The resulting type of a comparison may be any integral type.
3429 Because we special-case pointers to void we allow
3430 comparisons of pointers with the same mode as well. */
3431 if ((!useless_type_conversion_p (op0_type, op1_type)
3432 && !useless_type_conversion_p (op1_type, op0_type)
3433 && (!POINTER_TYPE_P (op0_type)
3434 || !POINTER_TYPE_P (op1_type)
3435 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3436 || !INTEGRAL_TYPE_P (type))
3437 {
3438 error ("type mismatch in comparison expression");
3439 debug_generic_expr (type);
3440 debug_generic_expr (op0_type);
3441 debug_generic_expr (op1_type);
3442 return true;
3443 }
3444
3445 return false;
3446}
726a989a 3447
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  /* Unary operations must target a register; at -O0 complex-typed
     memory is tolerated.  */
  if (!is_gimple_reg (lhs)
      && !(optimize == 0
	   && TREE_CODE (lhs_type) == COMPLEX_TYPE))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions between integral types and pointers only if
	   there is no sign or zero extension involved.
	   For targets were the precision of sizetype doesn't match that
	   of pointers we need to allow arbitrary conversions from and
	   to sizetype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type)
	     && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
		 || rhs1_type == sizetype))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || lhs_type == sizetype)))
	  return false;

	/* Allow conversion from integer to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && TREE_CODE (rhs1_type) == INTEGER_TYPE)
	    || (TREE_CODE (lhs_type) == INTEGER_TYPE
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* Either direction of the fixed-point conversion must be
	   representable by FIXED_CONVERT_EXPR.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer -> float only.  */
	if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Float -> integer only.  */
	if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case TRUTH_NOT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case NON_LVALUE_EXPR:
    case CONJ_EXPR:
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3588
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  /* Binary operations must target a register; at -O0 complex-typed
     memory is tolerated.  */
  if (!is_gimple_reg (lhs)
      && !(optimize == 0
	   && TREE_CODE (lhs_type) == COMPLEX_TYPE))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* Both components must be scalar integer or float; the result
	   is the complex type built from them.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
		 || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
		 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && TREE_CODE (TREE_TYPE (rhs2_type)) == INTEGER_TYPE))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	/* Whole-vector shifts: the first operand is a vector of
	   integral, fixed-point or float elements; the shift amount is
	   an integer or an integer vector.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		&& (TREE_CODE (rhs2_type) != VECTOR_TYPE
		    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	/* For shifting a vector of floating point components we
	   only allow shifting by a constant multiple of the element size.  */
	if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
	    && (TREE_CODE (rhs2) != INTEGER_CST
		|| !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
					   TYPE_SIZE (TREE_TYPE (rhs1_type)))))
	  {
	    error ("non-element sized vector shift of floating point vector");
	    return true;
	  }

	return false;
      }

    case PLUS_EXPR:
      {
	/* We use regular PLUS_EXPR for vectors.
	   ??? This just makes the checker happy and may not be what is
	   intended.  */
	if (TREE_CODE (lhs_type) == VECTOR_TYPE
	    && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    lhs_type = TREE_TYPE (lhs_type);
	    rhs1_type = TREE_TYPE (rhs1_type);
	    rhs2_type = TREE_TYPE (rhs2_type);
	    /* PLUS_EXPR is commutative, so we might end up canonicalizing
	       the pointer to 2nd place.  */
	    if (POINTER_TYPE_P (rhs2_type))
	      {
		tree tem = rhs1_type;
		rhs1_type = rhs2_type;
		rhs2_type = tem;
	      }
	    goto do_pointer_plus_expr_check;
	  }
      }
      /* Fallthru.  */
    case MINUS_EXPR:
      {
	/* Pointer arithmetic must use POINTER_PLUS_EXPR instead.  */
	if (POINTER_TYPE_P (lhs_type)
	    || POINTER_TYPE_P (rhs1_type)
	    || POINTER_TYPE_P (rhs2_type))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
do_pointer_plus_expr_check:
	/* Pointer result and first operand agree; offset is sizetype.  */
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !useless_type_conversion_p (sizetype, rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Short-circuit forms must have been lowered to control flow
	 before reaching GIMPLE.  */
      gcc_unreachable ();

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      {
	/* We allow any kind of integral typed argument and result.  */
	if (!INTEGRAL_TYPE_P (rhs1_type)
	    || !INTEGRAL_TYPE_P (rhs2_type)
	    || !INTEGRAL_TYPE_P (lhs_type))
	  {
	    error ("type mismatch in binary truth expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);

    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      /* FIXME.  */
      return false;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* Generic case: both operands must trivially convert to the result
     type.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
3850
/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_single (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  bool res = false;

  /* A plain copy requires trivially convertible types.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion at assignment");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  /* A component LHS is a store; verify the reference chain as an
     lvalue.  Accumulate into RES so the RHS is still checked.  */
  if (handled_component_p (lhs))
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
	tree op = TREE_OPERAND (rhs1, 0);
	if (!is_gimple_addressable (op))
	  {
	    error ("invalid operand in unary expression");
	    return true;
	  }

	/* The LHS type must be convertible from some pointer-to type
	   of the operand's type.  */
	if (!one_pointer_to_useless_type_conversion_p (lhs_type,
						       TREE_TYPE (op)))
	  {
	    error ("type mismatch in address expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
	    return true;
	  }

	return verify_types_in_gimple_reference (op, true);
      }

    /* tcc_reference */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case TARGET_MEM_REF:
      /* A memory store of a register-typed value must have a register
	 LHS; otherwise this would be a memory-to-memory copy.  */
      if (!is_gimple_reg (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res || verify_types_in_gimple_reference (rhs1, false);

    /* tcc_constant */
    case SSA_NAME:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return res;

    /* tcc_declaration */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      /* Likewise: at least one side of a register-typed copy must be a
	 register.  */
      if (!is_gimple_reg (lhs)
	  && !is_gimple_reg (rhs1)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case POLYNOMIAL_CHREC:
    case DOT_PROD_EXPR:
    case VEC_COND_EXPR:
    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return res;

    default:;
    }

  return res;
}
3966
9f509004
RG
3967/* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3968 is a problem, otherwise false. */
3969
3970static bool
3971verify_gimple_assign (gimple stmt)
3972{
3973 switch (gimple_assign_rhs_class (stmt))
3974 {
3975 case GIMPLE_SINGLE_RHS:
3976 return verify_gimple_assign_single (stmt);
3977
3978 case GIMPLE_UNARY_RHS:
3979 return verify_gimple_assign_unary (stmt);
3980
3981 case GIMPLE_BINARY_RHS:
3982 return verify_gimple_assign_binary (stmt);
3983
3984 default:
3985 gcc_unreachable ();
3986 }
3987}
726a989a
RB
3988
3989/* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3990 is a problem, otherwise false. */
7e98624c
RG
3991
3992static bool
b59d3976 3993verify_gimple_return (gimple stmt)
7e98624c 3994{
726a989a 3995 tree op = gimple_return_retval (stmt);
b59d3976 3996 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
726a989a 3997
b59d3976
RG
3998 /* We cannot test for present return values as we do not fix up missing
3999 return values from the original source. */
726a989a
RB
4000 if (op == NULL)
4001 return false;
b59d3976
RG
4002
4003 if (!is_gimple_val (op)
4004 && TREE_CODE (op) != RESULT_DECL)
4005 {
4006 error ("invalid operand in return statement");
4007 debug_generic_stmt (op);
4008 return true;
4009 }
4010
4011 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
4012 /* ??? With C++ we can have the situation that the result
4013 decl is a reference type while the return type is an aggregate. */
4014 && !(TREE_CODE (op) == RESULT_DECL
4015 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
4016 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
4017 {
4018 error ("invalid conversion in return statement");
4019 debug_generic_stmt (restype);
4020 debug_generic_stmt (TREE_TYPE (op));
4021 return true;
4022 }
4023
4024 return false;
726a989a 4025}
7e98624c 4026
7e98624c 4027
b59d3976
RG
4028/* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4029 is a problem, otherwise false. */
4030
4031static bool
4032verify_gimple_goto (gimple stmt)
4033{
4034 tree dest = gimple_goto_dest (stmt);
4035
4036 /* ??? We have two canonical forms of direct goto destinations, a
4037 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4038 if (TREE_CODE (dest) != LABEL_DECL
4039 && (!is_gimple_val (dest)
4040 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4041 {
4042 error ("goto destination is neither a label nor a pointer");
4043 return true;
4044 }
4045
4046 return false;
4047}
4048
726a989a
RB
4049/* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4050 is a problem, otherwise false. */
4051
4052static bool
b59d3976 4053verify_gimple_switch (gimple stmt)
726a989a
RB
4054{
4055 if (!is_gimple_val (gimple_switch_index (stmt)))
7e98624c 4056 {
726a989a 4057 error ("invalid operand to switch statement");
b59d3976 4058 debug_generic_stmt (gimple_switch_index (stmt));
7e98624c
RG
4059 return true;
4060 }
4061
726a989a
RB
4062 return false;
4063}
7e98624c 4064
7e98624c 4065
726a989a
RB
4066/* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4067 and false otherwise. */
7e98624c 4068
726a989a 4069static bool
b59d3976 4070verify_gimple_phi (gimple stmt)
726a989a 4071{
b59d3976
RG
4072 tree type = TREE_TYPE (gimple_phi_result (stmt));
4073 unsigned i;
7e98624c 4074
b59d3976
RG
4075 if (!is_gimple_variable (gimple_phi_result (stmt)))
4076 {
4077 error ("Invalid PHI result");
4078 return true;
4079 }
7e98624c 4080
726a989a 4081 for (i = 0; i < gimple_phi_num_args (stmt); i++)
b59d3976
RG
4082 {
4083 tree arg = gimple_phi_arg_def (stmt, i);
9f509004
RG
4084 if ((is_gimple_reg (gimple_phi_result (stmt))
4085 && !is_gimple_val (arg))
4086 || (!is_gimple_reg (gimple_phi_result (stmt))
4087 && !is_gimple_addressable (arg)))
b59d3976
RG
4088 {
4089 error ("Invalid PHI argument");
4090 debug_generic_stmt (arg);
4091 return true;
4092 }
4093 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
4094 {
587aa063 4095 error ("Incompatible types in PHI argument %u", i);
b59d3976
RG
4096 debug_generic_stmt (type);
4097 debug_generic_stmt (TREE_TYPE (arg));
4098 return true;
4099 }
4100 }
726a989a 4101
7e98624c
RG
4102 return false;
4103}
4104
726a989a 4105
7e98624c
RG
4106/* Verify the GIMPLE statement STMT. Returns true if there is an
4107 error, otherwise false. */
4108
4109static bool
726a989a 4110verify_types_in_gimple_stmt (gimple stmt)
7e98624c 4111{
726a989a 4112 if (is_gimple_omp (stmt))
7e98624c
RG
4113 {
4114 /* OpenMP directives are validated by the FE and never operated
726a989a 4115 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
7e98624c
RG
4116 non-gimple expressions when the main index variable has had
4117 its address taken. This does not affect the loop itself
726a989a 4118 because the header of an GIMPLE_OMP_FOR is merely used to determine
7e98624c
RG
4119 how to setup the parallel iteration. */
4120 return false;
4121 }
4122
726a989a 4123 switch (gimple_code (stmt))
7e98624c 4124 {
726a989a 4125 case GIMPLE_ASSIGN:
9f509004 4126 return verify_gimple_assign (stmt);
7e98624c 4127
726a989a
RB
4128 case GIMPLE_LABEL:
4129 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
7e98624c 4130
726a989a 4131 case GIMPLE_CALL:
b59d3976 4132 return verify_gimple_call (stmt);
7e98624c 4133
726a989a 4134 case GIMPLE_COND:
b59d3976
RG
4135 return verify_gimple_comparison (boolean_type_node,
4136 gimple_cond_lhs (stmt),
4137 gimple_cond_rhs (stmt));
7e98624c 4138
726a989a 4139 case GIMPLE_GOTO:
b59d3976 4140 return verify_gimple_goto (stmt);
7e98624c 4141
726a989a 4142 case GIMPLE_SWITCH:
b59d3976 4143 return verify_gimple_switch (stmt);
7e98624c 4144
726a989a 4145 case GIMPLE_RETURN:
b59d3976 4146 return verify_gimple_return (stmt);
7e98624c 4147
726a989a 4148 case GIMPLE_ASM:
7e98624c
RG
4149 return false;
4150
726a989a 4151 case GIMPLE_PHI:
b59d3976
RG
4152 return verify_gimple_phi (stmt);
4153
4154 /* Tuples that do not have tree operands. */
4155 case GIMPLE_NOP:
4156 case GIMPLE_RESX:
4157 case GIMPLE_PREDICT:
4158 return false;
726a989a 4159
7e98624c
RG
4160 default:
4161 gcc_unreachable ();
4162 }
4163}
4164
726a989a 4165/* Verify the GIMPLE statements inside the sequence STMTS. */
7e98624c 4166
7dc83ebc 4167static bool
726a989a 4168verify_types_in_gimple_seq_2 (gimple_seq stmts)
7e98624c 4169{
726a989a 4170 gimple_stmt_iterator ittr;
7dc83ebc 4171 bool err = false;
7e98624c 4172
726a989a 4173 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
7e98624c 4174 {
726a989a 4175 gimple stmt = gsi_stmt (ittr);
7e98624c 4176
726a989a
RB
4177 switch (gimple_code (stmt))
4178 {
b59d3976
RG
4179 case GIMPLE_BIND:
4180 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
4181 break;
4182
4183 case GIMPLE_TRY:
4184 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
4185 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
4186 break;
4187
4188 case GIMPLE_EH_FILTER:
4189 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
4190 break;
4191
4192 case GIMPLE_CATCH:
4193 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
4194 break;
7e98624c
RG
4195
4196 default:
7dc83ebc 4197 {
726a989a 4198 bool err2 = verify_types_in_gimple_stmt (stmt);
7dc83ebc 4199 if (err2)
726a989a 4200 debug_gimple_stmt (stmt);
7dc83ebc
RG
4201 err |= err2;
4202 }
7e98624c
RG
4203 }
4204 }
7dc83ebc
RG
4205
4206 return err;
4207}
4208
4209
4210/* Verify the GIMPLE statements inside the statement list STMTS. */
4211
4212void
726a989a 4213verify_types_in_gimple_seq (gimple_seq stmts)
7dc83ebc 4214{
726a989a 4215 if (verify_types_in_gimple_seq_2 (stmts))
7dc83ebc 4216 internal_error ("verify_gimple failed");
7e98624c
RG
4217}
4218
6de9cd9a
DN
4219
4220/* Verify STMT, return true if STMT is not in GIMPLE form.
4221 TODO: Implement type checking. */
4222
4223static bool
726a989a 4224verify_stmt (gimple_stmt_iterator *gsi)
6de9cd9a
DN
4225{
4226 tree addr;
726a989a
RB
4227 struct walk_stmt_info wi;
4228 bool last_in_block = gsi_one_before_end_p (*gsi);
4229 gimple stmt = gsi_stmt (*gsi);
6de9cd9a 4230
726a989a 4231 if (is_gimple_omp (stmt))
50674e96
DN
4232 {
4233 /* OpenMP directives are validated by the FE and never operated
726a989a 4234 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
50674e96
DN
4235 non-gimple expressions when the main index variable has had
4236 its address taken. This does not affect the loop itself
726a989a 4237 because the header of an GIMPLE_OMP_FOR is merely used to determine
50674e96
DN
4238 how to setup the parallel iteration. */
4239 return false;
4240 }
4241
726a989a
RB
4242 /* FIXME. The C frontend passes unpromoted arguments in case it
4243 didn't see a function declaration before the call. */
4244 if (is_gimple_call (stmt))
6de9cd9a 4245 {
7c9577be 4246 tree decl;
726a989a 4247
7c9577be
RG
4248 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4249 {
4250 error ("invalid function in call statement");
4251 return true;
4252 }
4253
4254 decl = gimple_call_fndecl (stmt);
4255 if (decl
4256 && TREE_CODE (decl) == FUNCTION_DECL
726a989a
RB
4257 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4258 && (!DECL_PURE_P (decl))
4259 && (!TREE_READONLY (decl)))
4260 {
4261 error ("invalid pure const state for function");
4262 return true;
4263 }
6de9cd9a
DN
4264 }
4265
726a989a
RB
4266 memset (&wi, 0, sizeof (wi));
4267 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
6de9cd9a
DN
4268 if (addr)
4269 {
726a989a 4270 debug_generic_expr (addr);
c2255bc4 4271 inform (gimple_location (gsi_stmt (*gsi)), "in statement");
726a989a 4272 debug_gimple_stmt (stmt);
6de9cd9a
DN
4273 return true;
4274 }
4275
1eaba2f2
RH
4276 /* If the statement is marked as part of an EH region, then it is
4277 expected that the statement could throw. Verify that when we
4278 have optimizations that simplify statements such that we prove
4279 that they cannot throw, that we update other data structures
4280 to match. */
4281 if (lookup_stmt_eh_region (stmt) >= 0)
4282 {
2505c5ed
JH
4283 /* During IPA passes, ipa-pure-const sets nothrow flags on calls
4284 and they are updated on statements only after fixup_cfg
4285 is executed at beggining of expansion stage. */
4286 if (!stmt_could_throw_p (stmt) && cgraph_state != CGRAPH_STATE_IPA_SSA)
1eaba2f2 4287 {
ab532386 4288 error ("statement marked for throw, but doesn%'t");
1eaba2f2
RH
4289 goto fail;
4290 }
726a989a 4291 if (!last_in_block && stmt_can_throw_internal (stmt))
1eaba2f2 4292 {
ab532386 4293 error ("statement marked for throw in middle of block");
1eaba2f2
RH
4294 goto fail;
4295 }
4296 }
4297
6de9cd9a 4298 return false;
1eaba2f2
RH
4299
4300 fail:
726a989a 4301 debug_gimple_stmt (stmt);
1eaba2f2 4302 return true;
6de9cd9a
DN
4303}
4304
4305
4306/* Return true when the T can be shared. */
4307
4308static bool
4309tree_node_can_be_shared (tree t)
4310{
6615c446 4311 if (IS_TYPE_OR_DECL_P (t)
6de9cd9a 4312 || is_gimple_min_invariant (t)
5e23162d 4313 || TREE_CODE (t) == SSA_NAME
953ff289
DN
4314 || t == error_mark_node
4315 || TREE_CODE (t) == IDENTIFIER_NODE)
6de9cd9a
DN
4316 return true;
4317
92b6dff3
JL
4318 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4319 return true;
4320
44de5aeb 4321 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
953ff289
DN
4322 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4323 || TREE_CODE (t) == COMPONENT_REF
4324 || TREE_CODE (t) == REALPART_EXPR
4325 || TREE_CODE (t) == IMAGPART_EXPR)
6de9cd9a
DN
4326 t = TREE_OPERAND (t, 0);
4327
4328 if (DECL_P (t))
4329 return true;
4330
4331 return false;
4332}
4333
4334
726a989a 4335/* Called via walk_gimple_stmt. Verify tree sharing. */
6de9cd9a
DN
4336
4337static tree
726a989a 4338verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
6de9cd9a 4339{
726a989a
RB
4340 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4341 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
6de9cd9a
DN
4342
4343 if (tree_node_can_be_shared (*tp))
4344 {
4345 *walk_subtrees = false;
4346 return NULL;
4347 }
4348
4437b50d
JH
4349 if (pointer_set_insert (visited, *tp))
4350 return *tp;
6de9cd9a
DN
4351
4352 return NULL;
4353}
4354
4355
4437b50d
JH
4356static bool eh_error_found;
4357static int
4358verify_eh_throw_stmt_node (void **slot, void *data)
4359{
4360 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4361 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4362
4363 if (!pointer_set_contains (visited, node->stmt))
4364 {
4365 error ("Dead STMT in EH table");
726a989a 4366 debug_gimple_stmt (node->stmt);
4437b50d
JH
4367 eh_error_found = true;
4368 }
c13edb67 4369 return 1;
4437b50d
JH
4370}
4371
726a989a
RB
4372
4373/* Verify the GIMPLE statements in every basic block. */
6de9cd9a
DN
4374
4375void
4376verify_stmts (void)
4377{
4378 basic_block bb;
726a989a 4379 gimple_stmt_iterator gsi;
6de9cd9a 4380 bool err = false;
4437b50d 4381 struct pointer_set_t *visited, *visited_stmts;
6de9cd9a 4382 tree addr;
726a989a 4383 struct walk_stmt_info wi;
6de9cd9a
DN
4384
4385 timevar_push (TV_TREE_STMT_VERIFY);
4437b50d
JH
4386 visited = pointer_set_create ();
4387 visited_stmts = pointer_set_create ();
6de9cd9a 4388
726a989a
RB
4389 memset (&wi, 0, sizeof (wi));
4390 wi.info = (void *) visited;
4391
6de9cd9a
DN
4392 FOR_EACH_BB (bb)
4393 {
726a989a
RB
4394 gimple phi;
4395 size_t i;
6de9cd9a 4396
726a989a 4397 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4398 {
726a989a 4399 phi = gsi_stmt (gsi);
4437b50d 4400 pointer_set_insert (visited_stmts, phi);
726a989a 4401 if (gimple_bb (phi) != bb)
8de1fc1b 4402 {
726a989a 4403 error ("gimple_bb (phi) is set to a wrong basic block");
8de1fc1b
KH
4404 err |= true;
4405 }
4406
726a989a 4407 for (i = 0; i < gimple_phi_num_args (phi); i++)
6de9cd9a 4408 {
726a989a 4409 tree t = gimple_phi_arg_def (phi, i);
6de9cd9a
DN
4410 tree addr;
4411
e9705dc5
AO
4412 if (!t)
4413 {
4414 error ("missing PHI def");
726a989a 4415 debug_gimple_stmt (phi);
e9705dc5
AO
4416 err |= true;
4417 continue;
4418 }
6de9cd9a
DN
4419 /* Addressable variables do have SSA_NAMEs but they
4420 are not considered gimple values. */
e9705dc5
AO
4421 else if (TREE_CODE (t) != SSA_NAME
4422 && TREE_CODE (t) != FUNCTION_DECL
220f1c29 4423 && !is_gimple_min_invariant (t))
6de9cd9a 4424 {
726a989a
RB
4425 error ("PHI argument is not a GIMPLE value");
4426 debug_gimple_stmt (phi);
4427 debug_generic_expr (t);
6de9cd9a
DN
4428 err |= true;
4429 }
4430
4437b50d 4431 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
6de9cd9a
DN
4432 if (addr)
4433 {
ab532386 4434 error ("incorrect sharing of tree nodes");
726a989a
RB
4435 debug_gimple_stmt (phi);
4436 debug_generic_expr (addr);
6de9cd9a
DN
4437 err |= true;
4438 }
4439 }
211ca15c
RG
4440
4441#ifdef ENABLE_TYPES_CHECKING
4442 if (verify_gimple_phi (phi))
4443 {
4444 debug_gimple_stmt (phi);
4445 err |= true;
4446 }
4447#endif
6de9cd9a
DN
4448 }
4449
726a989a 4450 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
6de9cd9a 4451 {
726a989a
RB
4452 gimple stmt = gsi_stmt (gsi);
4453
4454 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4455 || gimple_code (stmt) == GIMPLE_BIND)
4456 {
4457 error ("invalid GIMPLE statement");
4458 debug_gimple_stmt (stmt);
4459 err |= true;
4460 }
8de1fc1b 4461
4437b50d 4462 pointer_set_insert (visited_stmts, stmt);
07beea0d 4463
726a989a 4464 if (gimple_bb (stmt) != bb)
8de1fc1b 4465 {
726a989a 4466 error ("gimple_bb (stmt) is set to a wrong basic block");
2cd713a0 4467 debug_gimple_stmt (stmt);
8de1fc1b
KH
4468 err |= true;
4469 }
4470
726a989a
RB
4471 if (gimple_code (stmt) == GIMPLE_LABEL)
4472 {
4473 tree decl = gimple_label_label (stmt);
4474 int uid = LABEL_DECL_UID (decl);
4475
4476 if (uid == -1
4477 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4478 {
4479 error ("incorrect entry in label_to_block_map.\n");
4480 err |= true;
4481 }
4482 }
4483
4484 err |= verify_stmt (&gsi);
211ca15c
RG
4485
4486#ifdef ENABLE_TYPES_CHECKING
4487 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4488 {
4489 debug_gimple_stmt (stmt);
4490 err |= true;
4491 }
4492#endif
726a989a 4493 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
6de9cd9a
DN
4494 if (addr)
4495 {
ab532386 4496 error ("incorrect sharing of tree nodes");
726a989a
RB
4497 debug_gimple_stmt (stmt);
4498 debug_generic_expr (addr);
6de9cd9a
DN
4499 err |= true;
4500 }
726a989a 4501 gsi_next (&gsi);
6de9cd9a
DN
4502 }
4503 }
726a989a 4504
4437b50d
JH
4505 eh_error_found = false;
4506 if (get_eh_throw_stmt_table (cfun))
4507 htab_traverse (get_eh_throw_stmt_table (cfun),
4508 verify_eh_throw_stmt_node,
4509 visited_stmts);
6de9cd9a 4510
4437b50d 4511 if (err | eh_error_found)
ab532386 4512 internal_error ("verify_stmts failed");
6de9cd9a 4513
4437b50d
JH
4514 pointer_set_destroy (visited);
4515 pointer_set_destroy (visited_stmts);
6946b3f7 4516 verify_histograms ();
6de9cd9a
DN
4517 timevar_pop (TV_TREE_STMT_VERIFY);
4518}
4519
4520
4521/* Verifies that the flow information is OK. */
4522
4523static int
726a989a 4524gimple_verify_flow_info (void)
6de9cd9a
DN
4525{
4526 int err = 0;
4527 basic_block bb;
726a989a
RB
4528 gimple_stmt_iterator gsi;
4529 gimple stmt;
6de9cd9a 4530 edge e;
628f6a4e 4531 edge_iterator ei;
6de9cd9a 4532
726a989a 4533 if (ENTRY_BLOCK_PTR->il.gimple)
6de9cd9a 4534 {
7506e1cb 4535 error ("ENTRY_BLOCK has IL associated with it");
6de9cd9a
DN
4536 err = 1;
4537 }
4538
726a989a 4539 if (EXIT_BLOCK_PTR->il.gimple)
6de9cd9a 4540 {
7506e1cb 4541 error ("EXIT_BLOCK has IL associated with it");
6de9cd9a
DN
4542 err = 1;
4543 }
4544
628f6a4e 4545 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
4546 if (e->flags & EDGE_FALLTHRU)
4547 {
ab532386 4548 error ("fallthru to exit from bb %d", e->src->index);
6de9cd9a
DN
4549 err = 1;
4550 }
4551
4552 FOR_EACH_BB (bb)
4553 {
4554 bool found_ctrl_stmt = false;
4555
726a989a 4556 stmt = NULL;
548414c6 4557
6de9cd9a 4558 /* Skip labels on the start of basic block. */
726a989a 4559 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4560 {
726a989a
RB
4561 tree label;
4562 gimple prev_stmt = stmt;
548414c6 4563
726a989a 4564 stmt = gsi_stmt (gsi);
548414c6 4565
726a989a 4566 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
4567 break;
4568
726a989a
RB
4569 label = gimple_label_label (stmt);
4570 if (prev_stmt && DECL_NONLOCAL (label))
548414c6 4571 {
953ff289 4572 error ("nonlocal label ");
726a989a 4573 print_generic_expr (stderr, label, 0);
953ff289
DN
4574 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4575 bb->index);
548414c6
KH
4576 err = 1;
4577 }
4578
726a989a 4579 if (label_to_block (label) != bb)
6de9cd9a 4580 {
953ff289 4581 error ("label ");
726a989a 4582 print_generic_expr (stderr, label, 0);
953ff289
DN
4583 fprintf (stderr, " to block does not match in bb %d",
4584 bb->index);
6de9cd9a
DN
4585 err = 1;
4586 }
4587
726a989a 4588 if (decl_function_context (label) != current_function_decl)
6de9cd9a 4589 {
953ff289 4590 error ("label ");
726a989a 4591 print_generic_expr (stderr, label, 0);
953ff289
DN
4592 fprintf (stderr, " has incorrect context in bb %d",
4593 bb->index);
6de9cd9a
DN
4594 err = 1;
4595 }
4596 }
4597
4598 /* Verify that body of basic block BB is free of control flow. */
726a989a 4599 for (; !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4600 {
726a989a 4601 gimple stmt = gsi_stmt (gsi);
6de9cd9a
DN
4602
4603 if (found_ctrl_stmt)
4604 {
ab532386 4605 error ("control flow in the middle of basic block %d",
6de9cd9a
DN
4606 bb->index);
4607 err = 1;
4608 }
4609
4610 if (stmt_ends_bb_p (stmt))
4611 found_ctrl_stmt = true;
4612
726a989a 4613 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a 4614 {
953ff289 4615 error ("label ");
726a989a 4616 print_generic_expr (stderr, gimple_label_label (stmt), 0);
953ff289 4617 fprintf (stderr, " in the middle of basic block %d", bb->index);
6de9cd9a
DN
4618 err = 1;
4619 }
4620 }
953ff289 4621
726a989a
RB
4622 gsi = gsi_last_bb (bb);
4623 if (gsi_end_p (gsi))
6de9cd9a
DN
4624 continue;
4625
726a989a 4626 stmt = gsi_stmt (gsi);
6de9cd9a 4627
cc7220fd
JH
4628 err |= verify_eh_edges (stmt);
4629
6de9cd9a
DN
4630 if (is_ctrl_stmt (stmt))
4631 {
628f6a4e 4632 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4633 if (e->flags & EDGE_FALLTHRU)
4634 {
ab532386 4635 error ("fallthru edge after a control statement in bb %d",
6de9cd9a
DN
4636 bb->index);
4637 err = 1;
4638 }
4639 }
4640
726a989a 4641 if (gimple_code (stmt) != GIMPLE_COND)
36b24193
ZD
4642 {
4643 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4644 after anything else but if statement. */
4645 FOR_EACH_EDGE (e, ei, bb->succs)
4646 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4647 {
726a989a 4648 error ("true/false edge after a non-GIMPLE_COND in bb %d",
36b24193
ZD
4649 bb->index);
4650 err = 1;
4651 }
4652 }
4653
726a989a 4654 switch (gimple_code (stmt))
6de9cd9a 4655 {
726a989a 4656 case GIMPLE_COND:
6de9cd9a
DN
4657 {
4658 edge true_edge;
4659 edge false_edge;
a9b77cd1 4660
6de9cd9a
DN
4661 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4662
726a989a
RB
4663 if (!true_edge
4664 || !false_edge
6de9cd9a
DN
4665 || !(true_edge->flags & EDGE_TRUE_VALUE)
4666 || !(false_edge->flags & EDGE_FALSE_VALUE)
4667 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4668 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
628f6a4e 4669 || EDGE_COUNT (bb->succs) >= 3)
6de9cd9a 4670 {
ab532386 4671 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4672 bb->index);
4673 err = 1;
4674 }
6de9cd9a
DN
4675 }
4676 break;
4677
726a989a 4678 case GIMPLE_GOTO:
6de9cd9a
DN
4679 if (simple_goto_p (stmt))
4680 {
ab532386 4681 error ("explicit goto at end of bb %d", bb->index);
6531d1be 4682 err = 1;
6de9cd9a
DN
4683 }
4684 else
4685 {
6531d1be 4686 /* FIXME. We should double check that the labels in the
6de9cd9a 4687 destination blocks have their address taken. */
628f6a4e 4688 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4689 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4690 | EDGE_FALSE_VALUE))
4691 || !(e->flags & EDGE_ABNORMAL))
4692 {
ab532386 4693 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4694 bb->index);
4695 err = 1;
4696 }
4697 }
4698 break;
4699
726a989a 4700 case GIMPLE_RETURN:
c5cbcccf
ZD
4701 if (!single_succ_p (bb)
4702 || (single_succ_edge (bb)->flags
4703 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4704 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
6de9cd9a 4705 {
ab532386 4706 error ("wrong outgoing edge flags at end of bb %d", bb->index);
6de9cd9a
DN
4707 err = 1;
4708 }
c5cbcccf 4709 if (single_succ (bb) != EXIT_BLOCK_PTR)
6de9cd9a 4710 {
ab532386 4711 error ("return edge does not point to exit in bb %d",
6de9cd9a
DN
4712 bb->index);
4713 err = 1;
4714 }
4715 break;
4716
726a989a 4717 case GIMPLE_SWITCH:
6de9cd9a 4718 {
7853504d 4719 tree prev;
6de9cd9a
DN
4720 edge e;
4721 size_t i, n;
6de9cd9a 4722
726a989a 4723 n = gimple_switch_num_labels (stmt);
6de9cd9a
DN
4724
4725 /* Mark all the destination basic blocks. */
4726 for (i = 0; i < n; ++i)
4727 {
726a989a 4728 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
6de9cd9a 4729 basic_block label_bb = label_to_block (lab);
1e128c5f 4730 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
6de9cd9a
DN
4731 label_bb->aux = (void *)1;
4732 }
4733
7853504d 4734 /* Verify that the case labels are sorted. */
726a989a 4735 prev = gimple_switch_label (stmt, 0);
b7814a18 4736 for (i = 1; i < n; ++i)
7853504d 4737 {
726a989a
RB
4738 tree c = gimple_switch_label (stmt, i);
4739 if (!CASE_LOW (c))
7853504d 4740 {
726a989a
RB
4741 error ("found default case not at the start of "
4742 "case vector");
4743 err = 1;
7853504d
SB
4744 continue;
4745 }
726a989a
RB
4746 if (CASE_LOW (prev)
4747 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
7853504d 4748 {
953ff289 4749 error ("case labels not sorted: ");
7853504d
SB
4750 print_generic_expr (stderr, prev, 0);
4751 fprintf (stderr," is greater than ");
4752 print_generic_expr (stderr, c, 0);
4753 fprintf (stderr," but comes before it.\n");
4754 err = 1;
4755 }
4756 prev = c;
4757 }
b7814a18
RG
4758 /* VRP will remove the default case if it can prove it will
4759 never be executed. So do not verify there always exists
4760 a default case here. */
7853504d 4761
628f6a4e 4762 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4763 {
4764 if (!e->dest->aux)
4765 {
ab532386 4766 error ("extra outgoing edge %d->%d",
6de9cd9a
DN
4767 bb->index, e->dest->index);
4768 err = 1;
4769 }
726a989a 4770
6de9cd9a
DN
4771 e->dest->aux = (void *)2;
4772 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4773 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4774 {
ab532386 4775 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4776 bb->index);
4777 err = 1;
4778 }
4779 }
4780
4781 /* Check that we have all of them. */
4782 for (i = 0; i < n; ++i)
4783 {
726a989a 4784 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
6de9cd9a
DN
4785 basic_block label_bb = label_to_block (lab);
4786
4787 if (label_bb->aux != (void *)2)
4788 {
726a989a 4789 error ("missing edge %i->%i", bb->index, label_bb->index);
6de9cd9a
DN
4790 err = 1;
4791 }
4792 }
4793
628f6a4e 4794 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4795 e->dest->aux = (void *)0;
4796 }
4797
4798 default: ;
4799 }
4800 }
4801
2b28c07a 4802 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
6de9cd9a
DN
4803 verify_dominators (CDI_DOMINATORS);
4804
4805 return err;
4806}
4807
4808
f0b698c1 4809/* Updates phi nodes after creating a forwarder block joined
6de9cd9a
DN
4810 by edge FALLTHRU. */
4811
4812static void
726a989a 4813gimple_make_forwarder_block (edge fallthru)
6de9cd9a
DN
4814{
4815 edge e;
628f6a4e 4816 edge_iterator ei;
6de9cd9a 4817 basic_block dummy, bb;
726a989a
RB
4818 tree var;
4819 gimple_stmt_iterator gsi;
6de9cd9a
DN
4820
4821 dummy = fallthru->src;
4822 bb = fallthru->dest;
4823
c5cbcccf 4824 if (single_pred_p (bb))
6de9cd9a
DN
4825 return;
4826
cfaab3a9 4827 /* If we redirected a branch we must create new PHI nodes at the
6de9cd9a 4828 start of BB. */
726a989a 4829 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4830 {
726a989a
RB
4831 gimple phi, new_phi;
4832
4833 phi = gsi_stmt (gsi);
4834 var = gimple_phi_result (phi);
6de9cd9a
DN
4835 new_phi = create_phi_node (var, bb);
4836 SSA_NAME_DEF_STMT (var) = new_phi;
726a989a
RB
4837 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4838 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru);
6de9cd9a
DN
4839 }
4840
6de9cd9a 4841 /* Add the arguments we have stored on edges. */
628f6a4e 4842 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a
DN
4843 {
4844 if (e == fallthru)
4845 continue;
4846
71882046 4847 flush_pending_stmts (e);
6de9cd9a
DN
4848 }
4849}
4850
4851
6de9cd9a
DN
4852/* Return a non-special label in the head of basic block BLOCK.
4853 Create one if it doesn't exist. */
4854
d7621d3c 4855tree
726a989a 4856gimple_block_label (basic_block bb)
6de9cd9a 4857{
726a989a 4858 gimple_stmt_iterator i, s = gsi_start_bb (bb);
6de9cd9a 4859 bool first = true;
726a989a
RB
4860 tree label;
4861 gimple stmt;
6de9cd9a 4862
726a989a 4863 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6de9cd9a 4864 {
726a989a
RB
4865 stmt = gsi_stmt (i);
4866 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a 4867 break;
726a989a 4868 label = gimple_label_label (stmt);
6de9cd9a
DN
4869 if (!DECL_NONLOCAL (label))
4870 {
4871 if (!first)
726a989a 4872 gsi_move_before (&i, &s);
6de9cd9a
DN
4873 return label;
4874 }
4875 }
4876
c2255bc4 4877 label = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
4878 stmt = gimple_build_label (label);
4879 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6de9cd9a
DN
4880 return label;
4881}
4882
4883
4884/* Attempt to perform edge redirection by replacing a possibly complex
4885 jump instruction by a goto or by removing the jump completely.
4886 This can apply only if all edges now point to the same block. The
4887 parameters and return values are equivalent to
4888 redirect_edge_and_branch. */
4889
4890static edge
726a989a 4891gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6de9cd9a
DN
4892{
4893 basic_block src = e->src;
726a989a
RB
4894 gimple_stmt_iterator i;
4895 gimple stmt;
6de9cd9a 4896
07b43a87
KH
4897 /* We can replace or remove a complex jump only when we have exactly
4898 two edges. */
4899 if (EDGE_COUNT (src->succs) != 2
4900 /* Verify that all targets will be TARGET. Specifically, the
4901 edge that is not E must also go to TARGET. */
4902 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6de9cd9a
DN
4903 return NULL;
4904
726a989a
RB
4905 i = gsi_last_bb (src);
4906 if (gsi_end_p (i))
6de9cd9a 4907 return NULL;
6de9cd9a 4908
726a989a
RB
4909 stmt = gsi_stmt (i);
4910
4911 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6de9cd9a 4912 {
726a989a 4913 gsi_remove (&i, true);
6de9cd9a
DN
4914 e = ssa_redirect_edge (e, target);
4915 e->flags = EDGE_FALLTHRU;
4916 return e;
4917 }
4918
4919 return NULL;
4920}
4921
4922
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.

   Abnormal edges cannot be redirected; EH edges are handed off to
   redirect_eh_edge.  For a conditional or fallthru edge only the CFG
   edge needs to change; for a switch the case labels must be retargeted
   as well, and a GIMPLE_RETURN terminator is deleted and replaced by a
   fallthru edge.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple stmt;

  /* Abnormal edges (computed gotos, setjmp, ...) cannot be retargeted.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  /* First try the cheap transformation: replace the whole jump by a
     fallthru edge when BB's control statement becomes redundant.  */
  if (e->src != ENTRY_BLOCK_PTR
      && (ret = gimple_try_redirect_by_replacing_jump (e, dest)))
    return ret;

  /* Nothing to do when the edge already points where requested.  */
  if (e->dest == dest)
    return NULL;

  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  /* Dispatch on the control statement ending BB; an empty block is
     treated like the default (fallthru) case via GIMPLE_ERROR_MARK.  */
  gsi = gsi_last_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	tree label = gimple_block_label (dest);
	tree cases = get_cases_for_edge (e, stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Point every case on this edge at DEST's label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = TREE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, stmt);

		TREE_CHAIN (last) = TREE_CHAIN (cases2);
		TREE_CHAIN (cases2) = first;
	      }
	  }
	else
	  {
	    /* No cached case list: scan the whole label vector and
	       rewrite every case currently targeting E->dest.  */
	    size_t i, n = gimple_switch_num_labels (stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }

	break;
      }

    case GIMPLE_RETURN:
      /* Delete the return; control now falls through to DEST.  */
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
5032
14fa2cc0
ZD
5033/* Returns true if it is possible to remove edge E by redirecting
5034 it to the destination of the other edge from E->src. */
5035
5036static bool
726a989a 5037gimple_can_remove_branch_p (const_edge e)
14fa2cc0 5038{
496a4ef5 5039 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
14fa2cc0
ZD
5040 return false;
5041
5042 return true;
5043}
6de9cd9a
DN
5044
5045/* Simple wrapper, as we can always redirect fallthru edges. */
5046
5047static basic_block
726a989a 5048gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6de9cd9a 5049{
726a989a 5050 e = gimple_redirect_edge_and_branch (e, dest);
1e128c5f 5051 gcc_assert (e);
6de9cd9a
DN
5052
5053 return NULL;
5054}
5055
5056
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the newly created block, which receives BB's outgoing
   edges and every statement after the split point.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple act;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* A label cannot be the split point; splitting "after a label" is the
     same as splitting just after all labels, i.e. STMT == NULL.  */
  if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
    stmt = NULL;

  /* Move everything from GSI to the new basic block.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      act = gsi_stmt (gsi);
      /* Labels always stay in BB.  */
      if (gimple_code (act) == GIMPLE_LABEL)
	continue;

      /* STMT == NULL: split at the first non-label statement.  */
      if (!stmt)
	break;

      /* Found the split point; the new block starts just after it.  */
      if (stmt == act)
	{
	  gsi_next (&gsi);
	  break;
	}
    }

  /* Nothing follows the split point; the new block stays empty.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  list = gsi_split_seq_before (&gsi);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
5114
5115
5116/* Moves basic block BB after block AFTER. */
5117
5118static bool
726a989a 5119gimple_move_block_after (basic_block bb, basic_block after)
6de9cd9a
DN
5120{
5121 if (bb->prev_bb == after)
5122 return true;
5123
5124 unlink_block (bb);
5125 link_block (bb, after);
5126
5127 return true;
5128}
5129
5130
5131/* Return true if basic_block can be duplicated. */
5132
5133static bool
2de58650 5134gimple_can_duplicate_bb_p (const_basic_block bb)
6de9cd9a 5135{
2de58650
JH
5136 gimple_stmt_iterator gsi = gsi_last_bb (bb);
5137
5138 /* RTL expander has quite artificial limitation to at most one RESX instruction
5139 per region. It can be fixed by turning 1-1 map to 1-many map, but since the
5140 code needs to be rewritten to gimple level lowering and there is little reason
5141 for duplicating RESX instructions in order to optimize code performance, we
5142 just disallow it for the moment. */
5143 if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_RESX)
5144 return false;
6de9cd9a
DN
5145 return true;
5146}
5147
6de9cd9a
DN
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.

   The copy is placed just before the exit block.  PHI results and all
   statement definitions get fresh SSA names with replacement mappings
   recorded (via create_new_def_for); EH region membership and value
   profile histograms are copied along with each statement.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi, gsi_tgt;
  gimple_seq phis = phi_nodes (bb);
  gimple phi, stmt, copy;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);
      copy = create_phi_node (gimple_phi_result (phi), new_bb);
      create_new_def_for (gimple_phi_result (copy), copy,
			  gimple_phi_result_ptr (copy));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      int region;

      stmt = gsi_stmt (gsi);
      /* Labels are not duplicated; the copy gets its own labels
	 later if needed.  */
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
      /* Keep the copy in the same EH region as the original.  */
      region = lookup_stmt_eh_region (stmt);
      if (region >= 0)
	add_stmt_to_eh_region (copy, region);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
5199
5f40b3cb
ZD
/* Adds phi node arguments for edge E_COPY after basic block duplication.

   E_COPY leaves a duplicated block; find the corresponding original
   edge E and, walking the PHI nodes of E->dest and E_COPY->dest in
   lock step, copy the argument each PHI takes on E to its counterpart
   on E_COPY.  The lock-step walk relies on block duplication keeping
   PHIs in the same order.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gimple phi, phi_copy;
  tree def;
  gimple_stmt_iterator psi, psi_copy;

  /* Nothing to do when the destination has no PHI nodes.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the copied source block back to its original (if it is a copy).  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Lock-step over the PHIs of the original and copied destinations.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = gsi_stmt (psi);
      phi_copy = gsi_stmt (psi_copy);
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy);
    }
}
5249
84d65814 5250
42759f1e
ZD
5251/* Basic block BB_COPY was created by code duplication. Add phi node
5252 arguments for edges going out of BB_COPY. The blocks that were
6580ee77 5253 duplicated have BB_DUPLICATED set. */
42759f1e
ZD
5254
5255void
5256add_phi_args_after_copy_bb (basic_block bb_copy)
5257{
5f40b3cb 5258 edge e_copy;
726a989a 5259 edge_iterator ei;
42759f1e 5260
628f6a4e 5261 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
42759f1e 5262 {
5f40b3cb 5263 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5264 }
5265}
5266
5267/* Blocks in REGION_COPY array of length N_REGION were created by
5268 duplication of basic blocks. Add phi node arguments for edges
5f40b3cb
ZD
5269 going from these blocks. If E_COPY is not NULL, also add
5270 phi node arguments for its destination.*/
42759f1e
ZD
5271
5272void
5f40b3cb
ZD
5273add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5274 edge e_copy)
42759f1e
ZD
5275{
5276 unsigned i;
5277
5278 for (i = 0; i < n_region; i++)
6580ee77 5279 region_copy[i]->flags |= BB_DUPLICATED;
42759f1e
ZD
5280
5281 for (i = 0; i < n_region; i++)
5282 add_phi_args_after_copy_bb (region_copy[i]);
5f40b3cb
ZD
5283 if (e_copy)
5284 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5285
5286 for (i = 0; i < n_region; i++)
6580ee77 5287 region_copy[i]->flags &= ~BB_DUPLICATED;
42759f1e
ZD
5288}
5289
42759f1e
ZD
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  SSA form, dominance and loop information
   is updated.  The new basic blocks are stored to REGION_COPY in the same
   order as they had in REGION, provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  VEC (basic_block, heap) *doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may only appear as the region entry.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  set_loop_copy (loop, loop);

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      set_loop_copy (loop, loop_outer (loop));

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = NULL;
  initialize_original_copy_tables ();

  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Compute the profile split: either by counts or by frequencies,
     depending on whether profile counts are available.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry));
  /* Scale the profile of the original region down and give the copy
     the part that flows through ENTRY.  */
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
  VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  VEC_free (basic_block, heap, doms);

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
5436
5437/* Duplicates REGION consisting of N_REGION blocks. The new blocks
5438 are stored to REGION_COPY in the same order in that they appear
5439 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5440 the region, EXIT an exit from it. The condition guarding EXIT
5441 is moved to ENTRY. Returns true if duplication succeeds, false
5442 otherwise.
5443
5444 For example,
5445
5446 some_code;
5447 if (cond)
5448 A;
5449 else
5450 B;
5451
5452 is transformed to
5453
5454 if (cond)
5455 {
5456 some_code;
5457 A;
5458 }
5459 else
5460 {
5461 some_code;
5462 B;
5463 }
5464*/
5465
5466bool
726a989a
RB
5467gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5468 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5469 basic_block *region_copy ATTRIBUTE_UNUSED)
5f40b3cb
ZD
5470{
5471 unsigned i;
5472 bool free_region_copy = false;
5473 struct loop *loop = exit->dest->loop_father;
5474 struct loop *orig_loop = entry->dest->loop_father;
5475 basic_block switch_bb, entry_bb, nentry_bb;
5476 VEC (basic_block, heap) *doms;
5477 int total_freq = 0, exit_freq = 0;
5478 gcov_type total_count = 0, exit_count = 0;
5479 edge exits[2], nexits[2], e;
726a989a
RB
5480 gimple_stmt_iterator gsi;
5481 gimple cond_stmt;
5f40b3cb
ZD
5482 edge sorig, snew;
5483
5484 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5485 exits[0] = exit;
5486 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5487
5488 if (!can_copy_bbs_p (region, n_region))
5489 return false;
5490
5491 /* Some sanity checking. Note that we do not check for all possible
5492 missuses of the functions. I.e. if you ask to copy something weird
5493 (e.g., in the example, if there is a jump from inside to the middle
5494 of some_code, or come_code defines some of the values used in cond)
5495 it will work, but the resulting code will not be correct. */
5496 for (i = 0; i < n_region; i++)
5497 {
5498 /* We do not handle subloops, i.e. all the blocks must belong to the
5499 same loop. */
5500 if (region[i]->loop_father != orig_loop)
5501 return false;
5502
5503 if (region[i] == orig_loop->latch)
5504 return false;
5505 }
5506
5507 initialize_original_copy_tables ();
5508 set_loop_copy (orig_loop, loop);
5509
5510 if (!region_copy)
5511 {
5512 region_copy = XNEWVEC (basic_block, n_region);
5513 free_region_copy = true;
5514 }
5515
5006671f 5516 gcc_assert (!need_ssa_update_p (cfun));
5f40b3cb
ZD
5517
5518 /* Record blocks outside the region that are dominated by something
5519 inside. */
5520 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5521
5522 if (exit->src->count)
5523 {
5524 total_count = exit->src->count;
5525 exit_count = exit->count;
5526 /* Fix up corner cases, to avoid division by zero or creation of negative
5527 frequencies. */
5528 if (exit_count > total_count)
5529 exit_count = total_count;
5530 }
5531 else
5532 {
5533 total_freq = exit->src->frequency;
5534 exit_freq = EDGE_FREQUENCY (exit);
5535 /* Fix up corner cases, to avoid division by zero or creation of negative
5536 frequencies. */
5537 if (total_freq == 0)
5538 total_freq = 1;
5539 if (exit_freq > total_freq)
5540 exit_freq = total_freq;
5541 }
5542
5543 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5544 split_edge_bb_loc (exit));
5545 if (total_count)
5546 {
5547 scale_bbs_frequencies_gcov_type (region, n_region,
5548 total_count - exit_count,
5549 total_count);
5550 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5551 total_count);
5552 }
5553 else
5554 {
5555 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5556 total_freq);
5557 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5558 }
5559
5560 /* Create the switch block, and put the exit condition to it. */
5561 entry_bb = entry->dest;
5562 nentry_bb = get_bb_copy (entry_bb);
5563 if (!last_stmt (entry->src)
5564 || !stmt_ends_bb_p (last_stmt (entry->src)))
5565 switch_bb = entry->src;
5566 else
5567 switch_bb = split_edge (entry);
5568 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5569
726a989a
RB
5570 gsi = gsi_last_bb (switch_bb);
5571 cond_stmt = last_stmt (exit->src);
5572 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5573 cond_stmt = gimple_copy (cond_stmt);
5574 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5575 gimple_cond_set_rhs (cond_stmt, unshare_expr (gimple_cond_rhs (cond_stmt)));
5576 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5f40b3cb
ZD
5577
5578 sorig = single_succ_edge (switch_bb);
5579 sorig->flags = exits[1]->flags;
5580 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5581
5582 /* Register the new edge from SWITCH_BB in loop exit lists. */
5583 rescan_loop_exit (snew, true, false);
5584
5585 /* Add the PHI node arguments. */
5586 add_phi_args_after_copy (region_copy, n_region, snew);
5587
5588 /* Get rid of now superfluous conditions and associated edges (and phi node
5589 arguments). */
5590 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
726a989a 5591 PENDING_STMT (e) = NULL;
5f40b3cb 5592 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
726a989a 5593 PENDING_STMT (e) = NULL;
5f40b3cb
ZD
5594
5595 /* Anything that is outside of the region, but was dominated by something
5596 inside needs to update dominance info. */
5597 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5598 VEC_free (basic_block, heap, doms);
42759f1e 5599
84d65814
DN
5600 /* Update the SSA web. */
5601 update_ssa (TODO_update_ssa);
42759f1e
ZD
5602
5603 if (free_region_copy)
5604 free (region_copy);
5605
6580ee77 5606 free_original_copy_tables ();
42759f1e
ZD
5607 return true;
5608}
6de9cd9a 5609
50674e96
DN
5610/* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5611 adding blocks when the dominator traversal reaches EXIT. This
5612 function silently assumes that ENTRY strictly dominates EXIT. */
5613
9f9f72aa 5614void
50674e96
DN
5615gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5616 VEC(basic_block,heap) **bbs_p)
5617{
5618 basic_block son;
5619
5620 for (son = first_dom_son (CDI_DOMINATORS, entry);
5621 son;
5622 son = next_dom_son (CDI_DOMINATORS, son))
5623 {
5624 VEC_safe_push (basic_block, heap, *bbs_p, son);
5625 if (son != exit)
5626 gather_blocks_in_sese_region (son, exit, bbs_p);
5627 }
5628}
5629
917948d3
ZD
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP.

   The map guarantees each decl is duplicated only once; a decl already
   belonging to TO_CONTEXT is left untouched.  Variables are copied with
   copy_var_decl and added to TO_CONTEXT's local_decls; only CONST_DECLs
   are otherwise expected, and those are plain node copies.  */

static void
replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);
  void **loc;

  /* Already in the destination function: nothing to do.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  loc = pointer_map_contains (vars_map, t);

  if (!loc)
    {
      /* First time we see T: create and record the duplicate.  */
      loc = pointer_map_insert (vars_map, t);

      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
	}
      else
	{
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      *loc = new_t;
    }
  else
    /* Reuse the duplicate created earlier.  */
    new_t = (tree) *loc;

  *tp = new_t;
}
5669
726a989a 5670
917948d3
ZD
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.

   Only GIMPLE register names are handled.  The underlying decl is
   duplicated first (via replace_by_duplicate_decl), then the new SSA
   name is created with cfun temporarily switched to TO_CONTEXT so the
   name is allocated in the right function; default definitions stay
   default definitions.  Results are memoized in VARS_MAP.  */

static tree
replace_ssa_name (tree name, struct pointer_map_t *vars_map,
		  tree to_context)
{
  void **loc;
  tree new_name, decl = SSA_NAME_VAR (name);

  gcc_assert (is_gimple_reg (name));

  loc = pointer_map_contains (vars_map, name);

  if (!loc)
    {
      replace_by_duplicate_decl (&decl, vars_map, to_context);

      /* Switch to the destination function so the new SSA name is
	 created in its tables.  */
      push_cfun (DECL_STRUCT_FUNCTION (to_context));
      if (gimple_in_ssa_p (cfun))
	add_referenced_var (decl);

      new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	set_default_def (decl, new_name);
      pop_cfun ();

      loc = pointer_map_insert (vars_map, name);
      *loc = new_name;
    }
  else
    new_name = (tree) *loc;

  return new_name;
}
50674e96
DN
5706
/* Context passed through the walkers (move_stmt_r / move_stmt_op) used
   by move_block_to_fn when moving blocks to another function.  */

struct move_stmt_d
{
  tree orig_block;		/* Scope block being moved (may be NULL).  */
  tree new_block;		/* Replacement scope block in the new fn.  */
  tree from_context;		/* Source FUNCTION_DECL.  */
  tree to_context;		/* Destination FUNCTION_DECL.  */
  struct pointer_map_t *vars_map; /* Maps old decls/SSA names to duplicates.  */
  htab_t new_label_map;		/* Optional map of relabeled LABEL_DECLs.  */
  bool remap_decls_p;		/* False inside OMP directives, where
				   variables must not be remapped.  */
};
5717
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.

   This is the operand callback for walk_gimple_stmt; DATA is the
   walk_stmt_info whose info field points at the move_stmt_d context.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    /* We should never have TREE_BLOCK set on non-statements.  */
    gcc_assert (!TREE_BLOCK (t));

  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels may have been renamed; consult the label map first.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);

	  if (SSA_VAR_P (t)
	      && gimple_in_ssa_p (cfun))
	    {
	      /* Register the (possibly duplicated) variable in the
		 destination function.  */
	      push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
	      add_referenced_var (*tp);
	      pop_cfun ();
	    }
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    /* Types need no remapping; do not walk into them.  */
    *walk_subtrees = 0;

  return NULL_TREE;
}
5779
726a989a
RB
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (p->orig_block == NULL_TREE
      || block == p->orig_block
      || block == NULL_TREE)
    gimple_set_block (stmt, p->new_block);
#ifdef ENABLE_CHECKING
  else if (block != p->new_block)
    {
      /* Any other block must be nested inside ORIG_BLOCK; verify by
	 walking up the BLOCK_SUPERCONTEXT chain.  */
      while (block && block != p->orig_block)
	block = BLOCK_SUPERCONTEXT (block);
      gcc_assert (block);
    }
#endif

  if (is_gimple_omp (stmt)
      && gimple_code (stmt) != GIMPLE_OMP_RETURN
      && gimple_code (stmt) != GIMPLE_OMP_CONTINUE)
    {
      /* Do not remap variables inside OMP directives.  Variables
	 referenced in clauses and directive header belong to the
	 parent function and should not be moved into the child
	 function.  */
      bool save_remap_decls_p = p->remap_decls_p;
      p->remap_decls_p = false;
      *handled_ops_p = true;

      walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r, move_stmt_op, wi);

      p->remap_decls_p = save_remap_decls_p;
    }

  return NULL_TREE;
}
5827
917948d3
ZD
5828/* Marks virtual operands of all statements in basic blocks BBS for
5829 renaming. */
5830
dea61d92
SP
5831void
5832mark_virtual_ops_in_bb (basic_block bb)
917948d3 5833{
726a989a 5834 gimple_stmt_iterator gsi;
dea61d92 5835
726a989a
RB
5836 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5837 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
dea61d92 5838
726a989a
RB
5839 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5840 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
dea61d92
SP
5841}
5842
50674e96
DN
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.

   D carries the remapping context (see struct move_stmt_d); EH_OFFSET
   is added to each statement's EH region number when registering it in
   DEST_CFUN's EH tables.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d, int eh_offset)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);
  if (current_loops)
    remove_bb_from_loops (bb);

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = VEC_length (basic_block, cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
			     new_len);
    }

  VEC_replace (basic_block, cfg->x_basic_block_info,
               bb->index, bb);

  /* Remap the variables in phi nodes.  */
  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
    {
      gimple phi = gsi_stmt (si);
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;

      if (!is_gimple_reg (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&si, true);
	  continue;
	}

      /* Rewrite the PHI result and each SSA-name argument into the
	 destination function's namespace.  */
      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      gsi_next (&si);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      int region;
      struct walk_stmt_info wi;

      /* Remap decls, SSA names and scope blocks in the statement.  */
      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Move the label's uid -> block mapping from the old CFG to
	     the new one, growing the target map if necessary.  */
	  old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      VEC_safe_grow_cleared (basic_block, gc,
				     cfg->x_label_to_block_map, new_len);
	    }

	  VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
	  VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}
      else if (gimple_code (stmt) == GIMPLE_RESX && eh_offset != 0)
	gimple_resx_set_region (stmt, gimple_resx_region (stmt) + eh_offset);

      /* Transfer EH region membership (offset by EH_OFFSET) and value
	 profile histograms to the destination function.  */
      region = lookup_stmt_eh_region (stmt);
      if (region >= 0)
	{
	  add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
	  remove_stmt_from_eh_region (stmt);
	  gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
	  gimple_remove_stmt_histograms (cfun, stmt);
	}

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Retarget goto_block on outgoing edges into the new scope block,
     verifying (when checking) that any other block is nested in
     ORIG_BLOCK.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus)
      {
	tree block = e->goto_block;
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_block = d->new_block;
#ifdef ENABLE_CHECKING
	else if (block != d->new_block)
	  {
	    while (block && block != d->orig_block)
	      block = BLOCK_SUPERCONTEXT (block);
	    gcc_assert (block);
	  }
#endif
      }
}
5999
6000/* Examine the statements in BB (which is in SRC_CFUN); find and return
6001 the outermost EH region. Use REGION as the incoming base EH region. */
6002
6003static int
6004find_outermost_region_in_block (struct function *src_cfun,
6005 basic_block bb, int region)
6006{
726a989a 6007 gimple_stmt_iterator si;
6531d1be 6008
726a989a 6009 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
fad41cd7 6010 {
726a989a 6011 gimple stmt = gsi_stmt (si);
fad41cd7 6012 int stmt_region;
1799e5d5 6013
726a989a
RB
6014 if (gimple_code (stmt) == GIMPLE_RESX)
6015 stmt_region = gimple_resx_region (stmt);
07ed51c9
JJ
6016 else
6017 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
7e2df4a1
JJ
6018 if (stmt_region > 0)
6019 {
6020 if (region < 0)
6021 region = stmt_region;
6022 else if (stmt_region != region)
6023 {
6024 region = eh_region_outermost (src_cfun, stmt_region, region);
6025 gcc_assert (region != -1);
6026 }
6027 }
50674e96 6028 }
fad41cd7
RH
6029
6030 return region;
50674e96
DN
6031}
6032
fad41cd7
RH
6033static tree
6034new_label_mapper (tree decl, void *data)
6035{
6036 htab_t hash = (htab_t) data;
6037 struct tree_map *m;
6038 void **slot;
6039
6040 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6041
3d9a9f94 6042 m = XNEW (struct tree_map);
fad41cd7 6043 m->hash = DECL_UID (decl);
fc8600f9 6044 m->base.from = decl;
c2255bc4 6045 m->to = create_artificial_label (UNKNOWN_LOCATION);
fad41cd7 6046 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
cb91fab0
JH
6047 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6048 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
fad41cd7
RH
6049
6050 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6051 gcc_assert (*slot == NULL);
6052
6053 *slot = m;
6054
6055 return m->to;
6056}
50674e96 6057
b357f682
JJ
6058/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6059 subblocks. */
6060
6061static void
6062replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6063 tree to_context)
6064{
6065 tree *tp, t;
6066
6067 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
6068 {
6069 t = *tp;
e1e2bac4
JJ
6070 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6071 continue;
b357f682
JJ
6072 replace_by_duplicate_decl (&t, vars_map, to_context);
6073 if (t != *tp)
6074 {
6075 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6076 {
6077 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6078 DECL_HAS_VALUE_EXPR_P (t) = 1;
6079 }
6080 TREE_CHAIN (t) = TREE_CHAIN (*tp);
6081 *tp = t;
6082 }
6083 }
6084
6085 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6086 replace_block_vars_by_duplicates (block, vars_map, to_context);
6087}
6088
50674e96
DN
6089/* Move a single-entry, single-exit region delimited by ENTRY_BB and
6090 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6091 single basic block in the original CFG and the new basic block is
6092 returned. DEST_CFUN must not have a CFG yet.
6093
6094 Note that the region need not be a pure SESE region. Blocks inside
6095 the region may contain calls to abort/exit. The only restriction
6096 is that ENTRY_BB should be the only entry point and it must
6097 dominate EXIT_BB.
6098
b357f682
JJ
6099 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6100 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6101 to the new function.
6102
50674e96
DN
6103 All local variables referenced in the region are assumed to be in
6104 the corresponding BLOCK_VARS and unexpanded variable lists
6105 associated with DEST_CFUN. */
6106
6107basic_block
6108move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
b357f682 6109 basic_block exit_bb, tree orig_block)
50674e96 6110{
917948d3
ZD
6111 VEC(basic_block,heap) *bbs, *dom_bbs;
6112 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6113 basic_block after, bb, *entry_pred, *exit_succ, abb;
6114 struct function *saved_cfun = cfun;
fad41cd7 6115 int *entry_flag, *exit_flag, eh_offset;
917948d3 6116 unsigned *entry_prob, *exit_prob;
50674e96
DN
6117 unsigned i, num_entry_edges, num_exit_edges;
6118 edge e;
6119 edge_iterator ei;
fad41cd7 6120 htab_t new_label_map;
917948d3 6121 struct pointer_map_t *vars_map;
5f40b3cb 6122 struct loop *loop = entry_bb->loop_father;
b357f682 6123 struct move_stmt_d d;
50674e96
DN
6124
6125 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6126 region. */
6127 gcc_assert (entry_bb != exit_bb
2aee3e57
JJ
6128 && (!exit_bb
6129 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
50674e96 6130
917948d3
ZD
6131 /* Collect all the blocks in the region. Manually add ENTRY_BB
6132 because it won't be added by dfs_enumerate_from. */
50674e96
DN
6133 bbs = NULL;
6134 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6135 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6136
917948d3
ZD
6137 /* The blocks that used to be dominated by something in BBS will now be
6138 dominated by the new block. */
6139 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6140 VEC_address (basic_block, bbs),
6141 VEC_length (basic_block, bbs));
6142
50674e96
DN
6143 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6144 the predecessor edges to ENTRY_BB and the successor edges to
6145 EXIT_BB so that we can re-attach them to the new basic block that
6146 will replace the region. */
6147 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6148 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6149 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
917948d3 6150 entry_prob = XNEWVEC (unsigned, num_entry_edges);
50674e96
DN
6151 i = 0;
6152 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6153 {
917948d3 6154 entry_prob[i] = e->probability;
50674e96
DN
6155 entry_flag[i] = e->flags;
6156 entry_pred[i++] = e->src;
6157 remove_edge (e);
6158 }
6159
2aee3e57 6160 if (exit_bb)
50674e96 6161 {
2aee3e57
JJ
6162 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6163 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6164 sizeof (basic_block));
6165 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
917948d3 6166 exit_prob = XNEWVEC (unsigned, num_exit_edges);
2aee3e57
JJ
6167 i = 0;
6168 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6169 {
917948d3 6170 exit_prob[i] = e->probability;
2aee3e57
JJ
6171 exit_flag[i] = e->flags;
6172 exit_succ[i++] = e->dest;
6173 remove_edge (e);
6174 }
6175 }
6176 else
6177 {
6178 num_exit_edges = 0;
6179 exit_succ = NULL;
6180 exit_flag = NULL;
917948d3 6181 exit_prob = NULL;
50674e96
DN
6182 }
6183
6184 /* Switch context to the child function to initialize DEST_FN's CFG. */
6185 gcc_assert (dest_cfun->cfg == NULL);
917948d3 6186 push_cfun (dest_cfun);
fad41cd7 6187
50674e96 6188 init_empty_tree_cfg ();
fad41cd7
RH
6189
6190 /* Initialize EH information for the new function. */
6191 eh_offset = 0;
6192 new_label_map = NULL;
6193 if (saved_cfun->eh)
6194 {
6195 int region = -1;
6196
6197 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6198 region = find_outermost_region_in_block (saved_cfun, bb, region);
6199
6200 init_eh_for_function ();
6201 if (region != -1)
6202 {
6203 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6204 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
6205 new_label_map, region, 0);
6206 }
6207 }
6208
917948d3
ZD
6209 pop_cfun ();
6210
50674e96
DN
6211 /* Move blocks from BBS into DEST_CFUN. */
6212 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6213 after = dest_cfun->cfg->x_entry_block_ptr;
917948d3 6214 vars_map = pointer_map_create ();
b357f682
JJ
6215
6216 memset (&d, 0, sizeof (d));
6217 d.vars_map = vars_map;
6218 d.from_context = cfun->decl;
6219 d.to_context = dest_cfun->decl;
6220 d.new_label_map = new_label_map;
6221 d.remap_decls_p = true;
6222 d.orig_block = orig_block;
6223 d.new_block = DECL_INITIAL (dest_cfun->decl);
6224
50674e96
DN
6225 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6226 {
6227 /* No need to update edge counts on the last block. It has
6228 already been updated earlier when we detached the region from
6229 the original CFG. */
b357f682 6230 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d, eh_offset);
50674e96
DN
6231 after = bb;
6232 }
6233
b357f682
JJ
6234 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6235 if (orig_block)
6236 {
6237 tree block;
6238 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6239 == NULL_TREE);
6240 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6241 = BLOCK_SUBBLOCKS (orig_block);
6242 for (block = BLOCK_SUBBLOCKS (orig_block);
6243 block; block = BLOCK_CHAIN (block))
6244 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6245 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6246 }
6247
6248 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6249 vars_map, dest_cfun->decl);
6250
fad41cd7
RH
6251 if (new_label_map)
6252 htab_delete (new_label_map);
917948d3 6253 pointer_map_destroy (vars_map);
50674e96
DN
6254
6255 /* Rewire the entry and exit blocks. The successor to the entry
6256 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6257 the child function. Similarly, the predecessor of DEST_FN's
6258 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6259 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6260 various CFG manipulation function get to the right CFG.
6261
6262 FIXME, this is silly. The CFG ought to become a parameter to
6263 these helpers. */
917948d3 6264 push_cfun (dest_cfun);
50674e96 6265 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
2aee3e57
JJ
6266 if (exit_bb)
6267 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
917948d3 6268 pop_cfun ();
50674e96
DN
6269
6270 /* Back in the original function, the SESE region has disappeared,
6271 create a new basic block in its place. */
6272 bb = create_empty_bb (entry_pred[0]);
5f40b3cb
ZD
6273 if (current_loops)
6274 add_bb_to_loop (bb, loop);
50674e96 6275 for (i = 0; i < num_entry_edges; i++)
917948d3
ZD
6276 {
6277 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6278 e->probability = entry_prob[i];
6279 }
50674e96
DN
6280
6281 for (i = 0; i < num_exit_edges; i++)
917948d3
ZD
6282 {
6283 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6284 e->probability = exit_prob[i];
6285 }
6286
6287 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6288 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6289 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6290 VEC_free (basic_block, heap, dom_bbs);
50674e96 6291
2aee3e57
JJ
6292 if (exit_bb)
6293 {
917948d3 6294 free (exit_prob);
2aee3e57
JJ
6295 free (exit_flag);
6296 free (exit_succ);
6297 }
917948d3 6298 free (entry_prob);
50674e96
DN
6299 free (entry_flag);
6300 free (entry_pred);
50674e96
DN
6301 VEC_free (basic_block, heap, bbs);
6302
6303 return bb;
6304}
6305
84d65814 6306
726a989a
RB
6307/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h)
6308 */
6de9cd9a
DN
6309
6310void
6311dump_function_to_file (tree fn, FILE *file, int flags)
6312{
6313 tree arg, vars, var;
459ffad3 6314 struct function *dsf;
6de9cd9a
DN
6315 bool ignore_topmost_bind = false, any_var = false;
6316 basic_block bb;
6317 tree chain;
6531d1be 6318
673fda6b 6319 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6de9cd9a
DN
6320
6321 arg = DECL_ARGUMENTS (fn);
6322 while (arg)
6323 {
2f9ea521
RG
6324 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6325 fprintf (file, " ");
6de9cd9a 6326 print_generic_expr (file, arg, dump_flags);
3e894af1
KZ
6327 if (flags & TDF_VERBOSE)
6328 print_node (file, "", arg, 4);
6de9cd9a
DN
6329 if (TREE_CHAIN (arg))
6330 fprintf (file, ", ");
6331 arg = TREE_CHAIN (arg);
6332 }
6333 fprintf (file, ")\n");
6334
3e894af1
KZ
6335 if (flags & TDF_VERBOSE)
6336 print_node (file, "", fn, 2);
6337
459ffad3
EB
6338 dsf = DECL_STRUCT_FUNCTION (fn);
6339 if (dsf && (flags & TDF_DETAILS))
6340 dump_eh_tree (file, dsf);
6341
39ecc018 6342 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6de9cd9a
DN
6343 {
6344 dump_node (fn, TDF_SLIM | flags, file);
6345 return;
6346 }
6347
953ff289 6348 /* Switch CFUN to point to FN. */
db2960f4 6349 push_cfun (DECL_STRUCT_FUNCTION (fn));
953ff289 6350
6de9cd9a
DN
6351 /* When GIMPLE is lowered, the variables are no longer available in
6352 BIND_EXPRs, so display them separately. */
cb91fab0 6353 if (cfun && cfun->decl == fn && cfun->local_decls)
6de9cd9a
DN
6354 {
6355 ignore_topmost_bind = true;
6356
6357 fprintf (file, "{\n");
cb91fab0 6358 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6de9cd9a
DN
6359 {
6360 var = TREE_VALUE (vars);
6361
6362 print_generic_decl (file, var, flags);
3e894af1
KZ
6363 if (flags & TDF_VERBOSE)
6364 print_node (file, "", var, 4);
6de9cd9a
DN
6365 fprintf (file, "\n");
6366
6367 any_var = true;
6368 }
6369 }
6370
32a87d45 6371 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6de9cd9a 6372 {
726a989a 6373 /* If the CFG has been built, emit a CFG-based dump. */
878f99d2 6374 check_bb_profile (ENTRY_BLOCK_PTR, file);
6de9cd9a
DN
6375 if (!ignore_topmost_bind)
6376 fprintf (file, "{\n");
6377
6378 if (any_var && n_basic_blocks)
6379 fprintf (file, "\n");
6380
6381 FOR_EACH_BB (bb)
726a989a 6382 gimple_dump_bb (bb, file, 2, flags);
6531d1be 6383
6de9cd9a 6384 fprintf (file, "}\n");
878f99d2 6385 check_bb_profile (EXIT_BLOCK_PTR, file);
6de9cd9a 6386 }
726a989a
RB
6387 else if (DECL_SAVED_TREE (fn) == NULL)
6388 {
6389 /* The function is now in GIMPLE form but the CFG has not been
6390 built yet. Emit the single sequence of GIMPLE statements
6391 that make up its body. */
6392 gimple_seq body = gimple_body (fn);
6393
6394 if (gimple_seq_first_stmt (body)
6395 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6396 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6397 print_gimple_seq (file, body, 0, flags);
6398 else
6399 {
6400 if (!ignore_topmost_bind)
6401 fprintf (file, "{\n");
6402
6403 if (any_var)
6404 fprintf (file, "\n");
6405
6406 print_gimple_seq (file, body, 2, flags);
6407 fprintf (file, "}\n");
6408 }
6409 }
6de9cd9a
DN
6410 else
6411 {
6412 int indent;
6413
6414 /* Make a tree based dump. */
6415 chain = DECL_SAVED_TREE (fn);
6416
953ff289 6417 if (chain && TREE_CODE (chain) == BIND_EXPR)
6de9cd9a
DN
6418 {
6419 if (ignore_topmost_bind)
6420 {
6421 chain = BIND_EXPR_BODY (chain);
6422 indent = 2;
6423 }
6424 else
6425 indent = 0;
6426 }
6427 else
6428 {
6429 if (!ignore_topmost_bind)
6430 fprintf (file, "{\n");
6431 indent = 2;
6432 }
6433
6434 if (any_var)
6435 fprintf (file, "\n");
6436
6437 print_generic_stmt_indented (file, chain, flags, indent);
6438 if (ignore_topmost_bind)
6439 fprintf (file, "}\n");
6440 }
6441
6442 fprintf (file, "\n\n");
953ff289
DN
6443
6444 /* Restore CFUN. */
db2960f4 6445 pop_cfun ();
953ff289
DN
6446}
6447
6448
6449/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6450
6451void
6452debug_function (tree fn, int flags)
6453{
6454 dump_function_to_file (fn, stderr, flags);
6de9cd9a
DN
6455}
6456
6457
d7770457 6458/* Print on FILE the indexes for the predecessors of basic_block BB. */
6de9cd9a
DN
6459
6460static void
628f6a4e 6461print_pred_bbs (FILE *file, basic_block bb)
6de9cd9a 6462{
628f6a4e
BE
6463 edge e;
6464 edge_iterator ei;
6465
6466 FOR_EACH_EDGE (e, ei, bb->preds)
d7770457 6467 fprintf (file, "bb_%d ", e->src->index);
6de9cd9a
DN
6468}
6469
6470
d7770457 6471/* Print on FILE the indexes for the successors of basic_block BB. */
6de9cd9a
DN
6472
6473static void
628f6a4e 6474print_succ_bbs (FILE *file, basic_block bb)
6de9cd9a 6475{
628f6a4e
BE
6476 edge e;
6477 edge_iterator ei;
6478
6479 FOR_EACH_EDGE (e, ei, bb->succs)
d7770457 6480 fprintf (file, "bb_%d ", e->dest->index);
6de9cd9a
DN
6481}
6482
0c8efed8
SP
6483/* Print to FILE the basic block BB following the VERBOSITY level. */
6484
6485void
6486print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6487{
6488 char *s_indent = (char *) alloca ((size_t) indent + 1);
6489 memset ((void *) s_indent, ' ', (size_t) indent);
6490 s_indent[indent] = '\0';
6491
6492 /* Print basic_block's header. */
6493 if (verbosity >= 2)
6494 {
6495 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6496 print_pred_bbs (file, bb);
6497 fprintf (file, "}, succs = {");
6498 print_succ_bbs (file, bb);
6499 fprintf (file, "})\n");
6500 }
6501
6502 /* Print basic_block's body. */
6503 if (verbosity >= 3)
6504 {
6505 fprintf (file, "%s {\n", s_indent);
726a989a 6506 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
0c8efed8
SP
6507 fprintf (file, "%s }\n", s_indent);
6508 }
6509}
6510
6511static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6de9cd9a 6512
0c8efed8
SP
6513/* Pretty print LOOP on FILE, indented INDENT spaces. Following
6514 VERBOSITY level this outputs the contents of the loop, or just its
6515 structure. */
6de9cd9a
DN
6516
6517static void
0c8efed8 6518print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6de9cd9a
DN
6519{
6520 char *s_indent;
6521 basic_block bb;
6531d1be 6522
6de9cd9a
DN
6523 if (loop == NULL)
6524 return;
6525
6526 s_indent = (char *) alloca ((size_t) indent + 1);
6527 memset ((void *) s_indent, ' ', (size_t) indent);
6528 s_indent[indent] = '\0';
6529
0c8efed8
SP
6530 /* Print loop's header. */
6531 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6532 loop->num, loop->header->index, loop->latch->index);
6533 fprintf (file, ", niter = ");
6534 print_generic_expr (file, loop->nb_iterations, 0);
6531d1be 6535
0c8efed8
SP
6536 if (loop->any_upper_bound)
6537 {
6538 fprintf (file, ", upper_bound = ");
6539 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6540 }
6531d1be 6541
0c8efed8
SP
6542 if (loop->any_estimate)
6543 {
6544 fprintf (file, ", estimate = ");
6545 dump_double_int (file, loop->nb_iterations_estimate, true);
6546 }
6547 fprintf (file, ")\n");
6548
6549 /* Print loop's body. */
6550 if (verbosity >= 1)
6551 {
6552 fprintf (file, "%s{\n", s_indent);
6553 FOR_EACH_BB (bb)
6554 if (bb->loop_father == loop)
6555 print_loops_bb (file, bb, indent, verbosity);
6556
6557 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6558 fprintf (file, "%s}\n", s_indent);
6559 }
6de9cd9a
DN
6560}
6561
0c8efed8
SP
6562/* Print the LOOP and its sibling loops on FILE, indented INDENT
6563 spaces. Following VERBOSITY level this outputs the contents of the
6564 loop, or just its structure. */
6565
6566static void
6567print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6568{
6569 if (loop == NULL)
6570 return;
6571
6572 print_loop (file, loop, indent, verbosity);
6573 print_loop_and_siblings (file, loop->next, indent, verbosity);
6574}
6de9cd9a
DN
6575
6576/* Follow a CFG edge from the entry point of the program, and on entry
6577 of a loop, pretty print the loop structure on FILE. */
6578
6531d1be 6579void
0c8efed8 6580print_loops (FILE *file, int verbosity)
6de9cd9a
DN
6581{
6582 basic_block bb;
6531d1be 6583
f8bf9252 6584 bb = ENTRY_BLOCK_PTR;
6de9cd9a 6585 if (bb && bb->loop_father)
0c8efed8 6586 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6de9cd9a
DN
6587}
6588
6589
0c8efed8
SP
/* Debugging loops structure at tree level, at some VERBOSITY level.  */

void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
6597
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
6605
0c8efed8
SP
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  */

void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (num), verbosity);
}
6de9cd9a
DN
6614
6615/* Return true if BB ends with a call, possibly followed by some
6616 instructions that must stay with the call. Return false,
6617 otherwise. */
6618
6619static bool
726a989a 6620gimple_block_ends_with_call_p (basic_block bb)
6de9cd9a 6621{
726a989a
RB
6622 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6623 return is_gimple_call (gsi_stmt (gsi));
6de9cd9a
DN
6624}
6625
6626
6627/* Return true if BB ends with a conditional branch. Return false,
6628 otherwise. */
6629
6630static bool
726a989a 6631gimple_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a 6632{
726a989a
RB
6633 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6634 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6de9cd9a
DN
6635}
6636
6637
6638/* Return true if we need to add fake edge to exit at statement T.
726a989a 6639 Helper function for gimple_flow_call_edges_add. */
6de9cd9a
DN
6640
6641static bool
726a989a 6642need_fake_edge_p (gimple t)
6de9cd9a 6643{
726a989a
RB
6644 tree fndecl = NULL_TREE;
6645 int call_flags = 0;
6de9cd9a
DN
6646
6647 /* NORETURN and LONGJMP calls already have an edge to exit.
321cf1f2 6648 CONST and PURE calls do not need one.
6de9cd9a
DN
6649 We don't currently check for CONST and PURE here, although
6650 it would be a good idea, because those attributes are
6651 figured out from the RTL in mark_constant_function, and
6652 the counter incrementation code from -fprofile-arcs
6653 leads to different results from -fbranch-probabilities. */
726a989a 6654 if (is_gimple_call (t))
23ef6d21 6655 {
726a989a
RB
6656 fndecl = gimple_call_fndecl (t);
6657 call_flags = gimple_call_flags (t);
23ef6d21
BE
6658 }
6659
726a989a
RB
6660 if (is_gimple_call (t)
6661 && fndecl
6662 && DECL_BUILT_IN (fndecl)
23ef6d21 6663 && (call_flags & ECF_NOTHROW)
3cfa762b
RG
6664 && !(call_flags & ECF_RETURNS_TWICE)
6665 /* fork() doesn't really return twice, but the effect of
6666 wrapping it in __gcov_fork() which calls __gcov_flush()
6667 and clears the counters before forking has the same
6668 effect as returning twice. Force a fake edge. */
6669 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6670 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6671 return false;
23ef6d21 6672
726a989a
RB
6673 if (is_gimple_call (t)
6674 && !(call_flags & ECF_NORETURN))
6de9cd9a
DN
6675 return true;
6676
e0c68ce9 6677 if (gimple_code (t) == GIMPLE_ASM
726a989a 6678 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6de9cd9a
DN
6679 return true;
6680
6681 return false;
6682}
6683
6684
6685/* Add fake edges to the function exit for any non constant and non
6686 noreturn calls, volatile inline assembly in the bitmap of blocks
6687 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6688 the number of blocks that were split.
6689
6690 The goal is to expose cases in which entering a basic block does
6691 not imply that all subsequent instructions must be executed. */
6692
6693static int
726a989a 6694gimple_flow_call_edges_add (sbitmap blocks)
6de9cd9a
DN
6695{
6696 int i;
6697 int blocks_split = 0;
6698 int last_bb = last_basic_block;
6699 bool check_last_block = false;
6700
24bd1a0b 6701 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
6702 return 0;
6703
6704 if (! blocks)
6705 check_last_block = true;
6706 else
6707 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6708
6709 /* In the last basic block, before epilogue generation, there will be
6710 a fallthru edge to EXIT. Special care is required if the last insn
6711 of the last basic block is a call because make_edge folds duplicate
6712 edges, which would result in the fallthru edge also being marked
6713 fake, which would result in the fallthru edge being removed by
6714 remove_fake_edges, which would result in an invalid CFG.
6715
6716 Moreover, we can't elide the outgoing fake edge, since the block
6717 profiler needs to take this into account in order to solve the minimal
6718 spanning tree in the case that the call doesn't return.
6719
6720 Handle this by adding a dummy instruction in a new last basic block. */
6721 if (check_last_block)
6722 {
6723 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
726a989a
RB
6724 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6725 gimple t = NULL;
6726
6727 if (!gsi_end_p (gsi))
6728 t = gsi_stmt (gsi);
6de9cd9a 6729
6a60530d 6730 if (t && need_fake_edge_p (t))
6de9cd9a
DN
6731 {
6732 edge e;
6733
9ff3d2de
JL
6734 e = find_edge (bb, EXIT_BLOCK_PTR);
6735 if (e)
6736 {
726a989a
RB
6737 gsi_insert_on_edge (e, gimple_build_nop ());
6738 gsi_commit_edge_inserts ();
9ff3d2de 6739 }
6de9cd9a
DN
6740 }
6741 }
6742
6743 /* Now add fake edges to the function exit for any non constant
6744 calls since there is no way that we can determine if they will
6745 return or not... */
6746 for (i = 0; i < last_bb; i++)
6747 {
6748 basic_block bb = BASIC_BLOCK (i);
726a989a
RB
6749 gimple_stmt_iterator gsi;
6750 gimple stmt, last_stmt;
6de9cd9a
DN
6751
6752 if (!bb)
6753 continue;
6754
6755 if (blocks && !TEST_BIT (blocks, i))
6756 continue;
6757
726a989a
RB
6758 gsi = gsi_last_bb (bb);
6759 if (!gsi_end_p (gsi))
6de9cd9a 6760 {
726a989a 6761 last_stmt = gsi_stmt (gsi);
6de9cd9a
DN
6762 do
6763 {
726a989a 6764 stmt = gsi_stmt (gsi);
6de9cd9a
DN
6765 if (need_fake_edge_p (stmt))
6766 {
6767 edge e;
726a989a 6768
6de9cd9a
DN
6769 /* The handling above of the final block before the
6770 epilogue should be enough to verify that there is
6771 no edge to the exit block in CFG already.
6772 Calling make_edge in such case would cause us to
6773 mark that edge as fake and remove it later. */
6774#ifdef ENABLE_CHECKING
6775 if (stmt == last_stmt)
628f6a4e 6776 {
9ff3d2de
JL
6777 e = find_edge (bb, EXIT_BLOCK_PTR);
6778 gcc_assert (e == NULL);
628f6a4e 6779 }
6de9cd9a
DN
6780#endif
6781
6782 /* Note that the following may create a new basic block
6783 and renumber the existing basic blocks. */
6784 if (stmt != last_stmt)
6785 {
6786 e = split_block (bb, stmt);
6787 if (e)
6788 blocks_split++;
6789 }
6790 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6791 }
726a989a 6792 gsi_prev (&gsi);
6de9cd9a 6793 }
726a989a 6794 while (!gsi_end_p (gsi));
6de9cd9a
DN
6795 }
6796 }
6797
6798 if (blocks_split)
6799 verify_flow_info ();
6800
6801 return blocks_split;
6802}
6803
4f6c2131
EB
6804/* Purge dead abnormal call edges from basic block BB. */
6805
6806bool
726a989a 6807gimple_purge_dead_abnormal_call_edges (basic_block bb)
4f6c2131 6808{
726a989a 6809 bool changed = gimple_purge_dead_eh_edges (bb);
4f6c2131 6810
e3b5732b 6811 if (cfun->has_nonlocal_label)
4f6c2131 6812 {
726a989a 6813 gimple stmt = last_stmt (bb);
4f6c2131
EB
6814 edge_iterator ei;
6815 edge e;
6816
726a989a 6817 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
4f6c2131
EB
6818 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6819 {
6820 if (e->flags & EDGE_ABNORMAL)
6821 {
6822 remove_edge (e);
6823 changed = true;
6824 }
6825 else
6826 ei_next (&ei);
6827 }
6828
726a989a 6829 /* See gimple_purge_dead_eh_edges below. */
4f6c2131
EB
6830 if (changed)
6831 free_dominance_info (CDI_DOMINATORS);
6832 }
6833
6834 return changed;
6835}
6836
672987e8
ZD
6837/* Removes edge E and all the blocks dominated by it, and updates dominance
6838 information. The IL in E->src needs to be updated separately.
6839 If dominance info is not available, only the edge E is removed.*/
6840
6841void
6842remove_edge_and_dominated_blocks (edge e)
6843{
6844 VEC (basic_block, heap) *bbs_to_remove = NULL;
6845 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6846 bitmap df, df_idom;
6847 edge f;
6848 edge_iterator ei;
6849 bool none_removed = false;
6850 unsigned i;
6851 basic_block bb, dbb;
6852 bitmap_iterator bi;
6853
2b28c07a 6854 if (!dom_info_available_p (CDI_DOMINATORS))
672987e8
ZD
6855 {
6856 remove_edge (e);
6857 return;
6858 }
6859
6860 /* No updating is needed for edges to exit. */
6861 if (e->dest == EXIT_BLOCK_PTR)
6862 {
6863 if (cfgcleanup_altered_bbs)
6864 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6865 remove_edge (e);
6866 return;
6867 }
6868
6869 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6870 that is not dominated by E->dest, then this set is empty. Otherwise,
6871 all the basic blocks dominated by E->dest are removed.
6872
6873 Also, to DF_IDOM we store the immediate dominators of the blocks in
6874 the dominance frontier of E (i.e., of the successors of the
6875 removed blocks, if there are any, and of E->dest otherwise). */
6876 FOR_EACH_EDGE (f, ei, e->dest->preds)
6877 {
6878 if (f == e)
6879 continue;
6880
6881 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6882 {
6883 none_removed = true;
6884 break;
6885 }
6886 }
6887
6888 df = BITMAP_ALLOC (NULL);
6889 df_idom = BITMAP_ALLOC (NULL);
6890
6891 if (none_removed)
6892 bitmap_set_bit (df_idom,
6893 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6894 else
6895 {
438c239d 6896 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
672987e8
ZD
6897 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6898 {
6899 FOR_EACH_EDGE (f, ei, bb->succs)
6900 {
6901 if (f->dest != EXIT_BLOCK_PTR)
6902 bitmap_set_bit (df, f->dest->index);
6903 }
6904 }
6905 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6906 bitmap_clear_bit (df, bb->index);
6907
6908 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6909 {
6910 bb = BASIC_BLOCK (i);
6911 bitmap_set_bit (df_idom,
6912 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6913 }
6914 }
6915
6916 if (cfgcleanup_altered_bbs)
6917 {
6918 /* Record the set of the altered basic blocks. */
6919 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6920 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6921 }
6922
6923 /* Remove E and the cancelled blocks. */
6924 if (none_removed)
6925 remove_edge (e);
6926 else
6927 {
6928 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6929 delete_basic_block (bb);
6930 }
6931
6932 /* Update the dominance information. The immediate dominator may change only
6933 for blocks whose immediate dominator belongs to DF_IDOM:
6934
6935 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6936 removal. Let Z the arbitrary block such that idom(Z) = Y and
6937 Z dominates X after the removal. Before removal, there exists a path P
6938 from Y to X that avoids Z. Let F be the last edge on P that is
6939 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6940 dominates W, and because of P, Z does not dominate W), and W belongs to
6941 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6942 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6943 {
6944 bb = BASIC_BLOCK (i);
6945 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6946 dbb;
6947 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6948 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6949 }
6950
66f97d31 6951 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
672987e8
ZD
6952
6953 BITMAP_FREE (df);
6954 BITMAP_FREE (df_idom);
6955 VEC_free (basic_block, heap, bbs_to_remove);
6956 VEC_free (basic_block, heap, bbs_to_fix_dom);
6957}
6958
4f6c2131
EB
6959/* Purge dead EH edges from basic block BB. */
6960
1eaba2f2 6961bool
726a989a 6962gimple_purge_dead_eh_edges (basic_block bb)
1eaba2f2
RH
6963{
6964 bool changed = false;
628f6a4e
BE
6965 edge e;
6966 edge_iterator ei;
726a989a 6967 gimple stmt = last_stmt (bb);
1eaba2f2 6968
726a989a 6969 if (stmt && stmt_can_throw_internal (stmt))
1eaba2f2
RH
6970 return false;
6971
628f6a4e 6972 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1eaba2f2 6973 {
1eaba2f2
RH
6974 if (e->flags & EDGE_EH)
6975 {
672987e8 6976 remove_edge_and_dominated_blocks (e);
1eaba2f2
RH
6977 changed = true;
6978 }
628f6a4e
BE
6979 else
6980 ei_next (&ei);
1eaba2f2
RH
6981 }
6982
6983 return changed;
6984}
6985
6986bool
726a989a 6987gimple_purge_all_dead_eh_edges (const_bitmap blocks)
1eaba2f2
RH
6988{
6989 bool changed = false;
3cd8c58a 6990 unsigned i;
87c476a2 6991 bitmap_iterator bi;
1eaba2f2 6992
87c476a2
ZD
6993 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6994 {
833ee764
JJ
6995 basic_block bb = BASIC_BLOCK (i);
6996
6997 /* Earlier gimple_purge_dead_eh_edges could have removed
6998 this basic block already. */
6999 gcc_assert (bb || changed);
7000 if (bb != NULL)
7001 changed |= gimple_purge_dead_eh_edges (bb);
87c476a2 7002 }
1eaba2f2
RH
7003
7004 return changed;
7005}
6de9cd9a 7006
a100ac1e
KH
7007/* This function is called whenever a new edge is created or
7008 redirected. */
7009
7010static void
726a989a 7011gimple_execute_on_growing_pred (edge e)
a100ac1e
KH
7012{
7013 basic_block bb = e->dest;
7014
7015 if (phi_nodes (bb))
7016 reserve_phi_args_for_new_edge (bb);
7017}
7018
e51546f8
KH
7019/* This function is called immediately before edge E is removed from
7020 the edge vector E->dest->preds. */
7021
7022static void
726a989a 7023gimple_execute_on_shrinking_pred (edge e)
e51546f8
KH
7024{
7025 if (phi_nodes (e->dest))
7026 remove_phi_args (e);
7027}
7028
1cb7dfc3
MH
7029/*---------------------------------------------------------------------------
7030 Helper functions for Loop versioning
7031 ---------------------------------------------------------------------------*/
7032
7033/* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7034 of 'first'. Both of them are dominated by 'new_head' basic block. When
7035 'new_head' was created by 'second's incoming edge it received phi arguments
7036 on the edge by split_edge(). Later, additional edge 'e' was created to
6531d1be
BF
7037 connect 'new_head' and 'first'. Now this routine adds phi args on this
7038 additional edge 'e' that new_head to second edge received as part of edge
726a989a 7039 splitting. */
1cb7dfc3
MH
7040
7041static void
726a989a
RB
7042gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7043 basic_block new_head, edge e)
1cb7dfc3 7044{
726a989a
RB
7045 gimple phi1, phi2;
7046 gimple_stmt_iterator psi1, psi2;
7047 tree def;
d0e12fc6
KH
7048 edge e2 = find_edge (new_head, second);
7049
7050 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7051 edge, we should always have an edge from NEW_HEAD to SECOND. */
7052 gcc_assert (e2 != NULL);
1cb7dfc3
MH
7053
7054 /* Browse all 'second' basic block phi nodes and add phi args to
7055 edge 'e' for 'first' head. PHI args are always in correct order. */
7056
726a989a
RB
7057 for (psi2 = gsi_start_phis (second),
7058 psi1 = gsi_start_phis (first);
7059 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7060 gsi_next (&psi2), gsi_next (&psi1))
1cb7dfc3 7061 {
726a989a
RB
7062 phi1 = gsi_stmt (psi1);
7063 phi2 = gsi_stmt (psi2);
7064 def = PHI_ARG_DEF (phi2, e2->dest_idx);
d0e12fc6 7065 add_phi_arg (phi1, def, e);
1cb7dfc3
MH
7066 }
7067}
7068
726a989a 7069
6531d1be
BF
7070/* Adds a if else statement to COND_BB with condition COND_EXPR.
7071 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
1cb7dfc3 7072 the destination of the ELSE part. */
726a989a 7073
1cb7dfc3 7074static void
726a989a
RB
7075gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7076 basic_block second_head ATTRIBUTE_UNUSED,
7077 basic_block cond_bb, void *cond_e)
1cb7dfc3 7078{
726a989a
RB
7079 gimple_stmt_iterator gsi;
7080 gimple new_cond_expr;
1cb7dfc3
MH
7081 tree cond_expr = (tree) cond_e;
7082 edge e0;
7083
7084 /* Build new conditional expr */
726a989a
RB
7085 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7086 NULL_TREE, NULL_TREE);
1cb7dfc3 7087
6531d1be 7088 /* Add new cond in cond_bb. */
726a989a
RB
7089 gsi = gsi_last_bb (cond_bb);
7090 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7091
1cb7dfc3
MH
7092 /* Adjust edges appropriately to connect new head with first head
7093 as well as second head. */
7094 e0 = single_succ_edge (cond_bb);
7095 e0->flags &= ~EDGE_FALLTHRU;
7096 e0->flags |= EDGE_FALSE_VALUE;
7097}
7098
726a989a
RB
/* Table of CFG manipulation hooks used when the IL is GIMPLE; the
   IL-independent CFG code dispatches through this table.  */

struct cfg_hooks gimple_cfg_hooks = {
  "gimple",                             /* name of the IL */
  gimple_verify_flow_info,              /* verify_flow_info */
  gimple_dump_bb,                       /* dump_bb */
  create_bb,                            /* create_basic_block */
  gimple_redirect_edge_and_branch,      /* redirect_edge_and_branch */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  gimple_can_remove_branch_p,           /* can_remove_branch_p */
  remove_bb,                            /* delete_basic_block */
  gimple_split_block,                   /* split_block */
  gimple_move_block_after,              /* move_block_after */
  gimple_can_merge_blocks_p,            /* can_merge_blocks_p */
  gimple_merge_blocks,                  /* merge_blocks */
  gimple_predict_edge,                  /* predict_edge */
  gimple_predicted_by_p,                /* predicted_by_p */
  gimple_can_duplicate_bb_p,            /* can_duplicate_block_p */
  gimple_duplicate_bb,                  /* duplicate_block */
  gimple_split_edge,                    /* split_edge */
  gimple_make_forwarder_block,          /* make_forward_block */
  NULL,                                 /* tidy_fallthru_edge */
  gimple_block_ends_with_call_p,        /* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p,    /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,           /* flow_call_edges_add */
  gimple_execute_on_growing_pred,       /* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred,     /* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb,        /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi,     /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block,  /* extract_cond_bb_edges */
  flush_pending_stmts                   /* flush_pending_stmts */
};
7130
7131
/* Split all critical edges.  Also splits certain non-critical edges
   that later edge insertion (e.g. by PRE) would otherwise have to
   split itself.  Always returns 0 (no TODO flags).  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
            split_edge (e);
          /* PRE inserts statements to edges and expects that
             since split_critical_edges was done beforehand, committing edge
             insertions will not split more edges.  In addition to critical
             edges we must split edges that have multiple successors and
             end by control flow statements, such as RESX.
             Go ahead and split them too.  This matches the logic in
             gimple_find_edge_insert_loc.  */
          else if ((!single_pred_p (e->dest)
                    || phi_nodes (e->dest)
                    || e->dest == EXIT_BLOCK_PTR)
                   && e->src != ENTRY_BLOCK_PTR
                   && !(e->flags & EDGE_ABNORMAL))
            {
              gimple_stmt_iterator gsi;

              /* Only split when E->src ends in a control-flow statement
                 other than a return.  */
              gsi = gsi_last_bb (e->src);
              if (!gsi_end_p (gsi)
                  && stmt_ends_bb_p (gsi_stmt (gsi))
                  && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
                split_edge (e);
            }
        }
    }
  end_recording_case_labels ();
  return 0;
}
7177
/* Pass descriptor for the critical-edge-splitting pass ("crited").
   Requires a CFG and provides the no-critical-edges property.  */

struct gimple_opt_pass pass_split_crit_edges =
{
 {
  GIMPLE_PASS,
  "crited",                             /* name */
  NULL,                                 /* gate */
  split_critical_edges,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SPLIT_EDGES,                  /* tv_id */
  PROP_cfg,                             /* properties required */
  PROP_no_crit_edges,                   /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_flow     /* todo_flags_finish */
 }
};
26277d41 7196
26277d41 7197
726a989a 7198/* Build a ternary operation and gimplify it. Emit code before GSI.
26277d41
PB
7199 Return the gimple_val holding the result. */
7200
7201tree
726a989a 7202gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
26277d41
PB
7203 tree type, tree a, tree b, tree c)
7204{
7205 tree ret;
7206
987b67bc 7207 ret = fold_build3 (code, type, a, b, c);
26277d41
PB
7208 STRIP_NOPS (ret);
7209
726a989a
RB
7210 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7211 GSI_SAME_STMT);
26277d41
PB
7212}
7213
726a989a 7214/* Build a binary operation and gimplify it. Emit code before GSI.
26277d41
PB
7215 Return the gimple_val holding the result. */
7216
7217tree
726a989a 7218gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
26277d41
PB
7219 tree type, tree a, tree b)
7220{
7221 tree ret;
7222
987b67bc 7223 ret = fold_build2 (code, type, a, b);
26277d41
PB
7224 STRIP_NOPS (ret);
7225
726a989a
RB
7226 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7227 GSI_SAME_STMT);
26277d41
PB
7228}
7229
726a989a 7230/* Build a unary operation and gimplify it. Emit code before GSI.
26277d41
PB
7231 Return the gimple_val holding the result. */
7232
7233tree
726a989a 7234gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
26277d41
PB
7235 tree a)
7236{
7237 tree ret;
7238
987b67bc 7239 ret = fold_build1 (code, type, a);
26277d41
PB
7240 STRIP_NOPS (ret);
7241
726a989a
RB
7242 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7243 GSI_SAME_STMT);
26277d41
PB
7244}
7245
7246
6de9cd9a
DN
7247\f
/* Emit return warnings.  Diagnoses two problems at the end of
   compilation of a function: a "noreturn" function that can in fact
   return, and a non-void function through which control can reach the
   end without returning a value.  Always returns 0 (no TODO flags).  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* If we have a path to EXIT, then we do return.  TREE_THIS_VOLATILE
     on a FUNCTION_DECL means the function was declared "noreturn".  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      /* Prefer the location of an actual return statement; note the
         assignment to LOCATION inside the condition.  */
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          last = last_stmt (e->src);
          if (gimple_code (last) == GIMPLE_RETURN
              && (location = gimple_location (last)) != UNKNOWN_LOCATION)
            break;
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning (0, "%H%<noreturn%> function does return", &location);
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
           && !TREE_NO_WARNING (cfun->decl)
           && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          gimple last = last_stmt (e->src);
          /* A value-less GIMPLE_RETURN in a non-void function;
             gimple_no_warning_p suppresses already-diagnosed stmts.  */
          if (gimple_code (last) == GIMPLE_RETURN
              && gimple_return_retval (last) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (location == UNKNOWN_LOCATION)
                location = cfun->function_end_locus;
              warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
              /* Warn at most once per function.  */
              TREE_NO_WARNING (cfun->decl) = 1;
              break;
            }
        }
    }
  return 0;
}
7300
7301
7302/* Given a basic block B which ends with a conditional and has
7303 precisely two successors, determine which of the edges is taken if
7304 the conditional is true and which is taken if the conditional is
7305 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7306
7307void
7308extract_true_false_edges_from_block (basic_block b,
7309 edge *true_edge,
7310 edge *false_edge)
7311{
628f6a4e 7312 edge e = EDGE_SUCC (b, 0);
6de9cd9a
DN
7313
7314 if (e->flags & EDGE_TRUE_VALUE)
7315 {
7316 *true_edge = e;
628f6a4e 7317 *false_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7318 }
7319 else
7320 {
7321 *false_edge = e;
628f6a4e 7322 *true_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7323 }
7324}
7325
/* Pass descriptor for the return-warning pass; anonymous (no name),
   ungated, and sets no TODO flags.  */

struct gimple_opt_pass pass_warn_function_return =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_warn_function_return,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
aa313ed4
JH
7344
7345/* Emit noreturn warnings. */
7346
c2924966 7347static unsigned int
aa313ed4
JH
7348execute_warn_function_noreturn (void)
7349{
7350 if (warn_missing_noreturn
7351 && !TREE_THIS_VOLATILE (cfun->decl)
7352 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
e8924938 7353 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
3176a0c2
DD
7354 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
7355 "for attribute %<noreturn%>",
aa313ed4 7356 cfun->decl);
c2924966 7357 return 0;
aa313ed4
JH
7358}
7359
/* Pass descriptor for the missing-noreturn-warning pass; anonymous
   (no name), ungated, and sets no TODO flags.  */

struct gimple_opt_pass pass_warn_function_noreturn =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_warn_function_noreturn,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};