/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
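
/* For instance, when cases 1 and 2 of a switch both jump to the same
   destination block, the single edge from the switch block to that block
   maps to a chain holding both CASE_LABEL_EXPRs, so redirecting that one
   edge can update both cases at once.  */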

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
static void group_case_labels_stmt (gimple);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                             Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",                                /* name */
  NULL,                                 /* gate */
  execute_build_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CFG,                          /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func                      /* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */
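
/* For instance, two computed gotos

     goto *p1;
     goto *p2;

   are conceptually rewritten as

     gotovar = p1; goto <factored>;
     gotovar = p2; goto <factored>;
     ...
     <factored>: goto *gotovar;

   so that only the single factored block has an arbitrary set of
   destinations (the names above are purely illustrative).  */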

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            seq = gsi_split_seq_before (&i);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
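          /* For example, a call x_1 = foo () that may also transfer control
             abnormally (say, back through a non-local label) is rewritten,
             roughly, as

               tmp = foo ();
               x_1 = tmp;

             where the copy starts the fallthru block, so the abnormal edge
             still sees the previous value of x_1 (names are illustrative).  */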
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = ggc_alloc_cleared_gimple_bb_info ();
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
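
/* For instance, if "if (p) x = 1; else x = 2;" occupies a single source
   line, the then and else blocks share that line's locus; assigning them
   different discriminators lets a sample-based profiler attribute samples
   to each block separately.  */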

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                              (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && strcmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = TREE_CHAIN (t);
      TREE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return false;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  location_t stmt_loc = gimple_location (stmt);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
      assign_discriminator (stmt_loc, label_bb);
    }
}


/*---------------------------------------------------------------------------
                              Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */
1154 2) Redirect all references to labels to the leading labels.
1155 3) Cleanup all useless labels. */
6de9cd9a 1156
165b54c3 1157void
6de9cd9a
DN
1158cleanup_dead_labels (void)
1159{
1160 basic_block bb;
8b11009b 1161 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
6de9cd9a
DN
1162
1163 /* Find a suitable label for each block. We use the first user-defined
f0b698c1 1164 label if there is one, or otherwise just the first label we see. */
6de9cd9a
DN
1165 FOR_EACH_BB (bb)
1166 {
726a989a 1167 gimple_stmt_iterator i;
6de9cd9a 1168
726a989a 1169 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
6de9cd9a 1170 {
726a989a
RB
1171 tree label;
1172 gimple stmt = gsi_stmt (i);
6de9cd9a 1173
726a989a 1174 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
1175 break;
1176
726a989a 1177 label = gimple_label_label (stmt);
6de9cd9a
DN
1178
1179 /* If we have not yet seen a label for the current block,
1180 remember this one and see if there are more labels. */
8b11009b 1181 if (!label_for_bb[bb->index].label)
6de9cd9a 1182 {
8b11009b 1183 label_for_bb[bb->index].label = label;
6de9cd9a
DN
1184 continue;
1185 }
1186
1187 /* If we did see a label for the current block already, but it
1188 is an artificially created label, replace it if the current
1189 label is a user defined label. */
8b11009b
ZD
1190 if (!DECL_ARTIFICIAL (label)
1191 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
6de9cd9a 1192 {
8b11009b 1193 label_for_bb[bb->index].label = label;
6de9cd9a
DN
1194 break;
1195 }
1196 }
1197 }
1198
f698d217
SB
1199 /* Now redirect all jumps/branches to the selected label.
1200 First do so for each block ending in a control statement. */
6de9cd9a
DN
1201 FOR_EACH_BB (bb)
1202 {
726a989a 1203 gimple stmt = last_stmt (bb);
6de9cd9a
DN
1204 if (!stmt)
1205 continue;
1206
726a989a 1207 switch (gimple_code (stmt))
6de9cd9a 1208 {
726a989a 1209 case GIMPLE_COND:
6de9cd9a 1210 {
726a989a
RB
1211 tree true_label = gimple_cond_true_label (stmt);
1212 tree false_label = gimple_cond_false_label (stmt);
6de9cd9a 1213
726a989a
RB
1214 if (true_label)
1215 gimple_cond_set_true_label (stmt, main_block_label (true_label));
1216 if (false_label)
1217 gimple_cond_set_false_label (stmt, main_block_label (false_label));
6de9cd9a
DN
1218 break;
1219 }
6531d1be 1220
726a989a 1221 case GIMPLE_SWITCH:
6de9cd9a 1222 {
726a989a 1223 size_t i, n = gimple_switch_num_labels (stmt);
6531d1be 1224
6de9cd9a
DN
1225 /* Replace all destination labels. */
1226 for (i = 0; i < n; ++i)
92b6dff3 1227 {
726a989a
RB
1228 tree case_label = gimple_switch_label (stmt, i);
1229 tree label = main_block_label (CASE_LABEL (case_label));
1230 CASE_LABEL (case_label) = label;
92b6dff3 1231 }
6de9cd9a
DN
1232 break;
1233 }
1234
1c384bf1
RH
1235 case GIMPLE_ASM:
1236 {
1237 int i, n = gimple_asm_nlabels (stmt);
1238
1239 for (i = 0; i < n; ++i)
1240 {
1241 tree cons = gimple_asm_label_op (stmt, i);
1242 tree label = main_block_label (TREE_VALUE (cons));
1243 TREE_VALUE (cons) = label;
1244 }
1245 break;
1246 }
1247
726a989a 1248 /* We have to handle gotos until they're removed, and we don't
f667741c 1249 remove them until after we've created the CFG edges. */
726a989a
RB
1250 case GIMPLE_GOTO:
1251 if (!computed_goto_p (stmt))
242229bb 1252 {
726a989a
RB
1253 tree new_dest = main_block_label (gimple_goto_dest (stmt));
1254 gimple_goto_set_dest (stmt, new_dest);
242229bb 1255 }
1c384bf1 1256 break;
f667741c 1257
6de9cd9a
DN
1258 default:
1259 break;
1260 }
1261 }
1262
1d65f45c
RH
1263 /* Do the same for the exception region tree labels. */
1264 cleanup_dead_labels_eh ();
f698d217 1265
6de9cd9a 1266 /* Finally, purge dead labels. All user-defined labels and labels that
cea0f4f1
AP
1267 can be the target of non-local gotos and labels which have their
1268 address taken are preserved. */
6de9cd9a
DN
1269 FOR_EACH_BB (bb)
1270 {
726a989a 1271 gimple_stmt_iterator i;
8b11009b 1272 tree label_for_this_bb = label_for_bb[bb->index].label;
6de9cd9a 1273
8b11009b 1274 if (!label_for_this_bb)
6de9cd9a
DN
1275 continue;
1276
8b11009b
ZD
1277 /* If the main label of the block is unused, we may still remove it. */
1278 if (!label_for_bb[bb->index].used)
1279 label_for_this_bb = NULL;
1280
726a989a 1281 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
6de9cd9a 1282 {
726a989a
RB
1283 tree label;
1284 gimple stmt = gsi_stmt (i);
6de9cd9a 1285
726a989a 1286 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
1287 break;
1288
726a989a 1289 label = gimple_label_label (stmt);
6de9cd9a
DN
1290
1291 if (label == label_for_this_bb
726a989a 1292 || !DECL_ARTIFICIAL (label)
cea0f4f1
AP
1293 || DECL_NONLOCAL (label)
1294 || FORCED_LABEL (label))
726a989a 1295 gsi_next (&i);
6de9cd9a 1296 else
726a989a 1297 gsi_remove (&i, true);
6de9cd9a
DN
1298 }
1299 }
1300
1301 free (label_for_bb);
1302}
1303
fc249fe5
MM
1304/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1305 the ones jumping to the same label.
f667741c
SB
1306 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1307
fc249fe5
MM
1308static void
1309group_case_labels_stmt (gimple stmt)
f667741c 1310{
fc249fe5
MM
1311 int old_size = gimple_switch_num_labels (stmt);
1312 int i, j, new_size = old_size;
1313 tree default_case = NULL_TREE;
1314 tree default_label = NULL_TREE;
1315 bool has_default;
1316
1317 /* The default label is always the first case in a switch
1318 statement after gimplification if it was not optimized
1319 away */
1320 if (!CASE_LOW (gimple_switch_default_label (stmt))
1321 && !CASE_HIGH (gimple_switch_default_label (stmt)))
1322 {
1323 default_case = gimple_switch_default_label (stmt);
1324 default_label = CASE_LABEL (default_case);
1325 has_default = true;
1326 }
1327 else
1328 has_default = false;
f667741c 1329
fc249fe5
MM
1330 /* Look for possible opportunities to merge cases. */
1331 if (has_default)
1332 i = 1;
1333 else
1334 i = 0;
1335 while (i < old_size)
f667741c 1336 {
fc249fe5
MM
1337 tree base_case, base_label, base_high;
1338 base_case = gimple_switch_label (stmt, i);
1339
1340 gcc_assert (base_case);
1341 base_label = CASE_LABEL (base_case);
1342
1343 /* Discard cases that have the same destination as the
1344 default case. */
1345 if (base_label == default_label)
1346 {
1347 gimple_switch_set_label (stmt, i, NULL_TREE);
1348 i++;
1349 new_size--;
1350 continue;
1351 }
1352
1353 base_high = CASE_HIGH (base_case)
1354 ? CASE_HIGH (base_case)
1355 : CASE_LOW (base_case);
1356 i++;
1357
1358 /* Try to merge case labels. Break out when we reach the end
1359 of the label vector or when we cannot merge the next case
1360 label with the current one. */
1361 while (i < old_size)
f667741c 1362 {
fc249fe5
MM
1363 tree merge_case = gimple_switch_label (stmt, i);
1364 tree merge_label = CASE_LABEL (merge_case);
1365 tree t = int_const_binop (PLUS_EXPR, base_high,
1366 integer_one_node, 1);
1367
1368 /* Merge the cases if they jump to the same place,
1369 and their ranges are consecutive. */
1370 if (merge_label == base_label
1371 && tree_int_cst_equal (CASE_LOW (merge_case), t))
b7814a18 1372 {
fc249fe5
MM
1373 base_high = CASE_HIGH (merge_case) ?
1374 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1375 CASE_HIGH (base_case) = base_high;
1376 gimple_switch_set_label (stmt, i, NULL_TREE);
1377 new_size--;
1378 i++;
b7814a18 1379 }
726a989a 1380 else
fc249fe5
MM
1381 break;
1382 }
1383 }
31e9eea2 1384
fc249fe5
MM
1385 /* Compress the case labels in the label vector, and adjust the
1386 length of the vector. */
1387 for (i = 0, j = 0; i < new_size; i++)
1388 {
1389 while (! gimple_switch_label (stmt, j))
1390 j++;
1391 gimple_switch_set_label (stmt, i,
1392 gimple_switch_label (stmt, j++));
1393 }
31e9eea2 1394
fc249fe5
MM
1395 gcc_assert (new_size <= old_size);
1396 gimple_switch_set_num_labels (stmt, new_size);
1397}
726a989a 1398
fc249fe5
MM
1399/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1400 and scan the sorted vector of cases. Combine the ones jumping to the
1401 same label. */
f667741c 1402
fc249fe5
MM
1403void
1404group_case_labels (void)
1405{
1406 basic_block bb;
726a989a 1407
fc249fe5
MM
1408 FOR_EACH_BB (bb)
1409 {
1410 gimple stmt = last_stmt (bb);
1411 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1412 group_case_labels_stmt (stmt);
f667741c
SB
1413 }
1414}
6de9cd9a
DN
1415
1416/* Checks whether we can merge block B into block A. */
1417
1418static bool
726a989a 1419gimple_can_merge_blocks_p (basic_block a, basic_block b)
6de9cd9a 1420{
726a989a
RB
1421 gimple stmt;
1422 gimple_stmt_iterator gsi;
1423 gimple_seq phis;
6de9cd9a 1424
c5cbcccf 1425 if (!single_succ_p (a))
6de9cd9a
DN
1426 return false;
1427
496a4ef5 1428 if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
6de9cd9a
DN
1429 return false;
1430
c5cbcccf 1431 if (single_succ (a) != b)
6de9cd9a
DN
1432 return false;
1433
c5cbcccf 1434 if (!single_pred_p (b))
6de9cd9a
DN
1435 return false;
1436
26e75214
KH
1437 if (b == EXIT_BLOCK_PTR)
1438 return false;
6531d1be 1439
6de9cd9a
DN
1440 /* If A ends by a statement causing exceptions or something similar, we
1441 cannot merge the blocks. */
726a989a 1442 stmt = last_stmt (a);
6de9cd9a
DN
1443 if (stmt && stmt_ends_bb_p (stmt))
1444 return false;
1445
1446 /* Do not allow a block with only a non-local label to be merged. */
726a989a
RB
1447 if (stmt
1448 && gimple_code (stmt) == GIMPLE_LABEL
1449 && DECL_NONLOCAL (gimple_label_label (stmt)))
6de9cd9a
DN
1450 return false;
1451
8e7c5fdd
RH
1452 /* Examine the labels at the beginning of B. */
1453 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1454 {
1455 tree lab;
1456 stmt = gsi_stmt (gsi);
1457 if (gimple_code (stmt) != GIMPLE_LABEL)
1458 break;
1459 lab = gimple_label_label (stmt);
1460
1461 /* Do not remove user labels. */
1462 if (!DECL_ARTIFICIAL (lab))
1463 return false;
1464 }
1465
1466 /* Protect the loop latches. */
1467 if (current_loops && b->loop_father->latch == b)
1468 return false;
1469
38965eb2 1470 /* It must be possible to eliminate all phi nodes in B. If ssa form
610b64e2
RG
1471 is not up-to-date and a name-mapping is registered, we cannot eliminate
1472 any phis. Symbols marked for renaming are never a problem though. */
726a989a 1473 phis = phi_nodes (b);
610b64e2
RG
1474 if (!gimple_seq_empty_p (phis)
1475 && name_mappings_registered_p ())
1476 return false;
6de9cd9a 1477
c87d6057
JJ
1478 /* When not optimizing, don't merge if we'd lose goto_locus. */
1479 if (!optimize
1480 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1481 {
1482 location_t goto_locus = single_succ_edge (a)->goto_locus;
1483 gimple_stmt_iterator prev, next;
1484 prev = gsi_last_nondebug_bb (a);
1485 next = gsi_after_labels (b);
1486 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1487 gsi_next_nondebug (&next);
1488 if ((gsi_end_p (prev)
1489 || gimple_location (gsi_stmt (prev)) != goto_locus)
1490 && (gsi_end_p (next)
1491 || gimple_location (gsi_stmt (next)) != goto_locus))
1492 return false;
1493 }
1494
6de9cd9a
DN
1495 return true;
1496}
1497
b5b8b0ac
AO
1498/* Return true if the var whose chain of uses starts at PTR has no
1499 nondebug uses. */
1500bool
1501has_zero_uses_1 (const ssa_use_operand_t *head)
1502{
1503 const ssa_use_operand_t *ptr;
1504
1505 for (ptr = head->next; ptr != head; ptr = ptr->next)
1506 if (!is_gimple_debug (USE_STMT (ptr)))
1507 return false;
1508
1509 return true;
1510}
1511
1512/* Return true if the var whose chain of uses starts at PTR has a
1513 single nondebug use. Set USE_P and STMT to that single nondebug
1514 use, if so, or to NULL otherwise. */
1515bool
1516single_imm_use_1 (const ssa_use_operand_t *head,
1517 use_operand_p *use_p, gimple *stmt)
1518{
1519 ssa_use_operand_t *ptr, *single_use = 0;
1520
1521 for (ptr = head->next; ptr != head; ptr = ptr->next)
1522 if (!is_gimple_debug (USE_STMT (ptr)))
1523 {
1524 if (single_use)
1525 {
1526 single_use = NULL;
1527 break;
1528 }
1529 single_use = ptr;
1530 }
1531
1532 if (use_p)
1533 *use_p = single_use;
1534
1535 if (stmt)
1536 *stmt = single_use ? single_use->loc.stmt : NULL;
1537
1538 return !!single_use;
1539}
1540
38965eb2
ZD
1541/* Replaces all uses of NAME by VAL. */
1542
684aaf29 1543void
38965eb2
ZD
1544replace_uses_by (tree name, tree val)
1545{
1546 imm_use_iterator imm_iter;
1547 use_operand_p use;
726a989a 1548 gimple stmt;
38965eb2 1549 edge e;
38965eb2 1550
6c00f606 1551 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
38965eb2 1552 {
6c00f606
AM
1553 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1554 {
1555 replace_exp (use, val);
38965eb2 1556
726a989a 1557 if (gimple_code (stmt) == GIMPLE_PHI)
38965eb2 1558 {
726a989a 1559 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
6c00f606
AM
1560 if (e->flags & EDGE_ABNORMAL)
1561 {
1562 /* This can only occur for virtual operands, since
1563 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1564 would prevent replacement. */
1565 gcc_assert (!is_gimple_reg (name));
1566 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1567 }
38965eb2
ZD
1568 }
1569 }
cfaab3a9 1570
726a989a 1571 if (gimple_code (stmt) != GIMPLE_PHI)
6c00f606 1572 {
726a989a 1573 size_t i;
9af0df6b 1574
6c00f606 1575 fold_stmt_inplace (stmt);
672987e8 1576 if (cfgcleanup_altered_bbs)
726a989a 1577 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
cfaab3a9 1578
cff4e50d 1579 /* FIXME. This should go in update_stmt. */
726a989a
RB
1580 for (i = 0; i < gimple_num_ops (stmt); i++)
1581 {
1582 tree op = gimple_op (stmt, i);
1583 /* Operands may be empty here. For example, the labels
1584 of a GIMPLE_COND are nulled out following the creation
1585 of the corresponding CFG edges. */
1586 if (op && TREE_CODE (op) == ADDR_EXPR)
1587 recompute_tree_invariant_for_addr_expr (op);
1588 }
9af0df6b 1589
6c00f606 1590 maybe_clean_or_replace_eh_stmt (stmt, stmt);
cff4e50d 1591 update_stmt (stmt);
6c00f606 1592 }
38965eb2 1593 }
6531d1be 1594
40b448ef 1595 gcc_assert (has_zero_uses (name));
d5ab5675
ZD
1596
1597 /* Also update the trees stored in loop structures. */
1598 if (current_loops)
1599 {
1600 struct loop *loop;
42fd6772 1601 loop_iterator li;
d5ab5675 1602
42fd6772 1603 FOR_EACH_LOOP (li, loop, 0)
d5ab5675 1604 {
42fd6772 1605 substitute_in_loop_info (loop, name, val);
d5ab5675
ZD
1606 }
1607 }
38965eb2 1608}
6de9cd9a
DN
1609
1610/* Merge block B into block A. */
1611
1612static void
726a989a 1613gimple_merge_blocks (basic_block a, basic_block b)
6de9cd9a 1614{
726a989a
RB
1615 gimple_stmt_iterator last, gsi, psi;
1616 gimple_seq phis = phi_nodes (b);
6de9cd9a
DN
1617
1618 if (dump_file)
1619 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1620
c4f548b8
DN
1621 /* Remove all single-valued PHI nodes from block B of the form
1622 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
726a989a
RB
1623 gsi = gsi_last_bb (a);
1624 for (psi = gsi_start (phis); !gsi_end_p (psi); )
38965eb2 1625 {
726a989a
RB
1626 gimple phi = gsi_stmt (psi);
1627 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1628 gimple copy;
1629 bool may_replace_uses = !is_gimple_reg (def)
1630 || may_propagate_copy (def, use);
d7f0e25c 1631
7c8eb293
ZD
1632 /* In case we maintain loop closed ssa form, do not propagate arguments
1633 of loop exit phi nodes. */
d7f0e25c 1634 if (current_loops
f87000d0 1635 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
d7f0e25c
ZD
1636 && is_gimple_reg (def)
1637 && TREE_CODE (use) == SSA_NAME
1638 && a->loop_father != b->loop_father)
1639 may_replace_uses = false;
1640
1641 if (!may_replace_uses)
38965eb2
ZD
1642 {
1643 gcc_assert (is_gimple_reg (def));
1644
128a79fb 1645 /* Note that just emitting the copies is fine -- there is no problem
38965eb2
ZD
1646 with ordering of phi nodes. This is because A is the single
1647 predecessor of B, therefore results of the phi nodes cannot
1648 appear as arguments of the phi nodes. */
726a989a
RB
1649 copy = gimple_build_assign (def, use);
1650 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1651 remove_phi_node (&psi, false);
38965eb2
ZD
1652 }
1653 else
611021e1 1654 {
d0f76c4b
RG
1655 /* If we deal with a PHI for virtual operands, we can simply
1656 propagate these without fussing with folding or updating
1657 the stmt. */
1658 if (!is_gimple_reg (def))
1659 {
1660 imm_use_iterator iter;
1661 use_operand_p use_p;
726a989a 1662 gimple stmt;
d0f76c4b
RG
1663
1664 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1665 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1666 SET_USE (use_p, use);
7686fb74
RG
1667
1668 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1669 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
d0f76c4b
RG
1670 }
1671 else
1672 replace_uses_by (def, use);
726a989a
RB
1673
1674 remove_phi_node (&psi, true);
611021e1 1675 }
38965eb2
ZD
1676 }
1677
6de9cd9a
DN
1678 /* Ensure that B follows A. */
1679 move_block_after (b, a);
1680
c5cbcccf 1681 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1e128c5f 1682 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
6de9cd9a 1683
726a989a
RB
1684 /* Remove labels from B and set gimple_bb to A for other statements. */
1685 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
6de9cd9a 1686 {
1d65f45c
RH
1687 gimple stmt = gsi_stmt (gsi);
1688 if (gimple_code (stmt) == GIMPLE_LABEL)
be477406 1689 {
1d65f45c
RH
1690 tree label = gimple_label_label (stmt);
1691 int lp_nr;
726a989a
RB
1692
1693 gsi_remove (&gsi, false);
be477406 1694
be477406
JL
1695 /* Now that we can thread computed gotos, we might have
1696 a situation where we have a forced label in block B
1697 However, the label at the start of block B might still be
1698 used in other ways (think about the runtime checking for
1699 Fortran assigned gotos). So we can not just delete the
1700 label. Instead we move the label to the start of block A. */
1d65f45c 1701 if (FORCED_LABEL (label))
be477406 1702 {
726a989a 1703 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1d65f45c
RH
1704 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1705 }
1706
1707 lp_nr = EH_LANDING_PAD_NR (label);
1708 if (lp_nr)
1709 {
1710 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1711 lp->post_landing_pad = NULL;
be477406
JL
1712 }
1713 }
6de9cd9a
DN
1714 else
1715 {
1d65f45c 1716 gimple_set_bb (stmt, a);
726a989a 1717 gsi_next (&gsi);
6de9cd9a
DN
1718 }
1719 }
1720
726a989a
RB
1721 /* Merge the sequences. */
1722 last = gsi_last_bb (a);
1723 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1724 set_bb_seq (b, NULL);
672987e8
ZD
1725
1726 if (cfgcleanup_altered_bbs)
1727 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
6de9cd9a
DN
1728}
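
/* Editor's note: an illustrative sketch, not part of the original
   tree-cfg.c.  It shows how a cleanup loop might drive the block
   merging implemented above.  It assumes the generic cfghooks entry
   points can_merge_blocks_p () and merge_blocks () (which dispatch to
   gimple_merge_blocks () for GIMPLE bodies) and the single_succ_p ()
   and single_succ () accessors from basic-block.h; the function name
   example_merge_fallthru_chains is hypothetical.  */

static void
example_merge_fallthru_chains (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      /* Keep merging BB with its lone successor for as long as the
         generic predicate allows it.  */
      while (single_succ_p (bb)
             && can_merge_blocks_p (bb, single_succ (bb)))
        merge_blocks (bb, single_succ (bb));
    }
}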
1729
1730
bc23502b 1731/* Return the one of the two successors of BB that is not reachable by a
2cd713a0 1732 complex edge, if there is one. Otherwise, return BB. We use
bc23502b
PB
1733 this in optimizations that use post-dominators for their heuristics,
1734 to catch the cases in C++ where function calls are involved. */
6531d1be 1735
bc23502b 1736basic_block
6531d1be 1737single_noncomplex_succ (basic_block bb)
bc23502b
PB
1738{
1739 edge e0, e1;
1740 if (EDGE_COUNT (bb->succs) != 2)
1741 return bb;
6531d1be 1742
bc23502b
PB
1743 e0 = EDGE_SUCC (bb, 0);
1744 e1 = EDGE_SUCC (bb, 1);
1745 if (e0->flags & EDGE_COMPLEX)
1746 return e1->dest;
1747 if (e1->flags & EDGE_COMPLEX)
1748 return e0->dest;
6531d1be 1749
bc23502b 1750 return bb;
6531d1be 1751}
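
/* Editor's note: an illustrative sketch, not part of the original
   tree-cfg.c.  It shows the typical way a heuristic consumes
   single_noncomplex_succ (): follow the "normal" path out of BB and
   treat the BB-returned-itself case as "no unique successor".  The
   function name example_following_block is hypothetical.  */

static basic_block
example_following_block (basic_block bb)
{
  basic_block succ = single_noncomplex_succ (bb);

  /* single_noncomplex_succ () returns BB itself when there is no
     unique successor reachable over a non-complex edge.  */
  return succ != bb ? succ : NULL;
}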
bc23502b 1752
6de9cd9a
DN
 1753/* CALL is a GIMPLE_CALL. Set the cfun->calls_* flags it implies. */
1754
1755void
726a989a 1756notice_special_calls (gimple call)
6de9cd9a 1757{
726a989a 1758 int flags = gimple_call_flags (call);
6de9cd9a
DN
1759
1760 if (flags & ECF_MAY_BE_ALLOCA)
e3b5732b 1761 cfun->calls_alloca = true;
6de9cd9a 1762 if (flags & ECF_RETURNS_TWICE)
e3b5732b 1763 cfun->calls_setjmp = true;
6de9cd9a
DN
1764}
1765
1766
1767/* Clear flags set by notice_special_calls. Used by dead code removal
1768 to update the flags. */
1769
1770void
1771clear_special_calls (void)
1772{
e3b5732b
JH
1773 cfun->calls_alloca = false;
1774 cfun->calls_setjmp = false;
6de9cd9a
DN
1775}
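
/* Editor's note: an illustrative sketch, not part of the original
   tree-cfg.c.  It shows how a pass that removes statements might
   refresh the cfun->calls_* flags with the two helpers above: clear
   them, then re-notice every surviving call.  The function name
   example_recompute_call_flags is hypothetical.  */

static void
example_recompute_call_flags (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  clear_special_calls ();
  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      if (is_gimple_call (gsi_stmt (gsi)))
        notice_special_calls (gsi_stmt (gsi));
}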
1776
6de9cd9a
DN
1777/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1778
1779static void
1780remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1781{
6de9cd9a
DN
1782 /* Since this block is no longer reachable, we can just delete all
1783 of its PHI nodes. */
81b822d5 1784 remove_phi_nodes (bb);
6de9cd9a
DN
1785
1786 /* Remove edges to BB's successors. */
628f6a4e 1787 while (EDGE_COUNT (bb->succs) > 0)
d0d2cc21 1788 remove_edge (EDGE_SUCC (bb, 0));
6de9cd9a
DN
1789}
1790
1791
1792/* Remove statements of basic block BB. */
1793
1794static void
1795remove_bb (basic_block bb)
1796{
726a989a 1797 gimple_stmt_iterator i;
6de9cd9a
DN
1798
1799 if (dump_file)
1800 {
1801 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1802 if (dump_flags & TDF_DETAILS)
1803 {
1804 dump_bb (bb, dump_file, 0);
1805 fprintf (dump_file, "\n");
1806 }
1807 }
1808
2b271002
ZD
1809 if (current_loops)
1810 {
1811 struct loop *loop = bb->loop_father;
1812
598ec7bd
ZD
1813 /* If a loop gets removed, clean up the information associated
1814 with it. */
2b271002
ZD
1815 if (loop->latch == bb
1816 || loop->header == bb)
598ec7bd 1817 free_numbers_of_iterations_estimates_loop (loop);
2b271002
ZD
1818 }
1819
6de9cd9a 1820 /* Remove all the instructions in the block. */
726a989a 1821 if (bb_seq (bb) != NULL)
6de9cd9a 1822 {
b5b8b0ac
AO
1823 /* Walk backwards so as to get a chance to substitute all
1824 released DEFs into debug stmts. See
1825 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1826 details. */
1827 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
77568960 1828 {
726a989a
RB
1829 gimple stmt = gsi_stmt (i);
1830 if (gimple_code (stmt) == GIMPLE_LABEL
1831 && (FORCED_LABEL (gimple_label_label (stmt))
1832 || DECL_NONLOCAL (gimple_label_label (stmt))))
7506e1cb
ZD
1833 {
1834 basic_block new_bb;
726a989a 1835 gimple_stmt_iterator new_gsi;
7506e1cb
ZD
1836
1837 /* A non-reachable non-local label may still be referenced.
1838 But it no longer needs to carry the extra semantics of
1839 non-locality. */
726a989a 1840 if (DECL_NONLOCAL (gimple_label_label (stmt)))
7506e1cb 1841 {
726a989a
RB
1842 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1843 FORCED_LABEL (gimple_label_label (stmt)) = 1;
7506e1cb 1844 }
bb1ecfe8 1845
7506e1cb 1846 new_bb = bb->prev_bb;
726a989a
RB
1847 new_gsi = gsi_start_bb (new_bb);
1848 gsi_remove (&i, false);
1849 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
7506e1cb
ZD
1850 }
1851 else
bb1ecfe8 1852 {
7506e1cb
ZD
1853 /* Release SSA definitions if we are in SSA. Note that we
1854 may be called when not in SSA. For example,
1855 final_cleanup calls this function via
1856 cleanup_tree_cfg. */
1857 if (gimple_in_ssa_p (cfun))
1858 release_defs (stmt);
1859
726a989a 1860 gsi_remove (&i, true);
bb1ecfe8 1861 }
6531d1be 1862
b5b8b0ac
AO
1863 if (gsi_end_p (i))
1864 i = gsi_last_bb (bb);
1865 else
1866 gsi_prev (&i);
43e05e45 1867 }
6de9cd9a
DN
1868 }
1869
6de9cd9a 1870 remove_phi_nodes_and_edges_for_unreachable_block (bb);
726a989a 1871 bb->il.gimple = NULL;
6de9cd9a
DN
1872}
1873
6de9cd9a 1874
35920270
KH
 1875/* Given a basic block BB ending with a GIMPLE_COND, GIMPLE_SWITCH or
 1876 computed goto, and a predicate VAL, return the edge that will be taken
 1877 out of the block. If VAL does not match a unique edge, NULL is returned. */
6de9cd9a
DN
1878
1879edge
1880find_taken_edge (basic_block bb, tree val)
1881{
726a989a 1882 gimple stmt;
6de9cd9a
DN
1883
1884 stmt = last_stmt (bb);
1885
1e128c5f
GB
1886 gcc_assert (stmt);
1887 gcc_assert (is_ctrl_stmt (stmt));
6de9cd9a 1888
726a989a
RB
1889 if (val == NULL)
1890 return NULL;
1891
1892 if (!is_gimple_min_invariant (val))
6de9cd9a
DN
1893 return NULL;
1894
726a989a 1895 if (gimple_code (stmt) == GIMPLE_COND)
6de9cd9a
DN
1896 return find_taken_edge_cond_expr (bb, val);
1897
726a989a 1898 if (gimple_code (stmt) == GIMPLE_SWITCH)
6de9cd9a
DN
1899 return find_taken_edge_switch_expr (bb, val);
1900
be477406 1901 if (computed_goto_p (stmt))
1799efef
JL
1902 {
 1903 /* Only optimize if the argument is a label; if the argument is
1904 not a label then we can not construct a proper CFG.
1905
1906 It may be the case that we only need to allow the LABEL_REF to
1907 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1908 appear inside a LABEL_EXPR just to be safe. */
1909 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1910 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1911 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1912 return NULL;
1913 }
be477406 1914
35920270 1915 gcc_unreachable ();
6de9cd9a
DN
1916}
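
/* Editor's note: an illustrative sketch, not part of the original
   tree-cfg.c.  It shows the usual client pattern for
   find_taken_edge (): once the controlling predicate of BB is known to
   be the constant VAL, every outgoing edge other than the taken one is
   dead.  The function name example_remove_dead_outgoing_edges is
   hypothetical, and a real pass would also have to update PHI nodes
   and dominance information after removing edges.  */

static bool
example_remove_dead_outgoing_edges (basic_block bb, tree val)
{
  edge taken = find_taken_edge (bb, val);
  unsigned i;
  bool changed = false;

  if (taken == NULL)
    return false;

  /* remove_edge () shrinks the successor vector, so only advance the
     index when the current edge is kept.  */
  for (i = 0; i < EDGE_COUNT (bb->succs); )
    {
      edge e = EDGE_SUCC (bb, i);
      if (e == taken)
        i++;
      else
        {
          remove_edge (e);
          changed = true;
        }
    }

  return changed;
}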
1917
be477406
JL
1918/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1919 statement, determine which of the outgoing edges will be taken out of the
1920 block. Return NULL if either edge may be taken. */
1921
1922static edge
1923find_taken_edge_computed_goto (basic_block bb, tree val)
1924{
1925 basic_block dest;
1926 edge e = NULL;
1927
1928 dest = label_to_block (val);
1929 if (dest)
1930 {
1931 e = find_edge (bb, dest);
1932 gcc_assert (e != NULL);
1933 }
1934
1935 return e;
1936}
6de9cd9a
DN
1937
1938/* Given a constant value VAL and the entry block BB to a COND_EXPR
1939 statement, determine which of the two edges will be taken out of the
1940 block. Return NULL if either edge may be taken. */
1941
1942static edge
1943find_taken_edge_cond_expr (basic_block bb, tree val)
1944{
1945 edge true_edge, false_edge;
1946
1947 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
6531d1be 1948
f1b19062 1949 gcc_assert (TREE_CODE (val) == INTEGER_CST);
6e682d7e 1950 return (integer_zerop (val) ? false_edge : true_edge);
6de9cd9a
DN
1951}
1952
fca01525 1953/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
6de9cd9a
DN
1954 statement, determine which edge will be taken out of the block. Return
1955 NULL if any edge may be taken. */
1956
1957static edge
1958find_taken_edge_switch_expr (basic_block bb, tree val)
1959{
6de9cd9a
DN
1960 basic_block dest_bb;
1961 edge e;
726a989a
RB
1962 gimple switch_stmt;
1963 tree taken_case;
6de9cd9a 1964
726a989a
RB
1965 switch_stmt = last_stmt (bb);
1966 taken_case = find_case_label_for_value (switch_stmt, val);
6de9cd9a
DN
1967 dest_bb = label_to_block (CASE_LABEL (taken_case));
1968
1969 e = find_edge (bb, dest_bb);
1e128c5f 1970 gcc_assert (e);
6de9cd9a
DN
1971 return e;
1972}
1973
1974
726a989a 1975/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
f667741c
SB
1976 We can make optimal use here of the fact that the case labels are
1977 sorted: We can do a binary search for a case matching VAL. */
6de9cd9a
DN
1978
1979static tree
726a989a 1980find_case_label_for_value (gimple switch_stmt, tree val)
6de9cd9a 1981{
726a989a
RB
1982 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
1983 tree default_case = gimple_switch_default_label (switch_stmt);
6de9cd9a 1984
726a989a 1985 for (low = 0, high = n; high - low > 1; )
6de9cd9a 1986 {
f667741c 1987 size_t i = (high + low) / 2;
726a989a 1988 tree t = gimple_switch_label (switch_stmt, i);
f667741c
SB
1989 int cmp;
1990
1991 /* Cache the result of comparing CASE_LOW and val. */
1992 cmp = tree_int_cst_compare (CASE_LOW (t), val);
6de9cd9a 1993
f667741c
SB
1994 if (cmp > 0)
1995 high = i;
1996 else
1997 low = i;
1998
1999 if (CASE_HIGH (t) == NULL)
6de9cd9a 2000 {
f667741c
SB
 2001 /* A single-valued case label. */
2002 if (cmp == 0)
6de9cd9a
DN
2003 return t;
2004 }
2005 else
2006 {
2007 /* A case range. We can only handle integer ranges. */
f667741c 2008 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
6de9cd9a
DN
2009 return t;
2010 }
2011 }
2012
6de9cd9a
DN
2013 return default_case;
2014}
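
/* Editor's note: an illustrative sketch, not part of the original
   tree-cfg.c.  It walks the same sorted case-label vector that the
   binary search above relies on, here merely to count how many labels
   of SWITCH_STMT dispatch to block DEST.  Label 0 is the default
   label.  The function name example_count_cases_to_block is
   hypothetical.  */

static unsigned
example_count_cases_to_block (gimple switch_stmt, basic_block dest)
{
  size_t i, n = gimple_switch_num_labels (switch_stmt);
  unsigned count = 0;

  for (i = 0; i < n; i++)
    if (label_to_block (CASE_LABEL (gimple_switch_label (switch_stmt, i)))
        == dest)
      count++;

  return count;
}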
2015
2016
6de9cd9a
DN
2017/* Dump a basic block on stderr. */
2018
2019void
726a989a 2020gimple_debug_bb (basic_block bb)
6de9cd9a 2021{
726a989a 2022 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
6de9cd9a
DN
2023}
2024
2025
2026/* Dump basic block with index N on stderr. */
2027
2028basic_block
726a989a 2029gimple_debug_bb_n (int n)
6de9cd9a 2030{
726a989a 2031 gimple_debug_bb (BASIC_BLOCK (n));
6de9cd9a 2032 return BASIC_BLOCK (n);
6531d1be 2033}
6de9cd9a
DN
2034
2035
2036/* Dump the CFG on stderr.
2037
 2038 FLAGS are the same as those used by the tree dumping functions
6531d1be 2039 (see TDF_* in tree-pass.h). */
6de9cd9a
DN
2040
2041void
726a989a 2042gimple_debug_cfg (int flags)
6de9cd9a 2043{
726a989a 2044 gimple_dump_cfg (stderr, flags);
6de9cd9a
DN
2045}
2046
2047
2048/* Dump the program showing basic block boundaries on the given FILE.
2049
 2050 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2051 tree.h). */
2052
2053void
726a989a 2054gimple_dump_cfg (FILE *file, int flags)
6de9cd9a
DN
2055{
2056 if (flags & TDF_DETAILS)
2057 {
2058 const char *funcname
673fda6b 2059 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2060
2061 fputc ('\n', file);
2062 fprintf (file, ";; Function %s\n\n", funcname);
2063 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2064 n_basic_blocks, n_edges, last_basic_block);
2065
2066 brief_dump_cfg (file);
2067 fprintf (file, "\n");
2068 }
2069
2070 if (flags & TDF_STATS)
2071 dump_cfg_stats (file);
2072
2073 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2074}
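
/* Editor's note: an illustrative sketch, not part of the original
   tree-cfg.c.  It shows the intended use of the dumpers above, e.g.
   from an ad-hoc debugging hack or a gdb session.  The function name
   example_dump_current_cfg is hypothetical.  */

static void
example_dump_current_cfg (void)
{
  /* Detailed per-block dump plus the CFG statistics table.  */
  gimple_dump_cfg (stderr, TDF_DETAILS | TDF_STATS);

  /* From gdb, a single block can be inspected with
       call gimple_debug_bb_n (N)
     for a block index N.  */
}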
2075
2076
2077/* Dump CFG statistics on FILE. */
2078
2079void
2080dump_cfg_stats (FILE *file)
2081{
2082 static long max_num_merged_labels = 0;
2083 unsigned long size, total = 0;
7b0cab99 2084 long num_edges;
6de9cd9a
DN
2085 basic_block bb;
2086 const char * const fmt_str = "%-30s%-13s%12s\n";
f7fda749 2087 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
cac50d94 2088 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
6de9cd9a
DN
2089 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2090 const char *funcname
673fda6b 2091 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2092
2093
2094 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2095
2096 fprintf (file, "---------------------------------------------------------\n");
2097 fprintf (file, fmt_str, "", " Number of ", "Memory");
2098 fprintf (file, fmt_str, "", " instances ", "used ");
2099 fprintf (file, "---------------------------------------------------------\n");
2100
2101 size = n_basic_blocks * sizeof (struct basic_block_def);
2102 total += size;
f7fda749
RH
2103 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2104 SCALE (size), LABEL (size));
6de9cd9a 2105
7b0cab99 2106 num_edges = 0;
6de9cd9a 2107 FOR_EACH_BB (bb)
7b0cab99
JH
2108 num_edges += EDGE_COUNT (bb->succs);
2109 size = num_edges * sizeof (struct edge_def);
6de9cd9a 2110 total += size;
cac50d94 2111 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
6de9cd9a 2112
6de9cd9a
DN
2113 fprintf (file, "---------------------------------------------------------\n");
2114 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2115 LABEL (total));
2116 fprintf (file, "---------------------------------------------------------\n");
2117 fprintf (file, "\n");
2118
2119 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2120 max_num_merged_labels = cfg_stats.num_merged_labels;
2121
2122 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2123 cfg_stats.num_merged_labels, max_num_merged_labels);
2124
2125 fprintf (file, "\n");
2126}
2127
2128
2129/* Dump CFG statistics on stderr. Keep extern so that it's always
2130 linked in the final executable. */
2131
24e47c76 2132DEBUG_FUNCTION void
6de9cd9a
DN
2133debug_cfg_stats (void)
2134{
2135 dump_cfg_stats (stderr);
2136}
2137
2138
2139/* Dump the flowgraph to a .vcg FILE. */
2140
2141static void
726a989a 2142gimple_cfg2vcg (FILE *file)
6de9cd9a
DN
2143{
2144 edge e;
628f6a4e 2145 edge_iterator ei;
6de9cd9a
DN
2146 basic_block bb;
2147 const char *funcname
673fda6b 2148 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2149
2150 /* Write the file header. */
2151 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2152 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2153 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2154
2155 /* Write blocks and edges. */
628f6a4e 2156 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
6de9cd9a
DN
2157 {
2158 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2159 e->dest->index);
2160
2161 if (e->flags & EDGE_FAKE)
2162 fprintf (file, " linestyle: dotted priority: 10");
2163 else
2164 fprintf (file, " linestyle: solid priority: 100");
2165
2166 fprintf (file, " }\n");
2167 }
2168 fputc ('\n', file);
2169
2170 FOR_EACH_BB (bb)
2171 {
726a989a 2172 enum gimple_code head_code, end_code;
6de9cd9a
DN
2173 const char *head_name, *end_name;
2174 int head_line = 0;
2175 int end_line = 0;
726a989a
RB
2176 gimple first = first_stmt (bb);
2177 gimple last = last_stmt (bb);
6de9cd9a
DN
2178
2179 if (first)
2180 {
726a989a
RB
2181 head_code = gimple_code (first);
2182 head_name = gimple_code_name[head_code];
6de9cd9a
DN
2183 head_line = get_lineno (first);
2184 }
2185 else
2186 head_name = "no-statement";
2187
2188 if (last)
2189 {
726a989a
RB
2190 end_code = gimple_code (last);
2191 end_name = gimple_code_name[end_code];
6de9cd9a
DN
2192 end_line = get_lineno (last);
2193 }
2194 else
2195 end_name = "no-statement";
2196
2197 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2198 bb->index, bb->index, head_name, head_line, end_name,
2199 end_line);
2200
628f6a4e 2201 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
2202 {
2203 if (e->dest == EXIT_BLOCK_PTR)
2204 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2205 else
2206 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2207
2208 if (e->flags & EDGE_FAKE)
2209 fprintf (file, " priority: 10 linestyle: dotted");
2210 else
2211 fprintf (file, " priority: 100 linestyle: solid");
2212
2213 fprintf (file, " }\n");
2214 }
2215
2216 if (bb->next_bb != EXIT_BLOCK_PTR)
2217 fputc ('\n', file);
2218 }
2219
2220 fputs ("}\n\n", file);
2221}
2222
2223
2224
2225/*---------------------------------------------------------------------------
2226 Miscellaneous helpers
2227---------------------------------------------------------------------------*/
2228
2229/* Return true if T represents a stmt that always transfers control. */
2230
2231bool
726a989a 2232is_ctrl_stmt (gimple t)
6de9cd9a 2233{
40a32862
RH
2234 switch (gimple_code (t))
2235 {
2236 case GIMPLE_COND:
2237 case GIMPLE_SWITCH:
2238 case GIMPLE_GOTO:
2239 case GIMPLE_RETURN:
2240 case GIMPLE_RESX:
2241 return true;
2242 default:
2243 return false;
2244 }
6de9cd9a
DN
2245}
2246
2247
2248/* Return true if T is a statement that may alter the flow of control
2249 (e.g., a call to a non-returning function). */
2250
2251bool
726a989a 2252is_ctrl_altering_stmt (gimple t)
6de9cd9a 2253{
1e128c5f 2254 gcc_assert (t);
726a989a 2255
8b9db065 2256 switch (gimple_code (t))
6de9cd9a 2257 {
8b9db065
RH
2258 case GIMPLE_CALL:
2259 {
2260 int flags = gimple_call_flags (t);
726a989a 2261
8b9db065
RH
2262 /* A non-pure/const call alters flow control if the current
2263 function has nonlocal labels. */
2264 if (!(flags & (ECF_CONST | ECF_PURE)) && cfun->has_nonlocal_label)
2265 return true;
6de9cd9a 2266
8b9db065 2267 /* A call also alters control flow if it does not return. */
5de8da9b 2268 if (flags & ECF_NORETURN)
8b9db065 2269 return true;
c54c785d
JH
2270
2271 /* BUILT_IN_RETURN call is same as return statement. */
2272 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2273 return true;
8b9db065
RH
2274 }
2275 break;
6de9cd9a 2276
1d65f45c
RH
2277 case GIMPLE_EH_DISPATCH:
2278 /* EH_DISPATCH branches to the individual catch handlers at
2279 this level of a try or allowed-exceptions region. It can
2280 fallthru to the next statement as well. */
2281 return true;
2282
1c384bf1
RH
2283 case GIMPLE_ASM:
2284 if (gimple_asm_nlabels (t) > 0)
2285 return true;
2286 break;
2287
8b9db065
RH
2288 CASE_GIMPLE_OMP:
2289 /* OpenMP directives alter control flow. */
2290 return true;
2291
2292 default:
2293 break;
2294 }
50674e96 2295
6de9cd9a 2296 /* If a statement can throw, it alters control flow. */
726a989a 2297 return stmt_can_throw_internal (t);
6de9cd9a
DN
2298}
2299
2300
4f6c2131 2301/* Return true if T is a simple local goto. */
6de9cd9a
DN
2302
2303bool
726a989a 2304simple_goto_p (gimple t)
6de9cd9a 2305{
726a989a
RB
2306 return (gimple_code (t) == GIMPLE_GOTO
2307 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
4f6c2131
EB
2308}
2309
2310
2311/* Return true if T can make an abnormal transfer of control flow.
2312 Transfers of control flow associated with EH are excluded. */
2313
2314bool
726a989a 2315stmt_can_make_abnormal_goto (gimple t)
4f6c2131
EB
2316{
2317 if (computed_goto_p (t))
2318 return true;
726a989a
RB
2319 if (is_gimple_call (t))
2320 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
4f6c2131 2321 return false;
6de9cd9a
DN
2322}
2323
2324
726a989a
RB
2325/* Return true if STMT should start a new basic block. PREV_STMT is
2326 the statement preceding STMT. It is used when STMT is a label or a
2327 case label. Labels should only start a new basic block if their
 2328 previous statement wasn't a label. Otherwise, a sequence of labels
2329 would generate unnecessary basic blocks that only contain a single
2330 label. */
6de9cd9a
DN
2331
2332static inline bool
726a989a 2333stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
6de9cd9a 2334{
726a989a 2335 if (stmt == NULL)
6de9cd9a
DN
2336 return false;
2337
726a989a
RB
2338 /* Labels start a new basic block only if the preceding statement
2339 wasn't a label of the same type. This prevents the creation of
2340 consecutive blocks that have nothing but a single label. */
2341 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a
DN
2342 {
2343 /* Nonlocal and computed GOTO targets always start a new block. */
726a989a
RB
2344 if (DECL_NONLOCAL (gimple_label_label (stmt))
2345 || FORCED_LABEL (gimple_label_label (stmt)))
6de9cd9a
DN
2346 return true;
2347
726a989a 2348 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
6de9cd9a 2349 {
726a989a 2350 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
6de9cd9a
DN
2351 return true;
2352
2353 cfg_stats.num_merged_labels++;
2354 return false;
2355 }
2356 else
2357 return true;
2358 }
2359
2360 return false;
2361}
2362
2363
2364/* Return true if T should end a basic block. */
2365
2366bool
726a989a 2367stmt_ends_bb_p (gimple t)
6de9cd9a
DN
2368{
2369 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2370}
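
/* Editor's note: an illustrative sketch, not part of the original
   tree-cfg.c.  It phrases the invariant behind stmt_ends_bb_p () as a
   checker: a statement that must end a block may only appear as the
   last (non-debug) statement of its block.  The function name
   example_block_layout_ok_p is hypothetical.  */

static bool
example_block_layout_ok_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    if (stmt_ends_bb_p (gsi_stmt (gsi))
        && gsi_stmt (gsi) != last_stmt (bb))
      return false;

  return true;
}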
2371
726a989a 2372/* Remove block annotations and other data structures. */
6de9cd9a
DN
2373
2374void
242229bb 2375delete_tree_cfg_annotations (void)
6de9cd9a 2376{
6de9cd9a 2377 label_to_block_map = NULL;
6de9cd9a
DN
2378}
2379
2380
2381/* Return the first statement in basic block BB. */
2382
726a989a 2383gimple
6de9cd9a
DN
2384first_stmt (basic_block bb)
2385{
726a989a 2386 gimple_stmt_iterator i = gsi_start_bb (bb);
b5b8b0ac
AO
2387 gimple stmt = NULL;
2388
2389 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2390 {
2391 gsi_next (&i);
2392 stmt = NULL;
2393 }
2394 return stmt;
6de9cd9a
DN
2395}
2396
6c52e687
CC
2397/* Return the first non-label statement in basic block BB. */
2398
2399static gimple
2400first_non_label_stmt (basic_block bb)
2401{
2402 gimple_stmt_iterator i = gsi_start_bb (bb);
2403 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2404 gsi_next (&i);
2405 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2406}
2407
6de9cd9a
DN
2408/* Return the last statement in basic block BB. */
2409
726a989a 2410gimple
6de9cd9a
DN
2411last_stmt (basic_block bb)
2412{
b5b8b0ac
AO
2413 gimple_stmt_iterator i = gsi_last_bb (bb);
2414 gimple stmt = NULL;
2415
2416 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2417 {
2418 gsi_prev (&i);
2419 stmt = NULL;
2420 }
2421 return stmt;
6de9cd9a
DN
2422}
2423
6de9cd9a
DN
2424/* Return the last statement of an otherwise empty block. Return NULL
2425 if the block is totally empty, or if it contains more than one
2426 statement. */
2427
726a989a 2428gimple
6de9cd9a
DN
2429last_and_only_stmt (basic_block bb)
2430{
b5b8b0ac 2431 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
726a989a 2432 gimple last, prev;
6de9cd9a 2433
726a989a
RB
2434 if (gsi_end_p (i))
2435 return NULL;
6de9cd9a 2436
726a989a 2437 last = gsi_stmt (i);
b5b8b0ac 2438 gsi_prev_nondebug (&i);
726a989a 2439 if (gsi_end_p (i))
6de9cd9a
DN
2440 return last;
2441
2442 /* Empty statements should no longer appear in the instruction stream.
2443 Everything that might have appeared before should be deleted by
726a989a 2444 remove_useless_stmts, and the optimizers should just gsi_remove
6de9cd9a
DN
2445 instead of smashing with build_empty_stmt.
2446
2447 Thus the only thing that should appear here in a block containing
2448 one executable statement is a label. */
726a989a
RB
2449 prev = gsi_stmt (i);
2450 if (gimple_code (prev) == GIMPLE_LABEL)
6de9cd9a
DN
2451 return last;
2452 else
726a989a 2453 return NULL;
82b85a85 2454}
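
/* Editor's note: an illustrative sketch, not part of the original
   tree-cfg.c.  The three accessors above deliberately skip debug
   statements, so "is this block empty?" style tests should be written
   in terms of them rather than raw iterators.  The function name
   example_bb_has_no_executable_stmt_p is hypothetical.  */

static bool
example_bb_has_no_executable_stmt_p (basic_block bb)
{
  gimple only = last_and_only_stmt (bb);

  /* Either the block holds nothing but debug statements, or its only
     real statement is a label.  */
  return (first_stmt (bb) == NULL
          || (only != NULL && gimple_code (only) == GIMPLE_LABEL));
}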
6de9cd9a 2455
4f7db7f7
KH
2456/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2457
2458static void
2459reinstall_phi_args (edge new_edge, edge old_edge)
2460{
ea7e6d5a
AH
2461 edge_var_map_vector v;
2462 edge_var_map *vm;
2463 int i;
726a989a 2464 gimple_stmt_iterator phis;
b8698a0f 2465
ea7e6d5a
AH
2466 v = redirect_edge_var_map_vector (old_edge);
2467 if (!v)
4f7db7f7 2468 return;
b8698a0f 2469
726a989a
RB
2470 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2471 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2472 i++, gsi_next (&phis))
4f7db7f7 2473 {
726a989a 2474 gimple phi = gsi_stmt (phis);
ea7e6d5a
AH
2475 tree result = redirect_edge_var_map_result (vm);
2476 tree arg = redirect_edge_var_map_def (vm);
b8698a0f 2477
726a989a 2478 gcc_assert (result == gimple_phi_result (phi));
b8698a0f 2479
f5045c96 2480 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
4f7db7f7 2481 }
b8698a0f 2482
ea7e6d5a 2483 redirect_edge_var_map_clear (old_edge);
4f7db7f7
KH
2484}
2485
2a8a8292 2486/* Returns the basic block after which the new basic block created
b9a66240
ZD
2487 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2488 near its "logical" location. This is of most help to humans looking
2489 at debugging dumps. */
2490
2491static basic_block
2492split_edge_bb_loc (edge edge_in)
2493{
2494 basic_block dest = edge_in->dest;
88e24a5a 2495 basic_block dest_prev = dest->prev_bb;
b9a66240 2496
88e24a5a
RH
2497 if (dest_prev)
2498 {
2499 edge e = find_edge (dest_prev, dest);
2500 if (e && !(e->flags & EDGE_COMPLEX))
2501 return edge_in->src;
2502 }
2503 return dest_prev;
b9a66240
ZD
2504}
2505
6de9cd9a
DN
2506/* Split a (typically critical) edge EDGE_IN. Return the new block.
2507 Abort on abnormal edges. */
2508
2509static basic_block
726a989a 2510gimple_split_edge (edge edge_in)
6de9cd9a 2511{
4741d956 2512 basic_block new_bb, after_bb, dest;
6de9cd9a 2513 edge new_edge, e;
6de9cd9a
DN
2514
2515 /* Abnormal edges cannot be split. */
1e128c5f 2516 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
6de9cd9a 2517
6de9cd9a
DN
2518 dest = edge_in->dest;
2519
b9a66240 2520 after_bb = split_edge_bb_loc (edge_in);
6de9cd9a
DN
2521
2522 new_bb = create_empty_bb (after_bb);
b829f3fa
JH
2523 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2524 new_bb->count = edge_in->count;
6de9cd9a 2525 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
b829f3fa
JH
2526 new_edge->probability = REG_BR_PROB_BASE;
2527 new_edge->count = edge_in->count;
6de9cd9a 2528
1e128c5f 2529 e = redirect_edge_and_branch (edge_in, new_bb);
c7b852c8 2530 gcc_assert (e == edge_in);
4f7db7f7 2531 reinstall_phi_args (new_edge, e);
6de9cd9a
DN
2532
2533 return new_bb;
2534}
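
/* Editor's note: an illustrative sketch, not part of the original
   tree-cfg.c.  gimple_split_edge () is normally reached through the
   generic split_edge () CFG hook (an assumption about cfghooks.c, not
   shown in this file).  A typical client splits a critical edge so
   that newly inserted code gets a block of its own.  The function name
   example_split_if_critical is hypothetical.  */

static basic_block
example_split_if_critical (edge e)
{
  /* A critical edge leaves a block with several successors and enters
     a block with several predecessors.  */
  if (EDGE_COUNT (e->src->succs) > 1
      && EDGE_COUNT (e->dest->preds) > 1)
    return split_edge (e);

  return NULL;
}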
2535
6de9cd9a 2536/* Callback for walk_tree, check that all elements with address taken are
7a442a1d
SB
2537 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2538 inside a PHI node. */
6de9cd9a
DN
2539
2540static tree
2fbe90f2 2541verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
6de9cd9a
DN
2542{
2543 tree t = *tp, x;
2544
2545 if (TYPE_P (t))
2546 *walk_subtrees = 0;
6531d1be 2547
e8ca4159 2548 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2fbe90f2 2549#define CHECK_OP(N, MSG) \
e8ca4159 2550 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2fbe90f2 2551 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
6de9cd9a
DN
2552
2553 switch (TREE_CODE (t))
2554 {
2555 case SSA_NAME:
2556 if (SSA_NAME_IN_FREE_LIST (t))
2557 {
2558 error ("SSA name in freelist but still referenced");
2559 return *tp;
2560 }
2561 break;
2562
26de0bcb
AP
2563 case INDIRECT_REF:
2564 x = TREE_OPERAND (t, 0);
2565 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2566 {
2567 error ("Indirect reference's operand is not a register or a constant.");
2568 return x;
2569 }
2570 break;
2571
0bca51f0
DN
2572 case ASSERT_EXPR:
2573 x = fold (ASSERT_EXPR_COND (t));
2574 if (x == boolean_false_node)
2575 {
2576 error ("ASSERT_EXPR with an always-false condition");
2577 return *tp;
2578 }
2579 break;
2580
6de9cd9a 2581 case MODIFY_EXPR:
26de0bcb 2582 error ("MODIFY_EXPR not expected while having tuples.");
e57fcb68 2583 return *tp;
6de9cd9a
DN
2584
2585 case ADDR_EXPR:
81fc3052 2586 {
81fc3052
DB
2587 bool old_constant;
2588 bool old_side_effects;
81fc3052
DB
2589 bool new_constant;
2590 bool new_side_effects;
2591
51eed280
PB
2592 gcc_assert (is_gimple_address (t));
2593
81fc3052
DB
2594 old_constant = TREE_CONSTANT (t);
2595 old_side_effects = TREE_SIDE_EFFECTS (t);
2596
127203ac 2597 recompute_tree_invariant_for_addr_expr (t);
81fc3052
DB
2598 new_side_effects = TREE_SIDE_EFFECTS (t);
2599 new_constant = TREE_CONSTANT (t);
2600
81fc3052
DB
2601 if (old_constant != new_constant)
2602 {
2603 error ("constant not recomputed when ADDR_EXPR changed");
2604 return t;
2605 }
2606 if (old_side_effects != new_side_effects)
2607 {
2608 error ("side effects not recomputed when ADDR_EXPR changed");
2609 return t;
2610 }
2611
2612 /* Skip any references (they will be checked when we recurse down the
2613 tree) and ensure that any variable used as a prefix is marked
2614 addressable. */
2615 for (x = TREE_OPERAND (t, 0);
2616 handled_component_p (x);
2617 x = TREE_OPERAND (x, 0))
2618 ;
2619
5006671f
RG
2620 if (!(TREE_CODE (x) == VAR_DECL
2621 || TREE_CODE (x) == PARM_DECL
2622 || TREE_CODE (x) == RESULT_DECL))
81fc3052
DB
2623 return NULL;
2624 if (!TREE_ADDRESSABLE (x))
2625 {
2626 error ("address taken, but ADDRESSABLE bit not set");
2627 return x;
2628 }
ba4d8f9d
RG
2629 if (DECL_GIMPLE_REG_P (x))
2630 {
2631 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2632 return x;
2633 }
bdb69bee 2634
81fc3052
DB
2635 break;
2636 }
6de9cd9a
DN
2637
2638 case COND_EXPR:
a6234684 2639 x = COND_EXPR_COND (t);
d40055ab 2640 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
6de9cd9a 2641 {
d40055ab 2642 error ("non-integral used in condition");
6de9cd9a
DN
2643 return x;
2644 }
9c691961
AP
2645 if (!is_gimple_condexpr (x))
2646 {
ab532386 2647 error ("invalid conditional operand");
9c691961
AP
2648 return x;
2649 }
6de9cd9a
DN
2650 break;
2651
a134e5f3
TB
2652 case NON_LVALUE_EXPR:
2653 gcc_unreachable ();
2654
1043771b 2655 CASE_CONVERT:
6de9cd9a 2656 case FIX_TRUNC_EXPR:
6de9cd9a
DN
2657 case FLOAT_EXPR:
2658 case NEGATE_EXPR:
2659 case ABS_EXPR:
2660 case BIT_NOT_EXPR:
6de9cd9a 2661 case TRUTH_NOT_EXPR:
ab532386 2662 CHECK_OP (0, "invalid operand to unary operator");
6de9cd9a
DN
2663 break;
2664
2665 case REALPART_EXPR:
2666 case IMAGPART_EXPR:
2fbe90f2
RK
2667 case COMPONENT_REF:
2668 case ARRAY_REF:
2669 case ARRAY_RANGE_REF:
2670 case BIT_FIELD_REF:
2671 case VIEW_CONVERT_EXPR:
2672 /* We have a nest of references. Verify that each of the operands
2673 that determine where to reference is either a constant or a variable,
2674 verify that the base is valid, and then show we've already checked
2675 the subtrees. */
afe84921 2676 while (handled_component_p (t))
2fbe90f2
RK
2677 {
2678 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
ab532386 2679 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2fbe90f2
RK
2680 else if (TREE_CODE (t) == ARRAY_REF
2681 || TREE_CODE (t) == ARRAY_RANGE_REF)
2682 {
ab532386 2683 CHECK_OP (1, "invalid array index");
2fbe90f2 2684 if (TREE_OPERAND (t, 2))
ab532386 2685 CHECK_OP (2, "invalid array lower bound");
2fbe90f2 2686 if (TREE_OPERAND (t, 3))
ab532386 2687 CHECK_OP (3, "invalid array stride");
2fbe90f2
RK
2688 }
2689 else if (TREE_CODE (t) == BIT_FIELD_REF)
2690 {
e55f42fb
RG
2691 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2692 || !host_integerp (TREE_OPERAND (t, 2), 1))
2693 {
2694 error ("invalid position or size operand to BIT_FIELD_REF");
2695 return t;
2696 }
fc0f49f3
RG
2697 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2698 && (TYPE_PRECISION (TREE_TYPE (t))
2699 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2700 {
2701 error ("integral result type precision does not match "
2702 "field size of BIT_FIELD_REF");
2703 return t;
2704 }
2705 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2706 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2707 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2708 {
2709 error ("mode precision of non-integral result does not "
2710 "match field size of BIT_FIELD_REF");
2711 return t;
2712 }
2fbe90f2
RK
2713 }
2714
2715 t = TREE_OPERAND (t, 0);
2716 }
2717
bb0c55f6 2718 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2fbe90f2 2719 {
ab532386 2720 error ("invalid reference prefix");
2fbe90f2
RK
2721 return t;
2722 }
2723 *walk_subtrees = 0;
6de9cd9a 2724 break;
5be014d5
AP
2725 case PLUS_EXPR:
2726 case MINUS_EXPR:
2727 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2728 POINTER_PLUS_EXPR. */
2729 if (POINTER_TYPE_P (TREE_TYPE (t)))
2730 {
2731 error ("invalid operand to plus/minus, type is a pointer");
2732 return t;
2733 }
2734 CHECK_OP (0, "invalid operand to binary operator");
2735 CHECK_OP (1, "invalid operand to binary operator");
2736 break;
6de9cd9a 2737
5be014d5
AP
2738 case POINTER_PLUS_EXPR:
2739 /* Check to make sure the first operand is a pointer or reference type. */
2740 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2741 {
2742 error ("invalid operand to pointer plus, first operand is not a pointer");
2743 return t;
2744 }
2745 /* Check to make sure the second operand is an integer with type of
2746 sizetype. */
36618b93
RG
2747 if (!useless_type_conversion_p (sizetype,
2748 TREE_TYPE (TREE_OPERAND (t, 1))))
5be014d5
AP
2749 {
2750 error ("invalid operand to pointer plus, second operand is not an "
2751 "integer with type of sizetype.");
2752 return t;
2753 }
2754 /* FALLTHROUGH */
6de9cd9a
DN
2755 case LT_EXPR:
2756 case LE_EXPR:
2757 case GT_EXPR:
2758 case GE_EXPR:
2759 case EQ_EXPR:
2760 case NE_EXPR:
2761 case UNORDERED_EXPR:
2762 case ORDERED_EXPR:
2763 case UNLT_EXPR:
2764 case UNLE_EXPR:
2765 case UNGT_EXPR:
2766 case UNGE_EXPR:
2767 case UNEQ_EXPR:
d1a7edaf 2768 case LTGT_EXPR:
6de9cd9a
DN
2769 case MULT_EXPR:
2770 case TRUNC_DIV_EXPR:
2771 case CEIL_DIV_EXPR:
2772 case FLOOR_DIV_EXPR:
2773 case ROUND_DIV_EXPR:
2774 case TRUNC_MOD_EXPR:
2775 case CEIL_MOD_EXPR:
2776 case FLOOR_MOD_EXPR:
2777 case ROUND_MOD_EXPR:
2778 case RDIV_EXPR:
2779 case EXACT_DIV_EXPR:
2780 case MIN_EXPR:
2781 case MAX_EXPR:
2782 case LSHIFT_EXPR:
2783 case RSHIFT_EXPR:
2784 case LROTATE_EXPR:
2785 case RROTATE_EXPR:
2786 case BIT_IOR_EXPR:
2787 case BIT_XOR_EXPR:
2788 case BIT_AND_EXPR:
ab532386
JM
2789 CHECK_OP (0, "invalid operand to binary operator");
2790 CHECK_OP (1, "invalid operand to binary operator");
6de9cd9a
DN
2791 break;
2792
84816907
JM
2793 case CONSTRUCTOR:
2794 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2795 *walk_subtrees = 0;
2796 break;
2797
6de9cd9a
DN
2798 default:
2799 break;
2800 }
2801 return NULL;
2fbe90f2
RK
2802
2803#undef CHECK_OP
6de9cd9a
DN
2804}
2805
7e98624c
RG
2806
2807/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2808 Returns true if there is an error, otherwise false. */
2809
2810static bool
726a989a 2811verify_types_in_gimple_min_lval (tree expr)
7e98624c
RG
2812{
2813 tree op;
2814
2815 if (is_gimple_id (expr))
2816 return false;
2817
9f509004
RG
2818 if (!INDIRECT_REF_P (expr)
2819 && TREE_CODE (expr) != TARGET_MEM_REF)
7e98624c
RG
2820 {
2821 error ("invalid expression for min lvalue");
2822 return true;
2823 }
2824
9f509004
RG
2825 /* TARGET_MEM_REFs are strange beasts. */
2826 if (TREE_CODE (expr) == TARGET_MEM_REF)
2827 return false;
2828
7e98624c
RG
2829 op = TREE_OPERAND (expr, 0);
2830 if (!is_gimple_val (op))
2831 {
2832 error ("invalid operand in indirect reference");
2833 debug_generic_stmt (op);
2834 return true;
2835 }
2836 if (!useless_type_conversion_p (TREE_TYPE (expr),
2837 TREE_TYPE (TREE_TYPE (op))))
2838 {
2839 error ("type mismatch in indirect reference");
2840 debug_generic_stmt (TREE_TYPE (expr));
2841 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2842 return true;
2843 }
2844
2845 return false;
2846}
2847
3a19701a
RG
2848/* Verify if EXPR is a valid GIMPLE reference expression. If
2849 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
7e98624c
RG
2850 if there is an error, otherwise false. */
2851
2852static bool
3a19701a 2853verify_types_in_gimple_reference (tree expr, bool require_lvalue)
7e98624c
RG
2854{
2855 while (handled_component_p (expr))
2856 {
2857 tree op = TREE_OPERAND (expr, 0);
2858
2859 if (TREE_CODE (expr) == ARRAY_REF
2860 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2861 {
2862 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2863 || (TREE_OPERAND (expr, 2)
2864 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2865 || (TREE_OPERAND (expr, 3)
2866 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2867 {
2868 error ("invalid operands to array reference");
2869 debug_generic_stmt (expr);
2870 return true;
2871 }
2872 }
2873
2874 /* Verify if the reference array element types are compatible. */
2875 if (TREE_CODE (expr) == ARRAY_REF
2876 && !useless_type_conversion_p (TREE_TYPE (expr),
2877 TREE_TYPE (TREE_TYPE (op))))
2878 {
2879 error ("type mismatch in array reference");
2880 debug_generic_stmt (TREE_TYPE (expr));
2881 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2882 return true;
2883 }
2884 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2885 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2886 TREE_TYPE (TREE_TYPE (op))))
2887 {
2888 error ("type mismatch in array range reference");
2889 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2890 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2891 return true;
2892 }
2893
2894 if ((TREE_CODE (expr) == REALPART_EXPR
2895 || TREE_CODE (expr) == IMAGPART_EXPR)
2896 && !useless_type_conversion_p (TREE_TYPE (expr),
2897 TREE_TYPE (TREE_TYPE (op))))
2898 {
2899 error ("type mismatch in real/imagpart reference");
2900 debug_generic_stmt (TREE_TYPE (expr));
2901 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2902 return true;
2903 }
2904
2905 if (TREE_CODE (expr) == COMPONENT_REF
2906 && !useless_type_conversion_p (TREE_TYPE (expr),
2907 TREE_TYPE (TREE_OPERAND (expr, 1))))
2908 {
2909 error ("type mismatch in component reference");
2910 debug_generic_stmt (TREE_TYPE (expr));
2911 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2912 return true;
2913 }
2914
cc12e760
MJ
2915 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2916 {
2917 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2918 that their operand is not an SSA name or an invariant when
2919 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2920 bug). Otherwise there is nothing to verify, gross mismatches at
2921 most invoke undefined behavior. */
2922 if (require_lvalue
2923 && (TREE_CODE (op) == SSA_NAME
2924 || is_gimple_min_invariant (op)))
2925 {
2926 error ("Conversion of an SSA_NAME on the left hand side.");
2927 debug_generic_stmt (expr);
2928 return true;
2929 }
2930 else if (!handled_component_p (op))
2931 return false;
2932 }
7e98624c
RG
2933
2934 expr = op;
2935 }
2936
3a19701a
RG
2937 return ((require_lvalue || !is_gimple_min_invariant (expr))
2938 && verify_types_in_gimple_min_lval (expr));
7e98624c
RG
2939}
2940
20dcff2a
RG
2941/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
2942 list of pointer-to types that is trivially convertible to DEST. */
2943
2944static bool
2945one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2946{
2947 tree src;
2948
2949 if (!TYPE_POINTER_TO (src_obj))
2950 return true;
2951
2952 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2953 if (useless_type_conversion_p (dest, src))
2954 return true;
2955
2956 return false;
2957}
2958
726a989a
RB
2959/* Return true if TYPE1 is a fixed-point type and if conversions to and
2960 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
2961
2962static bool
2963valid_fixed_convert_types_p (tree type1, tree type2)
2964{
2965 return (FIXED_POINT_TYPE_P (type1)
2966 && (INTEGRAL_TYPE_P (type2)
2967 || SCALAR_FLOAT_TYPE_P (type2)
2968 || FIXED_POINT_TYPE_P (type2)));
2969}
2970
726a989a
RB
2971/* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
2972 is a problem, otherwise false. */
2973
2974static bool
b59d3976 2975verify_gimple_call (gimple stmt)
726a989a 2976{
b59d3976
RG
2977 tree fn = gimple_call_fn (stmt);
2978 tree fntype;
f68a75df
RG
2979 unsigned i;
2980
2981 if (TREE_CODE (fn) != OBJ_TYPE_REF
2982 && !is_gimple_val (fn))
2983 {
2984 error ("invalid function in gimple call");
2985 debug_generic_stmt (fn);
2986 return true;
2987 }
726a989a 2988
b59d3976
RG
2989 if (!POINTER_TYPE_P (TREE_TYPE (fn))
2990 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
2991 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
2992 {
2993 error ("non-function in gimple call");
2994 return true;
2995 }
726a989a 2996
b59d3976 2997 if (gimple_call_lhs (stmt)
cc12e760
MJ
2998 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
2999 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
b59d3976
RG
3000 {
3001 error ("invalid LHS in gimple call");
3002 return true;
3003 }
726a989a 3004
5de8da9b
AO
3005 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3006 {
3007 error ("LHS in noreturn call");
3008 return true;
3009 }
3010
b59d3976
RG
3011 fntype = TREE_TYPE (TREE_TYPE (fn));
3012 if (gimple_call_lhs (stmt)
3013 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3014 TREE_TYPE (fntype))
3015 /* ??? At least C++ misses conversions at assignments from
3016 void * call results.
3017 ??? Java is completely off. Especially with functions
3018 returning java.lang.Object.
3019 For now simply allow arbitrary pointer type conversions. */
3020 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3021 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3022 {
3023 error ("invalid conversion in gimple call");
3024 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3025 debug_generic_stmt (TREE_TYPE (fntype));
3026 return true;
3027 }
726a989a 3028
f68a75df
RG
3029 if (gimple_call_chain (stmt)
3030 && !is_gimple_val (gimple_call_chain (stmt)))
3031 {
3032 error ("invalid static chain in gimple call");
3033 debug_generic_stmt (gimple_call_chain (stmt));
3034 return true;
3035 }
3036
fe663f4e 3037 /* If there is a static chain argument, this should not be an indirect
9f62cb92 3038 call, and the decl should have DECL_STATIC_CHAIN set. */
fe663f4e
RH
3039 if (gimple_call_chain (stmt))
3040 {
3041 if (TREE_CODE (fn) != ADDR_EXPR
3042 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
3043 {
3044 error ("static chain in indirect gimple call");
3045 return true;
3046 }
3047 fn = TREE_OPERAND (fn, 0);
3048
9f62cb92 3049 if (!DECL_STATIC_CHAIN (fn))
fe663f4e
RH
3050 {
3051 error ("static chain with function that doesn't use one");
3052 return true;
3053 }
3054 }
3055
b59d3976
RG
3056 /* ??? The C frontend passes unpromoted arguments in case it
3057 didn't see a function declaration before the call. So for now
f68a75df 3058 leave the call arguments mostly unverified. Once we gimplify
b59d3976 3059 unit-at-a-time we have a chance to fix this. */
726a989a 3060
f68a75df
RG
3061 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3062 {
3063 tree arg = gimple_call_arg (stmt, i);
3064 if (!is_gimple_operand (arg))
3065 {
3066 error ("invalid argument to gimple call");
3067 debug_generic_expr (arg);
3068 }
3069 }
3070
b59d3976 3071 return false;
726a989a
RB
3072}
3073
b59d3976
RG
3074/* Verifies the gimple comparison with the result type TYPE and
3075 the operands OP0 and OP1. */
17d23165
RS
3076
3077static bool
b59d3976 3078verify_gimple_comparison (tree type, tree op0, tree op1)
17d23165 3079{
b59d3976
RG
3080 tree op0_type = TREE_TYPE (op0);
3081 tree op1_type = TREE_TYPE (op1);
726a989a 3082
b59d3976
RG
3083 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3084 {
3085 error ("invalid operands in gimple comparison");
3086 return true;
3087 }
17d23165 3088
b59d3976
RG
3089 /* For comparisons we do not have the operations type as the
3090 effective type the comparison is carried out in. Instead
3091 we require that either the first operand is trivially
3092 convertible into the second, or the other way around.
3093 The resulting type of a comparison may be any integral type.
3094 Because we special-case pointers to void we allow
3095 comparisons of pointers with the same mode as well. */
3096 if ((!useless_type_conversion_p (op0_type, op1_type)
3097 && !useless_type_conversion_p (op1_type, op0_type)
3098 && (!POINTER_TYPE_P (op0_type)
3099 || !POINTER_TYPE_P (op1_type)
3100 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3101 || !INTEGRAL_TYPE_P (type))
3102 {
3103 error ("type mismatch in comparison expression");
3104 debug_generic_expr (type);
3105 debug_generic_expr (op0_type);
3106 debug_generic_expr (op1_type);
3107 return true;
3108 }
3109
3110 return false;
3111}
726a989a 3112
9f509004
RG
3113/* Verify a gimple assignment statement STMT with an unary rhs.
3114 Returns true if anything is wrong. */
7e98624c
RG
3115
3116static bool
9f509004 3117verify_gimple_assign_unary (gimple stmt)
7e98624c 3118{
726a989a
RB
3119 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3120 tree lhs = gimple_assign_lhs (stmt);
726a989a 3121 tree lhs_type = TREE_TYPE (lhs);
9f509004 3122 tree rhs1 = gimple_assign_rhs1 (stmt);
726a989a 3123 tree rhs1_type = TREE_TYPE (rhs1);
7e98624c 3124
9f509004
RG
3125 if (!is_gimple_reg (lhs)
3126 && !(optimize == 0
3127 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3128 {
3129 error ("non-register as LHS of unary operation");
3130 return true;
3131 }
3132
3133 if (!is_gimple_val (rhs1))
3134 {
3135 error ("invalid operand in unary operation");
3136 return true;
3137 }
3138
3139 /* First handle conversions. */
726a989a 3140 switch (rhs_code)
7e98624c 3141 {
1043771b 3142 CASE_CONVERT:
7e98624c 3143 {
7e98624c 3144 /* Allow conversions between integral types and pointers only if
9f509004
RG
3145 there is no sign or zero extension involved.
3146 For targets were the precision of sizetype doesn't match that
3147 of pointers we need to allow arbitrary conversions from and
3148 to sizetype. */
3149 if ((POINTER_TYPE_P (lhs_type)
3150 && INTEGRAL_TYPE_P (rhs1_type)
3151 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3152 || rhs1_type == sizetype))
3153 || (POINTER_TYPE_P (rhs1_type)
3154 && INTEGRAL_TYPE_P (lhs_type)
3155 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3156 || lhs_type == sizetype)))
7e98624c
RG
3157 return false;
3158
3159 /* Allow conversion from integer to offset type and vice versa. */
726a989a
RB
3160 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3161 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3162 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3163 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
7e98624c
RG
3164 return false;
3165
3166 /* Otherwise assert we are converting between types of the
3167 same kind. */
726a989a 3168 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
7e98624c
RG
3169 {
3170 error ("invalid types in nop conversion");
726a989a
RB
3171 debug_generic_expr (lhs_type);
3172 debug_generic_expr (rhs1_type);
7e98624c
RG
3173 return true;
3174 }
3175
3176 return false;
3177 }
3178
09e881c9
BE
3179 case ADDR_SPACE_CONVERT_EXPR:
3180 {
3181 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3182 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3183 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3184 {
3185 error ("invalid types in address space conversion");
3186 debug_generic_expr (lhs_type);
3187 debug_generic_expr (rhs1_type);
3188 return true;
3189 }
3190
3191 return false;
3192 }
3193
17d23165
RS
3194 case FIXED_CONVERT_EXPR:
3195 {
726a989a
RB
3196 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3197 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
17d23165
RS
3198 {
3199 error ("invalid types in fixed-point conversion");
726a989a
RB
3200 debug_generic_expr (lhs_type);
3201 debug_generic_expr (rhs1_type);
17d23165
RS
3202 return true;
3203 }
3204
3205 return false;
3206 }
3207
7e98624c
RG
3208 case FLOAT_EXPR:
3209 {
726a989a 3210 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
7e98624c
RG
3211 {
3212 error ("invalid types in conversion to floating point");
726a989a
RB
3213 debug_generic_expr (lhs_type);
3214 debug_generic_expr (rhs1_type);
7e98624c
RG
3215 return true;
3216 }
726a989a 3217
7e98624c
RG
3218 return false;
3219 }
3220
3221 case FIX_TRUNC_EXPR:
3222 {
726a989a 3223 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
7e98624c
RG
3224 {
3225 error ("invalid types in conversion to integer");
726a989a
RB
3226 debug_generic_expr (lhs_type);
3227 debug_generic_expr (rhs1_type);
7e98624c
RG
3228 return true;
3229 }
726a989a 3230
7e98624c
RG
3231 return false;
3232 }
3233
587aa063
RG
3234 case VEC_UNPACK_HI_EXPR:
3235 case VEC_UNPACK_LO_EXPR:
3236 case REDUC_MAX_EXPR:
3237 case REDUC_MIN_EXPR:
3238 case REDUC_PLUS_EXPR:
3239 case VEC_UNPACK_FLOAT_HI_EXPR:
3240 case VEC_UNPACK_FLOAT_LO_EXPR:
3241 /* FIXME. */
3242 return false;
9f509004 3243
587aa063 3244 case TRUTH_NOT_EXPR:
9f509004
RG
3245 case NEGATE_EXPR:
3246 case ABS_EXPR:
3247 case BIT_NOT_EXPR:
3248 case PAREN_EXPR:
3249 case NON_LVALUE_EXPR:
3250 case CONJ_EXPR:
9f509004
RG
3251 break;
3252
3253 default:
3254 gcc_unreachable ();
3255 }
3256
3257 /* For the remaining codes assert there is no conversion involved. */
3258 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3259 {
3260 error ("non-trivial conversion in unary operation");
3261 debug_generic_expr (lhs_type);
3262 debug_generic_expr (rhs1_type);
3263 return true;
3264 }
3265
3266 return false;
3267}
3268
3269/* Verify a gimple assignment statement STMT with a binary rhs.
3270 Returns true if anything is wrong. */
3271
3272static bool
3273verify_gimple_assign_binary (gimple stmt)
3274{
3275 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3276 tree lhs = gimple_assign_lhs (stmt);
3277 tree lhs_type = TREE_TYPE (lhs);
3278 tree rhs1 = gimple_assign_rhs1 (stmt);
3279 tree rhs1_type = TREE_TYPE (rhs1);
3280 tree rhs2 = gimple_assign_rhs2 (stmt);
3281 tree rhs2_type = TREE_TYPE (rhs2);
3282
3283 if (!is_gimple_reg (lhs)
3284 && !(optimize == 0
3285 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3286 {
3287 error ("non-register as LHS of binary operation");
3288 return true;
3289 }
726a989a 3290
9f509004
RG
3291 if (!is_gimple_val (rhs1)
3292 || !is_gimple_val (rhs2))
3293 {
3294 error ("invalid operands in binary operation");
3295 return true;
3296 }
3297
3298 /* First handle operations that involve different types. */
3299 switch (rhs_code)
3300 {
3301 case COMPLEX_EXPR:
3302 {
3303 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3304 || !(INTEGRAL_TYPE_P (rhs1_type)
726a989a 3305 || SCALAR_FLOAT_TYPE_P (rhs1_type))
9f509004 3306 || !(INTEGRAL_TYPE_P (rhs2_type)
726a989a 3307 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
7e98624c
RG
3308 {
3309 error ("type mismatch in complex expression");
726a989a
RB
3310 debug_generic_expr (lhs_type);
3311 debug_generic_expr (rhs1_type);
3312 debug_generic_expr (rhs2_type);
7e98624c
RG
3313 return true;
3314 }
726a989a 3315
7e98624c
RG
3316 return false;
3317 }
3318
7e98624c
RG
3319 case LSHIFT_EXPR:
3320 case RSHIFT_EXPR:
3321 case LROTATE_EXPR:
3322 case RROTATE_EXPR:
3323 {
587aa063
RG
3324 /* Shifts and rotates are ok on integral types, fixed point
3325 types and integer vector types. */
3326 if ((!INTEGRAL_TYPE_P (rhs1_type)
3327 && !FIXED_POINT_TYPE_P (rhs1_type)
3328 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
bf8e3b77 3329 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
587aa063
RG
3330 || (!INTEGRAL_TYPE_P (rhs2_type)
3331 /* Vector shifts of vectors are also ok. */
3332 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
bf8e3b77 3333 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
587aa063 3334 && TREE_CODE (rhs2_type) == VECTOR_TYPE
bf8e3b77 3335 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
726a989a 3336 || !useless_type_conversion_p (lhs_type, rhs1_type))
7e98624c
RG
3337 {
3338 error ("type mismatch in shift expression");
726a989a
RB
3339 debug_generic_expr (lhs_type);
3340 debug_generic_expr (rhs1_type);
3341 debug_generic_expr (rhs2_type);
7e98624c
RG
3342 return true;
3343 }
726a989a 3344
7e98624c
RG
3345 return false;
3346 }
3347
9f509004
RG
3348 case VEC_LSHIFT_EXPR:
3349 case VEC_RSHIFT_EXPR:
7e98624c 3350 {
9f509004 3351 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
0009b473 3352 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
1fe479fd
RG
3353 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3354 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
9f509004
RG
3355 || (!INTEGRAL_TYPE_P (rhs2_type)
3356 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3357 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3358 || !useless_type_conversion_p (lhs_type, rhs1_type))
7e98624c 3359 {
9f509004
RG
3360 error ("type mismatch in vector shift expression");
3361 debug_generic_expr (lhs_type);
3362 debug_generic_expr (rhs1_type);
3363 debug_generic_expr (rhs2_type);
7e98624c
RG
3364 return true;
3365 }
1fe479fd
RG
3366 /* For shifting a vector of floating point components we
3367 only allow shifting by a constant multiple of the element size. */
3368 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3369 && (TREE_CODE (rhs2) != INTEGER_CST
3370 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3371 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3372 {
3373 error ("non-element sized vector shift of floating point vector");
3374 return true;
3375 }
726a989a 3376
9f509004 3377 return false;
7e98624c
RG
3378 }
3379
646bea10
RG
3380 case PLUS_EXPR:
3381 {
3382 /* We use regular PLUS_EXPR for vectors.
3383 ??? This just makes the checker happy and may not be what is
3384 intended. */
3385 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3386 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3387 {
3388 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3389 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3390 {
3391 error ("invalid non-vector operands to vector valued plus");
3392 return true;
3393 }
3394 lhs_type = TREE_TYPE (lhs_type);
3395 rhs1_type = TREE_TYPE (rhs1_type);
3396 rhs2_type = TREE_TYPE (rhs2_type);
3397 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3398 the pointer to 2nd place. */
3399 if (POINTER_TYPE_P (rhs2_type))
3400 {
3401 tree tem = rhs1_type;
3402 rhs1_type = rhs2_type;
3403 rhs2_type = tem;
3404 }
3405 goto do_pointer_plus_expr_check;
3406 }
3407 }
3408 /* Fallthru. */
3409 case MINUS_EXPR:
3410 {
3411 if (POINTER_TYPE_P (lhs_type)
3412 || POINTER_TYPE_P (rhs1_type)
3413 || POINTER_TYPE_P (rhs2_type))
3414 {
3415 error ("invalid (pointer) operands to plus/minus");
3416 return true;
3417 }
3418
3419 /* Continue with generic binary expression handling. */
3420 break;
3421 }
3422
7e98624c
RG
3423 case POINTER_PLUS_EXPR:
3424 {
646bea10 3425do_pointer_plus_expr_check:
726a989a
RB
3426 if (!POINTER_TYPE_P (rhs1_type)
3427 || !useless_type_conversion_p (lhs_type, rhs1_type)
3428 || !useless_type_conversion_p (sizetype, rhs2_type))
7e98624c
RG
3429 {
3430 error ("type mismatch in pointer plus expression");
726a989a
RB
3431 debug_generic_stmt (lhs_type);
3432 debug_generic_stmt (rhs1_type);
3433 debug_generic_stmt (rhs2_type);
7e98624c
RG
3434 return true;
3435 }
7e98624c 3436
726a989a 3437 return false;
b8698a0f 3438 }
7e98624c 3439
7e98624c
RG
3440 case TRUTH_ANDIF_EXPR:
3441 case TRUTH_ORIF_EXPR:
2893f753
RAE
3442 gcc_unreachable ();
3443
7e98624c
RG
3444 case TRUTH_AND_EXPR:
3445 case TRUTH_OR_EXPR:
3446 case TRUTH_XOR_EXPR:
3447 {
7e98624c 3448 /* We allow any kind of integral typed argument and result. */
726a989a
RB
3449 if (!INTEGRAL_TYPE_P (rhs1_type)
3450 || !INTEGRAL_TYPE_P (rhs2_type)
3451 || !INTEGRAL_TYPE_P (lhs_type))
7e98624c
RG
3452 {
3453 error ("type mismatch in binary truth expression");
726a989a
RB
3454 debug_generic_expr (lhs_type);
3455 debug_generic_expr (rhs1_type);
3456 debug_generic_expr (rhs2_type);
7e98624c
RG
3457 return true;
3458 }
3459
3460 return false;
3461 }
3462
9f509004
RG
3463 case LT_EXPR:
3464 case LE_EXPR:
3465 case GT_EXPR:
3466 case GE_EXPR:
3467 case EQ_EXPR:
3468 case NE_EXPR:
3469 case UNORDERED_EXPR:
3470 case ORDERED_EXPR:
3471 case UNLT_EXPR:
3472 case UNLE_EXPR:
3473 case UNGT_EXPR:
3474 case UNGE_EXPR:
3475 case UNEQ_EXPR:
3476 case LTGT_EXPR:
3477 /* Comparisons are also binary, but the result type is not
3478 connected to the operand types. */
3479 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
7e98624c 3480
587aa063 3481 case WIDEN_MULT_EXPR:
5b58b39b
BS
3482 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3483 return true;
3484 return ((2 * TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (lhs_type))
3485 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3486
3487 case WIDEN_SUM_EXPR:
587aa063
RG
3488 case VEC_WIDEN_MULT_HI_EXPR:
3489 case VEC_WIDEN_MULT_LO_EXPR:
3490 case VEC_PACK_TRUNC_EXPR:
3491 case VEC_PACK_SAT_EXPR:
3492 case VEC_PACK_FIX_TRUNC_EXPR:
3493 case VEC_EXTRACT_EVEN_EXPR:
3494 case VEC_EXTRACT_ODD_EXPR:
3495 case VEC_INTERLEAVE_HIGH_EXPR:
3496 case VEC_INTERLEAVE_LOW_EXPR:
3497 /* FIXME. */
3498 return false;
3499
9f509004
RG
3500 case MULT_EXPR:
3501 case TRUNC_DIV_EXPR:
3502 case CEIL_DIV_EXPR:
3503 case FLOOR_DIV_EXPR:
3504 case ROUND_DIV_EXPR:
3505 case TRUNC_MOD_EXPR:
3506 case CEIL_MOD_EXPR:
3507 case FLOOR_MOD_EXPR:
3508 case ROUND_MOD_EXPR:
3509 case RDIV_EXPR:
3510 case EXACT_DIV_EXPR:
3511 case MIN_EXPR:
3512 case MAX_EXPR:
3513 case BIT_IOR_EXPR:
3514 case BIT_XOR_EXPR:
3515 case BIT_AND_EXPR:
9f509004
RG
3516 /* Continue with generic binary expression handling. */
3517 break;
7e98624c 3518
9f509004
RG
3519 default:
3520 gcc_unreachable ();
3521 }
b691d4b0 3522
9f509004
RG
3523 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3524 || !useless_type_conversion_p (lhs_type, rhs2_type))
3525 {
3526 error ("type mismatch in binary expression");
3527 debug_generic_stmt (lhs_type);
3528 debug_generic_stmt (rhs1_type);
3529 debug_generic_stmt (rhs2_type);
3530 return true;
3531 }
3532
3533 return false;
3534}
3535
3536/* Verify a gimple assignment statement STMT with a single rhs.
3537 Returns true if anything is wrong. */
3538
3539static bool
3540verify_gimple_assign_single (gimple stmt)
3541{
3542 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3543 tree lhs = gimple_assign_lhs (stmt);
3544 tree lhs_type = TREE_TYPE (lhs);
3545 tree rhs1 = gimple_assign_rhs1 (stmt);
3546 tree rhs1_type = TREE_TYPE (rhs1);
3547 bool res = false;
3548
3549 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3550 {
3551 error ("non-trivial conversion at assignment");
3552 debug_generic_expr (lhs_type);
3553 debug_generic_expr (rhs1_type);
3554 return true;
7e98624c
RG
3555 }
3556
9f509004 3557 if (handled_component_p (lhs))
3a19701a 3558 res |= verify_types_in_gimple_reference (lhs, true);
9f509004
RG
3559
3560 /* Special codes we cannot handle via their class. */
3561 switch (rhs_code)
7e98624c 3562 {
9f509004
RG
3563 case ADDR_EXPR:
3564 {
3565 tree op = TREE_OPERAND (rhs1, 0);
3566 if (!is_gimple_addressable (op))
3567 {
3568 error ("invalid operand in unary expression");
3569 return true;
3570 }
f5e85907 3571
6b8b9e42
RG
3572 if (!types_compatible_p (TREE_TYPE (op), TREE_TYPE (TREE_TYPE (rhs1)))
3573 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3574 TREE_TYPE (op)))
9f509004
RG
3575 {
3576 error ("type mismatch in address expression");
6b8b9e42
RG
3577 debug_generic_stmt (TREE_TYPE (rhs1));
3578 debug_generic_stmt (TREE_TYPE (op));
9f509004
RG
3579 return true;
3580 }
3581
3a19701a 3582 return verify_types_in_gimple_reference (op, true);
9f509004
RG
3583 }
3584
3585 /* tcc_reference */
3586 case COMPONENT_REF:
3587 case BIT_FIELD_REF:
3588 case INDIRECT_REF:
3589 case ALIGN_INDIRECT_REF:
3590 case MISALIGNED_INDIRECT_REF:
3591 case ARRAY_REF:
3592 case ARRAY_RANGE_REF:
3593 case VIEW_CONVERT_EXPR:
3594 case REALPART_EXPR:
3595 case IMAGPART_EXPR:
3596 case TARGET_MEM_REF:
3597 if (!is_gimple_reg (lhs)
3598 && is_gimple_reg_type (TREE_TYPE (lhs)))
f5e85907 3599 {
9f509004
RG
3600 error ("invalid rhs for gimple memory store");
3601 debug_generic_stmt (lhs);
3602 debug_generic_stmt (rhs1);
726a989a
RB
3603 return true;
3604 }
3a19701a 3605 return res || verify_types_in_gimple_reference (rhs1, false);
7e98624c 3606
9f509004
RG
3607 /* tcc_constant */
3608 case SSA_NAME:
3609 case INTEGER_CST:
3610 case REAL_CST:
3611 case FIXED_CST:
3612 case COMPLEX_CST:
3613 case VECTOR_CST:
3614 case STRING_CST:
3615 return res;
3616
3617 /* tcc_declaration */
3618 case CONST_DECL:
3619 return res;
3620 case VAR_DECL:
3621 case PARM_DECL:
3622 if (!is_gimple_reg (lhs)
3623 && !is_gimple_reg (rhs1)
3624 && is_gimple_reg_type (TREE_TYPE (lhs)))
2f9864e6 3625 {
9f509004
RG
3626 error ("invalid rhs for gimple memory store");
3627 debug_generic_stmt (lhs);
3628 debug_generic_stmt (rhs1);
2f9864e6
RG
3629 return true;
3630 }
9f509004 3631 return res;
7e98624c 3632
9f509004 3633 case COND_EXPR:
fbd767b1
RG
3634 if (!is_gimple_reg (lhs)
3635 || (!is_gimple_reg (TREE_OPERAND (rhs1, 0))
3636 && !COMPARISON_CLASS_P (TREE_OPERAND (rhs1, 0)))
3637 || (!is_gimple_reg (TREE_OPERAND (rhs1, 1))
3638 && !is_gimple_min_invariant (TREE_OPERAND (rhs1, 1)))
3639 || (!is_gimple_reg (TREE_OPERAND (rhs1, 2))
3640 && !is_gimple_min_invariant (TREE_OPERAND (rhs1, 2))))
3641 {
3642 error ("invalid COND_EXPR in gimple assignment");
3643 debug_generic_stmt (rhs1);
3644 return true;
3645 }
3646 return res;
3647
9f509004
RG
3648 case CONSTRUCTOR:
3649 case OBJ_TYPE_REF:
3650 case ASSERT_EXPR:
3651 case WITH_SIZE_EXPR:
9f509004
RG
3652 case POLYNOMIAL_CHREC:
3653 case DOT_PROD_EXPR:
3654 case VEC_COND_EXPR:
3655 case REALIGN_LOAD_EXPR:
3656 /* FIXME. */
3657 return res;
7e98624c 3658
726a989a 3659 default:;
7e98624c
RG
3660 }
3661
9f509004 3662 return res;
7e98624c
RG
3663}
3664
9f509004
RG
3665/* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3666 is a problem, otherwise false. */
3667
3668static bool
3669verify_gimple_assign (gimple stmt)
3670{
3671 switch (gimple_assign_rhs_class (stmt))
3672 {
3673 case GIMPLE_SINGLE_RHS:
3674 return verify_gimple_assign_single (stmt);
3675
3676 case GIMPLE_UNARY_RHS:
3677 return verify_gimple_assign_unary (stmt);
3678
3679 case GIMPLE_BINARY_RHS:
3680 return verify_gimple_assign_binary (stmt);
3681
3682 default:
3683 gcc_unreachable ();
3684 }
3685}
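/* For illustration, the three RHS classes dispatched above correspond to
   assignments such as

       x_1 = y_2;           (GIMPLE_SINGLE_RHS)
       x_1 = -y_2;          (GIMPLE_UNARY_RHS)
       x_1 = y_2 + z_3;     (GIMPLE_BINARY_RHS)

   and each helper checks the operand and result types for its shape.  */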
726a989a
RB
3686
3687/* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3688 is a problem, otherwise false. */
7e98624c
RG
3689
3690static bool
b59d3976 3691verify_gimple_return (gimple stmt)
7e98624c 3692{
726a989a 3693 tree op = gimple_return_retval (stmt);
b59d3976 3694 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
726a989a 3695
b59d3976
RG
3696 /* We cannot test for present return values as we do not fix up missing
3697 return values from the original source. */
726a989a
RB
3698 if (op == NULL)
3699 return false;
b8698a0f 3700
b59d3976
RG
3701 if (!is_gimple_val (op)
3702 && TREE_CODE (op) != RESULT_DECL)
3703 {
3704 error ("invalid operand in return statement");
3705 debug_generic_stmt (op);
3706 return true;
3707 }
3708
3709 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
3710 /* ??? With C++ we can have the situation that the result
3711 decl is a reference type while the return type is an aggregate. */
3712 && !(TREE_CODE (op) == RESULT_DECL
3713 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
3714 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
3715 {
3716 error ("invalid conversion in return statement");
3717 debug_generic_stmt (restype);
3718 debug_generic_stmt (TREE_TYPE (op));
3719 return true;
3720 }
3721
3722 return false;
726a989a 3723}
7e98624c 3724
7e98624c 3725
b59d3976
RG
3726/* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3727 is a problem, otherwise false. */
3728
3729static bool
3730verify_gimple_goto (gimple stmt)
3731{
3732 tree dest = gimple_goto_dest (stmt);
3733
3734 /* ??? We have two canonical forms of direct goto destinations, a
3735 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3736 if (TREE_CODE (dest) != LABEL_DECL
3737 && (!is_gimple_val (dest)
3738 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3739 {
3740 error ("goto destination is neither a label nor a pointer");
3741 return true;
3742 }
3743
3744 return false;
3745}
3746
726a989a
RB
3747/* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3748 is a problem, otherwise false. */
3749
3750static bool
b59d3976 3751verify_gimple_switch (gimple stmt)
726a989a
RB
3752{
3753 if (!is_gimple_val (gimple_switch_index (stmt)))
7e98624c 3754 {
726a989a 3755 error ("invalid operand to switch statement");
b59d3976 3756 debug_generic_stmt (gimple_switch_index (stmt));
7e98624c
RG
3757 return true;
3758 }
3759
726a989a
RB
3760 return false;
3761}
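/* For illustration: the switch index must be a gimple value (an SSA name
   or a constant), so a source-level "switch (a + 1)" is gimplified into a
   temporary assignment followed by a switch on that temporary.  */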
7e98624c 3762
7e98624c 3763
726a989a
RB
3764/* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
3765 and false otherwise. */
7e98624c 3766
726a989a 3767static bool
b59d3976 3768verify_gimple_phi (gimple stmt)
726a989a 3769{
b59d3976
RG
3770 tree type = TREE_TYPE (gimple_phi_result (stmt));
3771 unsigned i;
7e98624c 3772
7648edc4 3773 if (TREE_CODE (gimple_phi_result (stmt)) != SSA_NAME)
b59d3976
RG
3774 {
3775 error ("Invalid PHI result");
3776 return true;
3777 }
7e98624c 3778
726a989a 3779 for (i = 0; i < gimple_phi_num_args (stmt); i++)
b59d3976
RG
3780 {
3781 tree arg = gimple_phi_arg_def (stmt, i);
9f509004
RG
3782 if ((is_gimple_reg (gimple_phi_result (stmt))
3783 && !is_gimple_val (arg))
3784 || (!is_gimple_reg (gimple_phi_result (stmt))
3785 && !is_gimple_addressable (arg)))
b59d3976
RG
3786 {
3787 error ("Invalid PHI argument");
3788 debug_generic_stmt (arg);
3789 return true;
3790 }
3791 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3792 {
587aa063 3793 error ("Incompatible types in PHI argument %u", i);
b59d3976
RG
3794 debug_generic_stmt (type);
3795 debug_generic_stmt (TREE_TYPE (arg));
3796 return true;
3797 }
3798 }
726a989a 3799
7e98624c
RG
3800 return false;
3801}
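/* For illustration, a PHI node such as

       x_3 = PHI <x_1(2), x_2(4)>

   passes the checks above when its result is an SSA name, every argument
   is a gimple value (or an addressable expression when the result is not
   a gimple register), and each argument type trivially converts to the
   result type.  */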
3802
726a989a 3803
b5b8b0ac
AO
3804/* Verify a gimple debug statement STMT.
3805 Returns true if anything is wrong. */
3806
3807static bool
3808verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
3809{
3810 /* There isn't much that could be wrong in a gimple debug stmt. A
3811 gimple debug bind stmt, for example, maps a tree, that's usually
3812 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
3813 component or member of an aggregate type, to another tree, that
3814 can be an arbitrary expression. These stmts expand into debug
3815 insns, and are converted to debug notes by var-tracking.c. */
3816 return false;
3817}
3818
3819
7e98624c
RG
3820/* Verify the GIMPLE statement STMT. Returns true if there is an
3821 error, otherwise false. */
3822
3823static bool
726a989a 3824verify_types_in_gimple_stmt (gimple stmt)
7e98624c 3825{
726a989a 3826 switch (gimple_code (stmt))
7e98624c 3827 {
726a989a 3828 case GIMPLE_ASSIGN:
9f509004 3829 return verify_gimple_assign (stmt);
7e98624c 3830
726a989a
RB
3831 case GIMPLE_LABEL:
3832 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
7e98624c 3833
726a989a 3834 case GIMPLE_CALL:
b59d3976 3835 return verify_gimple_call (stmt);
7e98624c 3836
726a989a 3837 case GIMPLE_COND:
f68a75df
RG
3838 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
3839 {
3840 error ("invalid comparison code in gimple cond");
3841 return true;
3842 }
3843 if (!(!gimple_cond_true_label (stmt)
3844 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
3845 || !(!gimple_cond_false_label (stmt)
3846 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
3847 {
3848 error ("invalid labels in gimple cond");
3849 return true;
3850 }
3851
b59d3976
RG
3852 return verify_gimple_comparison (boolean_type_node,
3853 gimple_cond_lhs (stmt),
3854 gimple_cond_rhs (stmt));
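      /* For illustration: a GIMPLE_COND such as
         "if (a_1 < b_2) goto <D.1>; else goto <D.2>;" passes the checks
         above when its code is a comparison, its labels (if any) are
         LABEL_DECLs, and its operands verify against a boolean result.  */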
7e98624c 3855
726a989a 3856 case GIMPLE_GOTO:
b59d3976 3857 return verify_gimple_goto (stmt);
7e98624c 3858
726a989a 3859 case GIMPLE_SWITCH:
b59d3976 3860 return verify_gimple_switch (stmt);
7e98624c 3861
726a989a 3862 case GIMPLE_RETURN:
b59d3976 3863 return verify_gimple_return (stmt);
7e98624c 3864
726a989a 3865 case GIMPLE_ASM:
7e98624c
RG
3866 return false;
3867
726a989a 3868 case GIMPLE_PHI:
b59d3976
RG
3869 return verify_gimple_phi (stmt);
3870
3871 /* Tuples that do not have tree operands. */
3872 case GIMPLE_NOP:
b59d3976 3873 case GIMPLE_PREDICT:
1d65f45c
RH
3874 case GIMPLE_RESX:
3875 case GIMPLE_EH_DISPATCH:
40742b42 3876 case GIMPLE_EH_MUST_NOT_THROW:
b59d3976 3877 return false;
726a989a 3878
8b9db065
RH
3879 CASE_GIMPLE_OMP:
3880 /* OpenMP directives are validated by the FE and never operated
3881 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
3882 non-gimple expressions when the main index variable has had
3883 its address taken. This does not affect the loop itself
 3884 because the header of a GIMPLE_OMP_FOR is merely used to determine
 3885 how to set up the parallel iteration. */
3886 return false;
3887
b5b8b0ac
AO
3888 case GIMPLE_DEBUG:
3889 return verify_gimple_debug (stmt);
3890
7e98624c
RG
3891 default:
3892 gcc_unreachable ();
3893 }
3894}
3895
726a989a 3896/* Verify the GIMPLE statements inside the sequence STMTS. */
7e98624c 3897
7dc83ebc 3898static bool
726a989a 3899verify_types_in_gimple_seq_2 (gimple_seq stmts)
7e98624c 3900{
726a989a 3901 gimple_stmt_iterator ittr;
7dc83ebc 3902 bool err = false;
7e98624c 3903
726a989a 3904 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
7e98624c 3905 {
726a989a 3906 gimple stmt = gsi_stmt (ittr);
7e98624c 3907
726a989a
RB
3908 switch (gimple_code (stmt))
3909 {
b59d3976
RG
3910 case GIMPLE_BIND:
3911 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
3912 break;
3913
3914 case GIMPLE_TRY:
3915 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
3916 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
3917 break;
3918
3919 case GIMPLE_EH_FILTER:
3920 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
3921 break;
3922
3923 case GIMPLE_CATCH:
3924 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
3925 break;
7e98624c
RG
3926
3927 default:
7dc83ebc 3928 {
726a989a 3929 bool err2 = verify_types_in_gimple_stmt (stmt);
7dc83ebc 3930 if (err2)
726a989a 3931 debug_gimple_stmt (stmt);
7dc83ebc
RG
3932 err |= err2;
3933 }
7e98624c
RG
3934 }
3935 }
7dc83ebc
RG
3936
3937 return err;
3938}
3939
3940
3941/* Verify the GIMPLE statements inside the statement list STMTS. */
3942
3943void
726a989a 3944verify_types_in_gimple_seq (gimple_seq stmts)
7dc83ebc 3945{
726a989a 3946 if (verify_types_in_gimple_seq_2 (stmts))
7dc83ebc 3947 internal_error ("verify_gimple failed");
7e98624c
RG
3948}
3949
6de9cd9a
DN
3950
3951/* Verify STMT, return true if STMT is not in GIMPLE form.
3952 TODO: Implement type checking. */
3953
3954static bool
726a989a 3955verify_stmt (gimple_stmt_iterator *gsi)
6de9cd9a
DN
3956{
3957 tree addr;
726a989a
RB
3958 struct walk_stmt_info wi;
3959 bool last_in_block = gsi_one_before_end_p (*gsi);
3960 gimple stmt = gsi_stmt (*gsi);
1d65f45c 3961 int lp_nr;
6de9cd9a 3962
726a989a 3963 if (is_gimple_omp (stmt))
50674e96
DN
3964 {
3965 /* OpenMP directives are validated by the FE and never operated
726a989a 3966 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
50674e96
DN
3967 non-gimple expressions when the main index variable has had
3968 its address taken. This does not affect the loop itself
726a989a 3969 because the header of a GIMPLE_OMP_FOR is merely used to determine
50674e96
DN
 3970 how to set up the parallel iteration. */
3971 return false;
3972 }
3973
726a989a
RB
3974 /* FIXME. The C frontend passes unpromoted arguments in case it
3975 didn't see a function declaration before the call. */
3976 if (is_gimple_call (stmt))
6de9cd9a 3977 {
7c9577be 3978 tree decl;
726a989a 3979
7c9577be
RG
3980 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
3981 {
3982 error ("invalid function in call statement");
3983 return true;
3984 }
3985
3986 decl = gimple_call_fndecl (stmt);
3987 if (decl
3988 && TREE_CODE (decl) == FUNCTION_DECL
726a989a
RB
3989 && DECL_LOOPING_CONST_OR_PURE_P (decl)
3990 && (!DECL_PURE_P (decl))
3991 && (!TREE_READONLY (decl)))
3992 {
3993 error ("invalid pure const state for function");
3994 return true;
3995 }
6de9cd9a
DN
3996 }
3997
b5b8b0ac
AO
3998 if (is_gimple_debug (stmt))
3999 return false;
4000
726a989a
RB
4001 memset (&wi, 0, sizeof (wi));
4002 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
6de9cd9a
DN
4003 if (addr)
4004 {
726a989a 4005 debug_generic_expr (addr);
c2255bc4 4006 inform (gimple_location (gsi_stmt (*gsi)), "in statement");
726a989a 4007 debug_gimple_stmt (stmt);
6de9cd9a
DN
4008 return true;
4009 }
4010
1eaba2f2
RH
4011 /* If the statement is marked as part of an EH region, then it is
4012 expected that the statement could throw. Verify that when we
4013 have optimizations that simplify statements such that we prove
4014 that they cannot throw, that we update other data structures
4015 to match. */
1d65f45c
RH
4016 lp_nr = lookup_stmt_eh_lp (stmt);
4017 if (lp_nr != 0)
1eaba2f2 4018 {
1d65f45c 4019 if (!stmt_could_throw_p (stmt))
1eaba2f2 4020 {
f7fbb475
JH
4021 error ("statement marked for throw, but doesn%'t");
4022 goto fail;
1eaba2f2 4023 }
1d65f45c 4024 else if (lp_nr > 0 && !last_in_block && stmt_can_throw_internal (stmt))
1eaba2f2 4025 {
ab532386 4026 error ("statement marked for throw in middle of block");
1eaba2f2
RH
4027 goto fail;
4028 }
4029 }
4030
6de9cd9a 4031 return false;
1eaba2f2
RH
4032
4033 fail:
726a989a 4034 debug_gimple_stmt (stmt);
1eaba2f2 4035 return true;
6de9cd9a
DN
4036}
4037
4038
 4039/* Return true when T can be shared. */
4040
d7f09764 4041bool
6de9cd9a
DN
4042tree_node_can_be_shared (tree t)
4043{
6615c446 4044 if (IS_TYPE_OR_DECL_P (t)
6de9cd9a 4045 || is_gimple_min_invariant (t)
5e23162d 4046 || TREE_CODE (t) == SSA_NAME
953ff289
DN
4047 || t == error_mark_node
4048 || TREE_CODE (t) == IDENTIFIER_NODE)
6de9cd9a
DN
4049 return true;
4050
92b6dff3
JL
4051 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4052 return true;
4053
44de5aeb 4054 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
953ff289
DN
4055 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4056 || TREE_CODE (t) == COMPONENT_REF
4057 || TREE_CODE (t) == REALPART_EXPR
4058 || TREE_CODE (t) == IMAGPART_EXPR)
6de9cd9a
DN
4059 t = TREE_OPERAND (t, 0);
4060
4061 if (DECL_P (t))
4062 return true;
4063
4064 return false;
4065}
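/* For illustration: types, declarations, constants, SSA names and
   identifiers may legitimately appear in more than one statement; any
   other tree node is expected to be unshared, and verify_node_sharing
   below reports duplicates via a visited pointer set.  */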
4066
4067
726a989a 4068/* Called via walk_gimple_stmt. Verify tree sharing. */
6de9cd9a
DN
4069
4070static tree
726a989a 4071verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
6de9cd9a 4072{
726a989a
RB
4073 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4074 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
6de9cd9a
DN
4075
4076 if (tree_node_can_be_shared (*tp))
4077 {
4078 *walk_subtrees = false;
4079 return NULL;
4080 }
4081
4437b50d
JH
4082 if (pointer_set_insert (visited, *tp))
4083 return *tp;
6de9cd9a
DN
4084
4085 return NULL;
4086}
4087
4088
4437b50d
JH
4089static bool eh_error_found;
4090static int
4091verify_eh_throw_stmt_node (void **slot, void *data)
4092{
4093 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4094 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4095
4096 if (!pointer_set_contains (visited, node->stmt))
4097 {
4098 error ("Dead STMT in EH table");
726a989a 4099 debug_gimple_stmt (node->stmt);
4437b50d
JH
4100 eh_error_found = true;
4101 }
c13edb67 4102 return 1;
4437b50d
JH
4103}
4104
726a989a
RB
4105
4106/* Verify the GIMPLE statements in every basic block. */
6de9cd9a 4107
24e47c76 4108DEBUG_FUNCTION void
6de9cd9a
DN
4109verify_stmts (void)
4110{
4111 basic_block bb;
726a989a 4112 gimple_stmt_iterator gsi;
6de9cd9a 4113 bool err = false;
4437b50d 4114 struct pointer_set_t *visited, *visited_stmts;
6de9cd9a 4115 tree addr;
726a989a 4116 struct walk_stmt_info wi;
6de9cd9a
DN
4117
4118 timevar_push (TV_TREE_STMT_VERIFY);
4437b50d
JH
4119 visited = pointer_set_create ();
4120 visited_stmts = pointer_set_create ();
6de9cd9a 4121
726a989a
RB
4122 memset (&wi, 0, sizeof (wi));
4123 wi.info = (void *) visited;
4124
6de9cd9a
DN
4125 FOR_EACH_BB (bb)
4126 {
726a989a
RB
4127 gimple phi;
4128 size_t i;
6de9cd9a 4129
726a989a 4130 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4131 {
726a989a 4132 phi = gsi_stmt (gsi);
4437b50d 4133 pointer_set_insert (visited_stmts, phi);
726a989a 4134 if (gimple_bb (phi) != bb)
8de1fc1b 4135 {
726a989a 4136 error ("gimple_bb (phi) is set to a wrong basic block");
8de1fc1b
KH
4137 err |= true;
4138 }
4139
726a989a 4140 for (i = 0; i < gimple_phi_num_args (phi); i++)
6de9cd9a 4141 {
726a989a 4142 tree t = gimple_phi_arg_def (phi, i);
6de9cd9a
DN
4143 tree addr;
4144
e9705dc5
AO
4145 if (!t)
4146 {
4147 error ("missing PHI def");
726a989a 4148 debug_gimple_stmt (phi);
e9705dc5
AO
4149 err |= true;
4150 continue;
4151 }
6de9cd9a
DN
4152 /* Addressable variables do have SSA_NAMEs but they
4153 are not considered gimple values. */
e9705dc5
AO
4154 else if (TREE_CODE (t) != SSA_NAME
4155 && TREE_CODE (t) != FUNCTION_DECL
220f1c29 4156 && !is_gimple_min_invariant (t))
6de9cd9a 4157 {
726a989a
RB
4158 error ("PHI argument is not a GIMPLE value");
4159 debug_gimple_stmt (phi);
4160 debug_generic_expr (t);
6de9cd9a
DN
4161 err |= true;
4162 }
4163
4437b50d 4164 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
6de9cd9a
DN
4165 if (addr)
4166 {
ab532386 4167 error ("incorrect sharing of tree nodes");
726a989a
RB
4168 debug_gimple_stmt (phi);
4169 debug_generic_expr (addr);
6de9cd9a
DN
4170 err |= true;
4171 }
4172 }
211ca15c
RG
4173
4174#ifdef ENABLE_TYPES_CHECKING
4175 if (verify_gimple_phi (phi))
4176 {
4177 debug_gimple_stmt (phi);
4178 err |= true;
4179 }
4180#endif
6de9cd9a
DN
4181 }
4182
726a989a 4183 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
6de9cd9a 4184 {
726a989a
RB
4185 gimple stmt = gsi_stmt (gsi);
4186
4187 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4188 || gimple_code (stmt) == GIMPLE_BIND)
4189 {
4190 error ("invalid GIMPLE statement");
4191 debug_gimple_stmt (stmt);
4192 err |= true;
4193 }
8de1fc1b 4194
4437b50d 4195 pointer_set_insert (visited_stmts, stmt);
07beea0d 4196
726a989a 4197 if (gimple_bb (stmt) != bb)
8de1fc1b 4198 {
726a989a 4199 error ("gimple_bb (stmt) is set to a wrong basic block");
2cd713a0 4200 debug_gimple_stmt (stmt);
8de1fc1b
KH
4201 err |= true;
4202 }
4203
726a989a
RB
4204 if (gimple_code (stmt) == GIMPLE_LABEL)
4205 {
4206 tree decl = gimple_label_label (stmt);
4207 int uid = LABEL_DECL_UID (decl);
4208
4209 if (uid == -1
4210 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4211 {
1d65f45c 4212 error ("incorrect entry in label_to_block_map");
726a989a
RB
4213 err |= true;
4214 }
1d65f45c
RH
4215
4216 uid = EH_LANDING_PAD_NR (decl);
4217 if (uid)
4218 {
4219 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4220 if (decl != lp->post_landing_pad)
4221 {
4222 error ("incorrect setting of landing pad number");
4223 err |= true;
4224 }
4225 }
726a989a
RB
4226 }
4227
4228 err |= verify_stmt (&gsi);
211ca15c
RG
4229
4230#ifdef ENABLE_TYPES_CHECKING
4231 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4232 {
4233 debug_gimple_stmt (stmt);
4234 err |= true;
4235 }
4236#endif
726a989a 4237 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
6de9cd9a
DN
4238 if (addr)
4239 {
ab532386 4240 error ("incorrect sharing of tree nodes");
726a989a
RB
4241 debug_gimple_stmt (stmt);
4242 debug_generic_expr (addr);
6de9cd9a
DN
4243 err |= true;
4244 }
726a989a 4245 gsi_next (&gsi);
6de9cd9a
DN
4246 }
4247 }
726a989a 4248
4437b50d
JH
4249 eh_error_found = false;
4250 if (get_eh_throw_stmt_table (cfun))
4251 htab_traverse (get_eh_throw_stmt_table (cfun),
4252 verify_eh_throw_stmt_node,
4253 visited_stmts);
6de9cd9a 4254
4437b50d 4255 if (err | eh_error_found)
ab532386 4256 internal_error ("verify_stmts failed");
6de9cd9a 4257
4437b50d
JH
4258 pointer_set_destroy (visited);
4259 pointer_set_destroy (visited_stmts);
6946b3f7 4260 verify_histograms ();
6de9cd9a
DN
4261 timevar_pop (TV_TREE_STMT_VERIFY);
4262}
4263
4264
4265/* Verifies that the flow information is OK. */
4266
4267static int
726a989a 4268gimple_verify_flow_info (void)
6de9cd9a
DN
4269{
4270 int err = 0;
4271 basic_block bb;
726a989a
RB
4272 gimple_stmt_iterator gsi;
4273 gimple stmt;
6de9cd9a 4274 edge e;
628f6a4e 4275 edge_iterator ei;
6de9cd9a 4276
726a989a 4277 if (ENTRY_BLOCK_PTR->il.gimple)
6de9cd9a 4278 {
7506e1cb 4279 error ("ENTRY_BLOCK has IL associated with it");
6de9cd9a
DN
4280 err = 1;
4281 }
4282
726a989a 4283 if (EXIT_BLOCK_PTR->il.gimple)
6de9cd9a 4284 {
7506e1cb 4285 error ("EXIT_BLOCK has IL associated with it");
6de9cd9a
DN
4286 err = 1;
4287 }
4288
628f6a4e 4289 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
4290 if (e->flags & EDGE_FALLTHRU)
4291 {
ab532386 4292 error ("fallthru to exit from bb %d", e->src->index);
6de9cd9a
DN
4293 err = 1;
4294 }
4295
4296 FOR_EACH_BB (bb)
4297 {
4298 bool found_ctrl_stmt = false;
4299
726a989a 4300 stmt = NULL;
548414c6 4301
6de9cd9a 4302 /* Skip labels on the start of basic block. */
726a989a 4303 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4304 {
726a989a
RB
4305 tree label;
4306 gimple prev_stmt = stmt;
548414c6 4307
726a989a 4308 stmt = gsi_stmt (gsi);
548414c6 4309
726a989a 4310 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
4311 break;
4312
726a989a
RB
4313 label = gimple_label_label (stmt);
4314 if (prev_stmt && DECL_NONLOCAL (label))
548414c6 4315 {
953ff289 4316 error ("nonlocal label ");
726a989a 4317 print_generic_expr (stderr, label, 0);
953ff289
DN
4318 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4319 bb->index);
548414c6
KH
4320 err = 1;
4321 }
4322
1197e789
RG
4323 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4324 {
4325 error ("EH landing pad label ");
4326 print_generic_expr (stderr, label, 0);
4327 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4328 bb->index);
4329 err = 1;
4330 }
4331
726a989a 4332 if (label_to_block (label) != bb)
6de9cd9a 4333 {
953ff289 4334 error ("label ");
726a989a 4335 print_generic_expr (stderr, label, 0);
953ff289
DN
4336 fprintf (stderr, " to block does not match in bb %d",
4337 bb->index);
6de9cd9a
DN
4338 err = 1;
4339 }
4340
726a989a 4341 if (decl_function_context (label) != current_function_decl)
6de9cd9a 4342 {
953ff289 4343 error ("label ");
726a989a 4344 print_generic_expr (stderr, label, 0);
953ff289
DN
4345 fprintf (stderr, " has incorrect context in bb %d",
4346 bb->index);
6de9cd9a
DN
4347 err = 1;
4348 }
4349 }
4350
4351 /* Verify that body of basic block BB is free of control flow. */
726a989a 4352 for (; !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4353 {
726a989a 4354 gimple stmt = gsi_stmt (gsi);
6de9cd9a
DN
4355
4356 if (found_ctrl_stmt)
4357 {
ab532386 4358 error ("control flow in the middle of basic block %d",
6de9cd9a
DN
4359 bb->index);
4360 err = 1;
4361 }
4362
4363 if (stmt_ends_bb_p (stmt))
4364 found_ctrl_stmt = true;
4365
726a989a 4366 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a 4367 {
953ff289 4368 error ("label ");
726a989a 4369 print_generic_expr (stderr, gimple_label_label (stmt), 0);
953ff289 4370 fprintf (stderr, " in the middle of basic block %d", bb->index);
6de9cd9a
DN
4371 err = 1;
4372 }
4373 }
953ff289 4374
726a989a
RB
4375 gsi = gsi_last_bb (bb);
4376 if (gsi_end_p (gsi))
6de9cd9a
DN
4377 continue;
4378
726a989a 4379 stmt = gsi_stmt (gsi);
6de9cd9a 4380
1d65f45c
RH
4381 if (gimple_code (stmt) == GIMPLE_LABEL)
4382 continue;
4383
cc7220fd
JH
4384 err |= verify_eh_edges (stmt);
4385
6de9cd9a
DN
4386 if (is_ctrl_stmt (stmt))
4387 {
628f6a4e 4388 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4389 if (e->flags & EDGE_FALLTHRU)
4390 {
ab532386 4391 error ("fallthru edge after a control statement in bb %d",
6de9cd9a
DN
4392 bb->index);
4393 err = 1;
4394 }
4395 }
4396
726a989a 4397 if (gimple_code (stmt) != GIMPLE_COND)
36b24193
ZD
4398 {
4399 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4400 after anything else but if statement. */
4401 FOR_EACH_EDGE (e, ei, bb->succs)
4402 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4403 {
726a989a 4404 error ("true/false edge after a non-GIMPLE_COND in bb %d",
36b24193
ZD
4405 bb->index);
4406 err = 1;
4407 }
4408 }
4409
726a989a 4410 switch (gimple_code (stmt))
6de9cd9a 4411 {
726a989a 4412 case GIMPLE_COND:
6de9cd9a
DN
4413 {
4414 edge true_edge;
4415 edge false_edge;
b8698a0f 4416
6de9cd9a
DN
4417 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4418
726a989a
RB
4419 if (!true_edge
4420 || !false_edge
6de9cd9a
DN
4421 || !(true_edge->flags & EDGE_TRUE_VALUE)
4422 || !(false_edge->flags & EDGE_FALSE_VALUE)
4423 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4424 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
628f6a4e 4425 || EDGE_COUNT (bb->succs) >= 3)
6de9cd9a 4426 {
ab532386 4427 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4428 bb->index);
4429 err = 1;
4430 }
6de9cd9a
DN
4431 }
4432 break;
4433
726a989a 4434 case GIMPLE_GOTO:
6de9cd9a
DN
4435 if (simple_goto_p (stmt))
4436 {
ab532386 4437 error ("explicit goto at end of bb %d", bb->index);
6531d1be 4438 err = 1;
6de9cd9a
DN
4439 }
4440 else
4441 {
6531d1be 4442 /* FIXME. We should double check that the labels in the
6de9cd9a 4443 destination blocks have their address taken. */
628f6a4e 4444 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4445 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4446 | EDGE_FALSE_VALUE))
4447 || !(e->flags & EDGE_ABNORMAL))
4448 {
ab532386 4449 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4450 bb->index);
4451 err = 1;
4452 }
4453 }
4454 break;
4455
c54c785d
JH
4456 case GIMPLE_CALL:
4457 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4458 break;
4459 /* ... fallthru ... */
726a989a 4460 case GIMPLE_RETURN:
c5cbcccf
ZD
4461 if (!single_succ_p (bb)
4462 || (single_succ_edge (bb)->flags
4463 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4464 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
6de9cd9a 4465 {
ab532386 4466 error ("wrong outgoing edge flags at end of bb %d", bb->index);
6de9cd9a
DN
4467 err = 1;
4468 }
c5cbcccf 4469 if (single_succ (bb) != EXIT_BLOCK_PTR)
6de9cd9a 4470 {
ab532386 4471 error ("return edge does not point to exit in bb %d",
6de9cd9a
DN
4472 bb->index);
4473 err = 1;
4474 }
4475 break;
4476
726a989a 4477 case GIMPLE_SWITCH:
6de9cd9a 4478 {
7853504d 4479 tree prev;
6de9cd9a
DN
4480 edge e;
4481 size_t i, n;
6de9cd9a 4482
726a989a 4483 n = gimple_switch_num_labels (stmt);
6de9cd9a
DN
4484
4485 /* Mark all the destination basic blocks. */
4486 for (i = 0; i < n; ++i)
4487 {
726a989a 4488 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
6de9cd9a 4489 basic_block label_bb = label_to_block (lab);
1e128c5f 4490 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
6de9cd9a
DN
4491 label_bb->aux = (void *)1;
4492 }
4493
7853504d 4494 /* Verify that the case labels are sorted. */
726a989a 4495 prev = gimple_switch_label (stmt, 0);
b7814a18 4496 for (i = 1; i < n; ++i)
7853504d 4497 {
726a989a
RB
4498 tree c = gimple_switch_label (stmt, i);
4499 if (!CASE_LOW (c))
7853504d 4500 {
726a989a
RB
4501 error ("found default case not at the start of "
4502 "case vector");
4503 err = 1;
7853504d
SB
4504 continue;
4505 }
726a989a
RB
4506 if (CASE_LOW (prev)
4507 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
7853504d 4508 {
953ff289 4509 error ("case labels not sorted: ");
7853504d
SB
4510 print_generic_expr (stderr, prev, 0);
4511 fprintf (stderr," is greater than ");
4512 print_generic_expr (stderr, c, 0);
4513 fprintf (stderr," but comes before it.\n");
4514 err = 1;
4515 }
4516 prev = c;
4517 }
b7814a18
RG
4518 /* VRP will remove the default case if it can prove it will
4519 never be executed. So do not verify there always exists
4520 a default case here. */
7853504d 4521
628f6a4e 4522 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4523 {
4524 if (!e->dest->aux)
4525 {
ab532386 4526 error ("extra outgoing edge %d->%d",
6de9cd9a
DN
4527 bb->index, e->dest->index);
4528 err = 1;
4529 }
726a989a 4530
6de9cd9a
DN
4531 e->dest->aux = (void *)2;
4532 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4533 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4534 {
ab532386 4535 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4536 bb->index);
4537 err = 1;
4538 }
4539 }
4540
4541 /* Check that we have all of them. */
4542 for (i = 0; i < n; ++i)
4543 {
726a989a 4544 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
6de9cd9a
DN
4545 basic_block label_bb = label_to_block (lab);
4546
4547 if (label_bb->aux != (void *)2)
4548 {
726a989a 4549 error ("missing edge %i->%i", bb->index, label_bb->index);
6de9cd9a
DN
4550 err = 1;
4551 }
4552 }
4553
628f6a4e 4554 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4555 e->dest->aux = (void *)0;
4556 }
1d65f45c
RH
4557 break;
4558
4559 case GIMPLE_EH_DISPATCH:
4560 err |= verify_eh_dispatch_edge (stmt);
4561 break;
6de9cd9a 4562
1d65f45c
RH
4563 default:
4564 break;
6de9cd9a
DN
4565 }
4566 }
4567
2b28c07a 4568 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
6de9cd9a
DN
4569 verify_dominators (CDI_DOMINATORS);
4570
4571 return err;
4572}
4573
4574
f0b698c1 4575/* Updates phi nodes after creating a forwarder block joined
6de9cd9a
DN
4576 by edge FALLTHRU. */
4577
4578static void
726a989a 4579gimple_make_forwarder_block (edge fallthru)
6de9cd9a
DN
4580{
4581 edge e;
628f6a4e 4582 edge_iterator ei;
6de9cd9a 4583 basic_block dummy, bb;
726a989a
RB
4584 tree var;
4585 gimple_stmt_iterator gsi;
6de9cd9a
DN
4586
4587 dummy = fallthru->src;
4588 bb = fallthru->dest;
4589
c5cbcccf 4590 if (single_pred_p (bb))
6de9cd9a
DN
4591 return;
4592
cfaab3a9 4593 /* If we redirected a branch we must create new PHI nodes at the
6de9cd9a 4594 start of BB. */
726a989a 4595 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4596 {
726a989a 4597 gimple phi, new_phi;
b8698a0f 4598
726a989a
RB
4599 phi = gsi_stmt (gsi);
4600 var = gimple_phi_result (phi);
6de9cd9a
DN
4601 new_phi = create_phi_node (var, bb);
4602 SSA_NAME_DEF_STMT (var) = new_phi;
726a989a 4603 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
b8698a0f 4604 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
f5045c96 4605 UNKNOWN_LOCATION);
6de9cd9a
DN
4606 }
4607
6de9cd9a 4608 /* Add the arguments we have stored on edges. */
628f6a4e 4609 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a
DN
4610 {
4611 if (e == fallthru)
4612 continue;
4613
71882046 4614 flush_pending_stmts (e);
6de9cd9a
DN
4615 }
4616}
4617
4618
6de9cd9a
DN
4619/* Return a non-special label in the head of basic block BLOCK.
4620 Create one if it doesn't exist. */
4621
d7621d3c 4622tree
726a989a 4623gimple_block_label (basic_block bb)
6de9cd9a 4624{
726a989a 4625 gimple_stmt_iterator i, s = gsi_start_bb (bb);
6de9cd9a 4626 bool first = true;
726a989a
RB
4627 tree label;
4628 gimple stmt;
6de9cd9a 4629
726a989a 4630 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6de9cd9a 4631 {
726a989a
RB
4632 stmt = gsi_stmt (i);
4633 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a 4634 break;
726a989a 4635 label = gimple_label_label (stmt);
6de9cd9a
DN
4636 if (!DECL_NONLOCAL (label))
4637 {
4638 if (!first)
726a989a 4639 gsi_move_before (&i, &s);
6de9cd9a
DN
4640 return label;
4641 }
4642 }
4643
c2255bc4 4644 label = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
4645 stmt = gimple_build_label (label);
4646 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6de9cd9a
DN
4647 return label;
4648}
4649
4650
4651/* Attempt to perform edge redirection by replacing a possibly complex
4652 jump instruction by a goto or by removing the jump completely.
4653 This can apply only if all edges now point to the same block. The
4654 parameters and return values are equivalent to
4655 redirect_edge_and_branch. */
4656
4657static edge
726a989a 4658gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6de9cd9a
DN
4659{
4660 basic_block src = e->src;
726a989a
RB
4661 gimple_stmt_iterator i;
4662 gimple stmt;
6de9cd9a 4663
07b43a87
KH
4664 /* We can replace or remove a complex jump only when we have exactly
4665 two edges. */
4666 if (EDGE_COUNT (src->succs) != 2
4667 /* Verify that all targets will be TARGET. Specifically, the
4668 edge that is not E must also go to TARGET. */
4669 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6de9cd9a
DN
4670 return NULL;
4671
726a989a
RB
4672 i = gsi_last_bb (src);
4673 if (gsi_end_p (i))
6de9cd9a 4674 return NULL;
6de9cd9a 4675
726a989a
RB
4676 stmt = gsi_stmt (i);
4677
4678 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6de9cd9a 4679 {
726a989a 4680 gsi_remove (&i, true);
6de9cd9a
DN
4681 e = ssa_redirect_edge (e, target);
4682 e->flags = EDGE_FALLTHRU;
4683 return e;
4684 }
4685
4686 return NULL;
4687}
4688
4689
4690/* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4691 edge representing the redirected branch. */
4692
4693static edge
726a989a 4694gimple_redirect_edge_and_branch (edge e, basic_block dest)
6de9cd9a
DN
4695{
4696 basic_block bb = e->src;
726a989a 4697 gimple_stmt_iterator gsi;
6de9cd9a 4698 edge ret;
726a989a 4699 gimple stmt;
6de9cd9a 4700
4f6c2131 4701 if (e->flags & EDGE_ABNORMAL)
6de9cd9a
DN
4702 return NULL;
4703
6de9cd9a
DN
4704 if (e->dest == dest)
4705 return NULL;
4706
a3710436
JH
4707 if (e->flags & EDGE_EH)
4708 return redirect_eh_edge (e, dest);
4709
1c061be5
RH
4710 if (e->src != ENTRY_BLOCK_PTR)
4711 {
4712 ret = gimple_try_redirect_by_replacing_jump (e, dest);
4713 if (ret)
4714 return ret;
4715 }
4716
726a989a
RB
4717 gsi = gsi_last_bb (bb);
4718 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6de9cd9a 4719
d130ae11 4720 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6de9cd9a 4721 {
726a989a 4722 case GIMPLE_COND:
a9b77cd1 4723 /* For COND_EXPR, we only need to redirect the edge. */
6de9cd9a
DN
4724 break;
4725
726a989a 4726 case GIMPLE_GOTO:
6de9cd9a
DN
4727 /* No non-abnormal edges should lead from a non-simple goto, and
4728 simple ones should be represented implicitly. */
1e128c5f 4729 gcc_unreachable ();
6de9cd9a 4730
726a989a 4731 case GIMPLE_SWITCH:
6de9cd9a 4732 {
726a989a 4733 tree label = gimple_block_label (dest);
d6be0d7f 4734 tree cases = get_cases_for_edge (e, stmt);
6de9cd9a 4735
d6be0d7f
JL
4736 /* If we have a list of cases associated with E, then use it
4737 as it's a lot faster than walking the entire case vector. */
4738 if (cases)
6de9cd9a 4739 {
4edbbd3f 4740 edge e2 = find_edge (e->src, dest);
d6be0d7f
JL
4741 tree last, first;
4742
4743 first = cases;
4744 while (cases)
4745 {
4746 last = cases;
4747 CASE_LABEL (cases) = label;
4748 cases = TREE_CHAIN (cases);
4749 }
4750
4751 /* If there was already an edge in the CFG, then we need
4752 to move all the cases associated with E to E2. */
4753 if (e2)
4754 {
4755 tree cases2 = get_cases_for_edge (e2, stmt);
4756
4757 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4758 TREE_CHAIN (cases2) = first;
4759 }
fc249fe5 4760 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6de9cd9a 4761 }
92b6dff3
JL
4762 else
4763 {
726a989a 4764 size_t i, n = gimple_switch_num_labels (stmt);
d6be0d7f
JL
4765
4766 for (i = 0; i < n; i++)
4767 {
726a989a 4768 tree elt = gimple_switch_label (stmt, i);
d6be0d7f
JL
4769 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4770 CASE_LABEL (elt) = label;
4771 }
92b6dff3 4772 }
1c384bf1
RH
4773 }
4774 break;
d6be0d7f 4775
1c384bf1
RH
4776 case GIMPLE_ASM:
4777 {
4778 int i, n = gimple_asm_nlabels (stmt);
f8981d1c 4779 tree label = NULL;
1c384bf1
RH
4780
4781 for (i = 0; i < n; ++i)
4782 {
4783 tree cons = gimple_asm_label_op (stmt, i);
4784 if (label_to_block (TREE_VALUE (cons)) == e->dest)
f8981d1c
AO
4785 {
4786 if (!label)
4787 label = gimple_block_label (dest);
4788 TREE_VALUE (cons) = label;
4789 }
1c384bf1 4790 }
f8981d1c
AO
4791
4792 /* If we didn't find any label matching the former edge in the
4793 asm labels, we must be redirecting the fallthrough
4794 edge. */
4795 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6de9cd9a 4796 }
1c384bf1 4797 break;
6de9cd9a 4798
726a989a
RB
4799 case GIMPLE_RETURN:
4800 gsi_remove (&gsi, true);
6de9cd9a
DN
4801 e->flags |= EDGE_FALLTHRU;
4802 break;
4803
726a989a
RB
4804 case GIMPLE_OMP_RETURN:
4805 case GIMPLE_OMP_CONTINUE:
4806 case GIMPLE_OMP_SECTIONS_SWITCH:
4807 case GIMPLE_OMP_FOR:
e5c95afe
ZD
4808 /* The edges from OMP constructs can be simply redirected. */
4809 break;
4810
1d65f45c
RH
4811 case GIMPLE_EH_DISPATCH:
4812 if (!(e->flags & EDGE_FALLTHRU))
4813 redirect_eh_dispatch_edge (stmt, e, dest);
4814 break;
4815
6de9cd9a
DN
4816 default:
4817 /* Otherwise it must be a fallthru edge, and we don't need to
4818 do anything besides redirecting it. */
1e128c5f 4819 gcc_assert (e->flags & EDGE_FALLTHRU);
6de9cd9a
DN
4820 break;
4821 }
4822
4823 /* Update/insert PHI nodes as necessary. */
4824
4825 /* Now update the edges in the CFG. */
4826 e = ssa_redirect_edge (e, dest);
4827
4828 return e;
4829}
4830
14fa2cc0
ZD
4831/* Returns true if it is possible to remove edge E by redirecting
4832 it to the destination of the other edge from E->src. */
4833
4834static bool
726a989a 4835gimple_can_remove_branch_p (const_edge e)
14fa2cc0 4836{
496a4ef5 4837 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
14fa2cc0
ZD
4838 return false;
4839
4840 return true;
4841}
6de9cd9a
DN
4842
4843/* Simple wrapper, as we can always redirect fallthru edges. */
4844
4845static basic_block
726a989a 4846gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6de9cd9a 4847{
726a989a 4848 e = gimple_redirect_edge_and_branch (e, dest);
1e128c5f 4849 gcc_assert (e);
6de9cd9a
DN
4850
4851 return NULL;
4852}
4853
4854
4855/* Splits basic block BB after statement STMT (but at least after the
4856 labels). If STMT is NULL, BB is split just after the labels. */
4857
4858static basic_block
726a989a 4859gimple_split_block (basic_block bb, void *stmt)
6de9cd9a 4860{
726a989a
RB
4861 gimple_stmt_iterator gsi;
4862 gimple_stmt_iterator gsi_tgt;
4863 gimple act;
4864 gimple_seq list;
6de9cd9a
DN
4865 basic_block new_bb;
4866 edge e;
628f6a4e 4867 edge_iterator ei;
6de9cd9a
DN
4868
4869 new_bb = create_empty_bb (bb);
4870
4871 /* Redirect the outgoing edges. */
628f6a4e
BE
4872 new_bb->succs = bb->succs;
4873 bb->succs = NULL;
4874 FOR_EACH_EDGE (e, ei, new_bb->succs)
6de9cd9a
DN
4875 e->src = new_bb;
4876
726a989a 4877 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
6de9cd9a
DN
4878 stmt = NULL;
4879
726a989a
RB
4880 /* Move everything from GSI to the new basic block. */
4881 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4882 {
726a989a
RB
4883 act = gsi_stmt (gsi);
4884 if (gimple_code (act) == GIMPLE_LABEL)
6de9cd9a
DN
4885 continue;
4886
4887 if (!stmt)
4888 break;
4889
4890 if (stmt == act)
4891 {
726a989a 4892 gsi_next (&gsi);
6de9cd9a
DN
4893 break;
4894 }
4895 }
4896
726a989a 4897 if (gsi_end_p (gsi))
597ae074
JH
4898 return new_bb;
4899
4900 /* Split the statement list - avoid re-creating new containers as this
b8698a0f 4901 brings ugly quadratic memory consumption in the inliner.
597ae074
JH
4902 (We are still quadratic since we need to update stmt BB pointers,
4903 sadly.) */
726a989a
RB
4904 list = gsi_split_seq_before (&gsi);
4905 set_bb_seq (new_bb, list);
4906 for (gsi_tgt = gsi_start (list);
4907 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
4908 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6de9cd9a
DN
4909
4910 return new_bb;
4911}
4912
4913
4914/* Moves basic block BB after block AFTER. */
4915
4916static bool
726a989a 4917gimple_move_block_after (basic_block bb, basic_block after)
6de9cd9a
DN
4918{
4919 if (bb->prev_bb == after)
4920 return true;
4921
4922 unlink_block (bb);
4923 link_block (bb, after);
4924
4925 return true;
4926}
4927
4928
4929/* Return true if basic_block can be duplicated. */
4930
4931static bool
ca89096d 4932gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6de9cd9a
DN
4933{
4934 return true;
4935}
4936
6de9cd9a
DN
4937/* Create a duplicate of the basic block BB. NOTE: This does not
4938 preserve SSA form. */
4939
4940static basic_block
726a989a 4941gimple_duplicate_bb (basic_block bb)
6de9cd9a
DN
4942{
4943 basic_block new_bb;
726a989a
RB
4944 gimple_stmt_iterator gsi, gsi_tgt;
4945 gimple_seq phis = phi_nodes (bb);
4946 gimple phi, stmt, copy;
6de9cd9a
DN
4947
4948 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
b0382c67 4949
84d65814
DN
4950 /* Copy the PHI nodes. We ignore PHI node arguments here because
4951 the incoming edges have not been setup yet. */
726a989a 4952 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
b0382c67 4953 {
726a989a
RB
4954 phi = gsi_stmt (gsi);
4955 copy = create_phi_node (gimple_phi_result (phi), new_bb);
4956 create_new_def_for (gimple_phi_result (copy), copy,
4957 gimple_phi_result_ptr (copy));
b0382c67 4958 }
84d65814 4959
726a989a
RB
4960 gsi_tgt = gsi_start_bb (new_bb);
4961 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4962 {
84d65814
DN
4963 def_operand_p def_p;
4964 ssa_op_iter op_iter;
6de9cd9a 4965
726a989a
RB
4966 stmt = gsi_stmt (gsi);
4967 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a
DN
4968 continue;
4969
84d65814
DN
4970 /* Create a new copy of STMT and duplicate STMT's virtual
4971 operands. */
726a989a
RB
4972 copy = gimple_copy (stmt);
4973 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
1d65f45c
RH
4974
4975 maybe_duplicate_eh_stmt (copy, stmt);
6946b3f7 4976 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
84d65814
DN
4977
4978 /* Create new names for all the definitions created by COPY and
4979 add replacement mappings for each new name. */
4980 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4981 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6de9cd9a
DN
4982 }
4983
4984 return new_bb;
4985}
4986
5f40b3cb
ZD
4987/* Adds phi node arguments for edge E_COPY after basic block duplication. */
4988
4989static void
4990add_phi_args_after_copy_edge (edge e_copy)
4991{
4992 basic_block bb, bb_copy = e_copy->src, dest;
4993 edge e;
4994 edge_iterator ei;
726a989a
RB
4995 gimple phi, phi_copy;
4996 tree def;
4997 gimple_stmt_iterator psi, psi_copy;
5f40b3cb 4998
726a989a 4999 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5f40b3cb
ZD
5000 return;
5001
5002 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5003
5004 if (e_copy->dest->flags & BB_DUPLICATED)
5005 dest = get_bb_original (e_copy->dest);
5006 else
5007 dest = e_copy->dest;
5008
5009 e = find_edge (bb, dest);
5010 if (!e)
5011 {
5012 /* During loop unrolling the target of the latch edge is copied.
5013 In this case we are not looking for edge to dest, but to
5014 duplicated block whose original was dest. */
5015 FOR_EACH_EDGE (e, ei, bb->succs)
5016 {
5017 if ((e->dest->flags & BB_DUPLICATED)
5018 && get_bb_original (e->dest) == dest)
5019 break;
5020 }
5021
5022 gcc_assert (e != NULL);
5023 }
5024
726a989a
RB
5025 for (psi = gsi_start_phis (e->dest),
5026 psi_copy = gsi_start_phis (e_copy->dest);
5027 !gsi_end_p (psi);
5028 gsi_next (&psi), gsi_next (&psi_copy))
5f40b3cb 5029 {
726a989a
RB
5030 phi = gsi_stmt (psi);
5031 phi_copy = gsi_stmt (psi_copy);
5f40b3cb 5032 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
b8698a0f 5033 add_phi_arg (phi_copy, def, e_copy,
f5045c96 5034 gimple_phi_arg_location_from_edge (phi, e));
5f40b3cb
ZD
5035 }
5036}
5037
84d65814 5038
42759f1e
ZD
5039/* Basic block BB_COPY was created by code duplication. Add phi node
5040 arguments for edges going out of BB_COPY. The blocks that were
6580ee77 5041 duplicated have BB_DUPLICATED set. */
42759f1e
ZD
5042
5043void
5044add_phi_args_after_copy_bb (basic_block bb_copy)
5045{
5f40b3cb 5046 edge e_copy;
726a989a 5047 edge_iterator ei;
42759f1e 5048
628f6a4e 5049 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
42759f1e 5050 {
5f40b3cb 5051 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5052 }
5053}
5054
5055/* Blocks in REGION_COPY array of length N_REGION were created by
5056 duplication of basic blocks. Add phi node arguments for edges
5f40b3cb
ZD
5057 going from these blocks. If E_COPY is not NULL, also add
5058 phi node arguments for its destination.*/
42759f1e
ZD
5059
5060void
5f40b3cb
ZD
5061add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5062 edge e_copy)
42759f1e
ZD
5063{
5064 unsigned i;
5065
5066 for (i = 0; i < n_region; i++)
6580ee77 5067 region_copy[i]->flags |= BB_DUPLICATED;
42759f1e
ZD
5068
5069 for (i = 0; i < n_region; i++)
5070 add_phi_args_after_copy_bb (region_copy[i]);
5f40b3cb
ZD
5071 if (e_copy)
5072 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5073
5074 for (i = 0; i < n_region; i++)
6580ee77 5075 region_copy[i]->flags &= ~BB_DUPLICATED;
42759f1e
ZD
5076}
5077
42759f1e
ZD
5078/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5079 important exit edge EXIT. By important we mean that no SSA name defined
5080 inside region is live over the other exit edges of the region. All entry
5081 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5082 to the duplicate of the region. SSA form, dominance and loop information
5083 is updated. The new basic blocks are stored to REGION_COPY in the same
5084 order as they had in REGION, provided that REGION_COPY is not NULL.
5085 The function returns false if it is unable to copy the region,
5086 true otherwise. */
5087
5088bool
726a989a 5089gimple_duplicate_sese_region (edge entry, edge exit,
42759f1e
ZD
5090 basic_block *region, unsigned n_region,
5091 basic_block *region_copy)
5092{
66f97d31 5093 unsigned i;
42759f1e
ZD
5094 bool free_region_copy = false, copying_header = false;
5095 struct loop *loop = entry->dest->loop_father;
5096 edge exit_copy;
66f97d31 5097 VEC (basic_block, heap) *doms;
42759f1e 5098 edge redirected;
09bac500
JH
5099 int total_freq = 0, entry_freq = 0;
5100 gcov_type total_count = 0, entry_count = 0;
42759f1e
ZD
5101
5102 if (!can_copy_bbs_p (region, n_region))
5103 return false;
5104
5105 /* Some sanity checking. Note that we do not check for all possible
5106 missuses of the functions. I.e. if you ask to copy something weird,
5107 it will work, but the state of structures probably will not be
5108 correct. */
42759f1e
ZD
5109 for (i = 0; i < n_region; i++)
5110 {
5111 /* We do not handle subloops, i.e. all the blocks must belong to the
5112 same loop. */
5113 if (region[i]->loop_father != loop)
5114 return false;
5115
5116 if (region[i] != entry->dest
5117 && region[i] == loop->header)
5118 return false;
5119 }
5120
561e8a90 5121 set_loop_copy (loop, loop);
42759f1e
ZD
5122
5123 /* In case the function is used for loop header copying (which is the primary
5124 use), ensure that EXIT and its copy will be new latch and entry edges. */
5125 if (loop->header == entry->dest)
5126 {
5127 copying_header = true;
561e8a90 5128 set_loop_copy (loop, loop_outer (loop));
42759f1e
ZD
5129
5130 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5131 return false;
5132
5133 for (i = 0; i < n_region; i++)
5134 if (region[i] != exit->src
5135 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5136 return false;
5137 }
5138
5139 if (!region_copy)
5140 {
858904db 5141 region_copy = XNEWVEC (basic_block, n_region);
42759f1e
ZD
5142 free_region_copy = true;
5143 }
5144
5006671f 5145 gcc_assert (!need_ssa_update_p (cfun));
42759f1e 5146
5deaef19 5147 /* Record blocks outside the region that are dominated by something
42759f1e 5148 inside. */
66f97d31 5149 doms = NULL;
6580ee77
JH
5150 initialize_original_copy_tables ();
5151
66f97d31 5152 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
42759f1e 5153
09bac500
JH
5154 if (entry->dest->count)
5155 {
5156 total_count = entry->dest->count;
5157 entry_count = entry->count;
5158 /* Fix up corner cases, to avoid division by zero or creation of negative
5159 frequencies. */
5160 if (entry_count > total_count)
5161 entry_count = total_count;
5162 }
5163 else
5164 {
5165 total_freq = entry->dest->frequency;
5166 entry_freq = EDGE_FREQUENCY (entry);
5167 /* Fix up corner cases, to avoid division by zero or creation of negative
5168 frequencies. */
5169 if (total_freq == 0)
5170 total_freq = 1;
5171 else if (entry_freq > total_freq)
5172 entry_freq = total_freq;
5173 }
5deaef19 5174
b9a66240
ZD
5175 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5176 split_edge_bb_loc (entry));
09bac500
JH
5177 if (total_count)
5178 {
5179 scale_bbs_frequencies_gcov_type (region, n_region,
5180 total_count - entry_count,
5181 total_count);
5182 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6531d1be 5183 total_count);
09bac500
JH
5184 }
5185 else
5186 {
5187 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5188 total_freq);
5189 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5190 }
42759f1e
ZD
5191
5192 if (copying_header)
5193 {
5194 loop->header = exit->dest;
5195 loop->latch = exit->src;
5196 }
5197
5198 /* Redirect the entry and add the phi node arguments. */
6580ee77 5199 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
42759f1e 5200 gcc_assert (redirected != NULL);
71882046 5201 flush_pending_stmts (entry);
42759f1e
ZD
5202
5203 /* Concerning updating of dominators: We must recount dominators
84d65814
DN
5204 for entry block and its copy. Anything that is outside of the
5205 region, but was dominated by something inside needs recounting as
5206 well. */
42759f1e 5207 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
66f97d31
ZD
5208 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5209 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5f40b3cb 5210 VEC_free (basic_block, heap, doms);
42759f1e 5211
84d65814 5212 /* Add the other PHI node arguments. */
5f40b3cb
ZD
5213 add_phi_args_after_copy (region_copy, n_region, NULL);
5214
5215 /* Update the SSA web. */
5216 update_ssa (TODO_update_ssa);
5217
5218 if (free_region_copy)
5219 free (region_copy);
5220
5221 free_original_copy_tables ();
5222 return true;
5223}
5224
5225/* Duplicates REGION consisting of N_REGION blocks. The new blocks
5226 are stored to REGION_COPY in the same order in that they appear
5227 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5228 the region, EXIT an exit from it. The condition guarding EXIT
5229 is moved to ENTRY. Returns true if duplication succeeds, false
5230 otherwise.
5231
b8698a0f
L
5232 For example,
5233
5f40b3cb
ZD
5234 some_code;
5235 if (cond)
5236 A;
5237 else
5238 B;
5239
5240 is transformed to
5241
5242 if (cond)
5243 {
5244 some_code;
5245 A;
5246 }
5247 else
5248 {
5249 some_code;
5250 B;
5251 }
5252*/
5253
5254bool
726a989a
RB
5255gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5256 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5257 basic_block *region_copy ATTRIBUTE_UNUSED)
5f40b3cb
ZD
5258{
5259 unsigned i;
5260 bool free_region_copy = false;
5261 struct loop *loop = exit->dest->loop_father;
5262 struct loop *orig_loop = entry->dest->loop_father;
5263 basic_block switch_bb, entry_bb, nentry_bb;
5264 VEC (basic_block, heap) *doms;
5265 int total_freq = 0, exit_freq = 0;
5266 gcov_type total_count = 0, exit_count = 0;
5267 edge exits[2], nexits[2], e;
48710229 5268 gimple_stmt_iterator gsi,gsi1;
726a989a 5269 gimple cond_stmt;
8adfe01d 5270 edge sorig, snew;
48710229 5271 basic_block exit_bb;
8adfe01d 5272 basic_block iters_bb;
48710229 5273 tree new_rhs;
8adfe01d
RL
5274 gimple_stmt_iterator psi;
5275 gimple phi;
5276 tree def;
5f40b3cb
ZD
5277
5278 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5279 exits[0] = exit;
5280 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5281
5282 if (!can_copy_bbs_p (region, n_region))
5283 return false;
5284
5f40b3cb
ZD
5285 initialize_original_copy_tables ();
5286 set_loop_copy (orig_loop, loop);
48710229 5287 duplicate_subloops (orig_loop, loop);
5f40b3cb
ZD
5288
5289 if (!region_copy)
5290 {
5291 region_copy = XNEWVEC (basic_block, n_region);
5292 free_region_copy = true;
5293 }
5294
5006671f 5295 gcc_assert (!need_ssa_update_p (cfun));
5f40b3cb
ZD
5296
5297 /* Record blocks outside the region that are dominated by something
5298 inside. */
5299 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5300
5301 if (exit->src->count)
5302 {
5303 total_count = exit->src->count;
5304 exit_count = exit->count;
5305 /* Fix up corner cases, to avoid division by zero or creation of negative
5306 frequencies. */
5307 if (exit_count > total_count)
5308 exit_count = total_count;
5309 }
5310 else
5311 {
5312 total_freq = exit->src->frequency;
5313 exit_freq = EDGE_FREQUENCY (exit);
5314 /* Fix up corner cases, to avoid division by zero or creation of negative
5315 frequencies. */
5316 if (total_freq == 0)
5317 total_freq = 1;
5318 if (exit_freq > total_freq)
5319 exit_freq = total_freq;
5320 }
5321
5322 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5323 split_edge_bb_loc (exit));
5324 if (total_count)
5325 {
5326 scale_bbs_frequencies_gcov_type (region, n_region,
5327 total_count - exit_count,
5328 total_count);
5329 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5330 total_count);
5331 }
5332 else
5333 {
5334 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5335 total_freq);
5336 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5337 }
5338
5339 /* Create the switch block, and put the exit condition to it. */
5340 entry_bb = entry->dest;
5341 nentry_bb = get_bb_copy (entry_bb);
5342 if (!last_stmt (entry->src)
5343 || !stmt_ends_bb_p (last_stmt (entry->src)))
5344 switch_bb = entry->src;
5345 else
5346 switch_bb = split_edge (entry);
5347 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5348
726a989a
RB
5349 gsi = gsi_last_bb (switch_bb);
5350 cond_stmt = last_stmt (exit->src);
5351 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5352 cond_stmt = gimple_copy (cond_stmt);
b8698a0f
L
5353
5354 /* If the block consisting of the exit condition has the latch as
5355 successor, then the body of the loop is executed before
5356     the exit condition is tested.  In that case, moving the
5357     condition to the entry causes the loop to iterate one less
5358     time (which is the wanted outcome, since we peel off the
5359     last iteration).  If the body is executed after the
5360     condition, moving the condition to the entry requires
48710229
RL
5361     decrementing the iteration count by one.  */
5362 if (exits[1]->dest == orig_loop->latch)
5363 new_rhs = gimple_cond_rhs (cond_stmt);
5364 else
5365 {
5366 new_rhs = fold_build2 (MINUS_EXPR, TREE_TYPE (gimple_cond_rhs (cond_stmt)),
b8698a0f 5367 gimple_cond_rhs (cond_stmt),
48710229
RL
5368 build_int_cst (TREE_TYPE (gimple_cond_rhs (cond_stmt)), 1));
5369
5370 if (TREE_CODE (gimple_cond_rhs (cond_stmt)) == SSA_NAME)
5371 {
5372 iters_bb = gimple_bb (SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)));
5373 for (gsi1 = gsi_start_bb (iters_bb); !gsi_end_p (gsi1); gsi_next (&gsi1))
6d8752c4 5374 if (gsi_stmt (gsi1) == SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)))
48710229 5375 break;
b8698a0f 5376
48710229
RL
5377 new_rhs = force_gimple_operand_gsi (&gsi1, new_rhs, true,
5378					      NULL_TREE, false, GSI_CONTINUE_LINKING);
5379 }
b8698a0f
L
5380 }
5381 gimple_cond_set_rhs (cond_stmt, unshare_expr (new_rhs));
726a989a 5382 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
726a989a 5383 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5f40b3cb
ZD
5384
5385 sorig = single_succ_edge (switch_bb);
5386 sorig->flags = exits[1]->flags;
5387 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5388
5389 /* Register the new edge from SWITCH_BB in loop exit lists. */
5390 rescan_loop_exit (snew, true, false);
5391
5392 /* Add the PHI node arguments. */
5393 add_phi_args_after_copy (region_copy, n_region, snew);
b8698a0f 5394
5f40b3cb
ZD
5395 /* Get rid of now superfluous conditions and associated edges (and phi node
5396 arguments). */
48710229 5397 exit_bb = exit->dest;
b8698a0f 5398
5f40b3cb 5399 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
726a989a 5400 PENDING_STMT (e) = NULL;
b8698a0f 5401
8adfe01d
RL
5402 /* The latch of ORIG_LOOP was copied, and so was the backedge
5403 to the original header. We redirect this backedge to EXIT_BB. */
48710229 5404 for (i = 0; i < n_region; i++)
8adfe01d
RL
5405 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5406 {
5407 gcc_assert (single_succ_edge (region_copy[i]));
5408 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5409 PENDING_STMT (e) = NULL;
5410 for (psi = gsi_start_phis (exit_bb);
5411 !gsi_end_p (psi);
5412 gsi_next (&psi))
5413 {
5414 phi = gsi_stmt (psi);
5415 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5416 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5417 }
5418 }
5419 e = redirect_edge_and_branch (nexits[0], nexits[1]->dest);
5420 PENDING_STMT (e) = NULL;
5421
5f40b3cb
ZD
5422 /* Anything that is outside of the region, but was dominated by something
5423     inside, needs its dominance info updated.  */
5424 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5425 VEC_free (basic_block, heap, doms);
84d65814
DN
5426 /* Update the SSA web. */
5427 update_ssa (TODO_update_ssa);
6d8752c4 5428
42759f1e
ZD
5429 if (free_region_copy)
5430 free (region_copy);
6d8752c4 5431
6580ee77 5432 free_original_copy_tables ();
42759f1e
ZD
5433 return true;
5434}
6de9cd9a 5435
50674e96
DN
5436/* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5437 adding blocks when the dominator traversal reaches EXIT. This
5438 function silently assumes that ENTRY strictly dominates EXIT. */
5439
9f9f72aa 5440void
50674e96
DN
5441gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5442 VEC(basic_block,heap) **bbs_p)
5443{
5444 basic_block son;
5445
5446 for (son = first_dom_son (CDI_DOMINATORS, entry);
5447 son;
5448 son = next_dom_son (CDI_DOMINATORS, son))
5449 {
5450 VEC_safe_push (basic_block, heap, *bbs_p, son);
5451 if (son != exit)
5452 gather_blocks_in_sese_region (son, exit, bbs_p);
5453 }
5454}
5455
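A minimal usage sketch, mirroring the call made later in move_sese_region_to_fn: the caller is expected to push ENTRY itself, since the walk above only visits ENTRY's dominated sons (ENTRY_BB and EXIT_BB are assumed to delimit a region in which ENTRY_BB strictly dominates EXIT_BB).

  /* Hypothetical caller of gather_blocks_in_sese_region.  */
  VEC(basic_block,heap) *bbs = NULL;

  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* ... operate on the collected region blocks ... */

  VEC_free (basic_block, heap, bbs);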
917948d3
ZD
5456/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5457 The duplicates are recorded in VARS_MAP. */
5458
5459static void
5460replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5461 tree to_context)
5462{
5463 tree t = *tp, new_t;
5464 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5465 void **loc;
5466
5467 if (DECL_CONTEXT (t) == to_context)
5468 return;
5469
5470 loc = pointer_map_contains (vars_map, t);
5471
5472 if (!loc)
5473 {
5474 loc = pointer_map_insert (vars_map, t);
5475
5476 if (SSA_VAR_P (t))
5477 {
5478 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
cb91fab0 5479 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
917948d3
ZD
5480 }
5481 else
5482 {
5483 gcc_assert (TREE_CODE (t) == CONST_DECL);
5484 new_t = copy_node (t);
5485 }
5486 DECL_CONTEXT (new_t) = to_context;
5487
5488 *loc = new_t;
5489 }
5490 else
3d9a9f94 5491 new_t = (tree) *loc;
917948d3
ZD
5492
5493 *tp = new_t;
5494}
5495
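A hedged sketch of how this helper might be driven directly; LOCAL_VAR and CHILD_FN_DECL are illustrative names, not part of the original source.

  /* Remap local variable T into the child function, reusing any copy
     already recorded in VARS_MAP.  */
  struct pointer_map_t *vars_map = pointer_map_create ();
  tree t = local_var;		/* assumed VAR_DECL or CONST_DECL */

  replace_by_duplicate_decl (&t, vars_map, child_fn_decl);
  /* T now refers to the duplicate whose DECL_CONTEXT is CHILD_FN_DECL.  */

  pointer_map_destroy (vars_map);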
726a989a 5496
917948d3
ZD
5497/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5498 VARS_MAP maps old ssa names and var_decls to the new ones. */
5499
5500static tree
5501replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5502 tree to_context)
5503{
5504 void **loc;
5505 tree new_name, decl = SSA_NAME_VAR (name);
5506
5507 gcc_assert (is_gimple_reg (name));
5508
5509 loc = pointer_map_contains (vars_map, name);
5510
5511 if (!loc)
5512 {
5513 replace_by_duplicate_decl (&decl, vars_map, to_context);
5514
5515 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5516 if (gimple_in_ssa_p (cfun))
5517 add_referenced_var (decl);
5518
5519 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5520 if (SSA_NAME_IS_DEFAULT_DEF (name))
5521 set_default_def (decl, new_name);
5522 pop_cfun ();
5523
5524 loc = pointer_map_insert (vars_map, name);
5525 *loc = new_name;
5526 }
5527 else
3d9a9f94 5528 new_name = (tree) *loc;
917948d3
ZD
5529
5530 return new_name;
5531}
50674e96
DN
5532
5533struct move_stmt_d
5534{
b357f682
JJ
5535 tree orig_block;
5536 tree new_block;
50674e96
DN
5537 tree from_context;
5538 tree to_context;
917948d3 5539 struct pointer_map_t *vars_map;
fad41cd7 5540 htab_t new_label_map;
1d65f45c 5541 struct pointer_map_t *eh_map;
50674e96
DN
5542 bool remap_decls_p;
5543};
5544
5545/* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
b357f682
JJ
5546   contained in *TP if it was previously set to ORIG_BLOCK, and change the
5547   DECL_CONTEXT of every local variable referenced in *TP.  */
50674e96
DN
5548
5549static tree
726a989a 5550move_stmt_op (tree *tp, int *walk_subtrees, void *data)
50674e96 5551{
726a989a
RB
5552 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5553 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
fad41cd7 5554 tree t = *tp;
50674e96 5555
726a989a
RB
5556 if (EXPR_P (t))
5557 /* We should never have TREE_BLOCK set on non-statements. */
5558 gcc_assert (!TREE_BLOCK (t));
fad41cd7 5559
917948d3 5560 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
50674e96 5561 {
917948d3
ZD
5562 if (TREE_CODE (t) == SSA_NAME)
5563 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5564 else if (TREE_CODE (t) == LABEL_DECL)
fad41cd7
RH
5565 {
5566 if (p->new_label_map)
5567 {
5568 struct tree_map in, *out;
fc8600f9 5569 in.base.from = t;
3d9a9f94
KG
5570 out = (struct tree_map *)
5571 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
fad41cd7
RH
5572 if (out)
5573 *tp = t = out->to;
5574 }
50674e96 5575
fad41cd7
RH
5576 DECL_CONTEXT (t) = p->to_context;
5577 }
5578 else if (p->remap_decls_p)
50674e96 5579 {
917948d3
ZD
5580 /* Replace T with its duplicate. T should no longer appear in the
5581 parent function, so this looks wasteful; however, it may appear
5582 in referenced_vars, and more importantly, as virtual operands of
5583 statements, and in alias lists of other variables. It would be
5584 quite difficult to expunge it from all those places. ??? It might
5585 suffice to do this for addressable variables. */
5586 if ((TREE_CODE (t) == VAR_DECL
5587 && !is_global_var (t))
5588 || TREE_CODE (t) == CONST_DECL)
5589 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
b8698a0f 5590
917948d3
ZD
5591 if (SSA_VAR_P (t)
5592 && gimple_in_ssa_p (cfun))
fad41cd7 5593 {
917948d3
ZD
5594 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5595 add_referenced_var (*tp);
5596 pop_cfun ();
fad41cd7 5597 }
50674e96 5598 }
917948d3 5599 *walk_subtrees = 0;
50674e96 5600 }
fad41cd7
RH
5601 else if (TYPE_P (t))
5602 *walk_subtrees = 0;
50674e96
DN
5603
5604 return NULL_TREE;
5605}
5606
1d65f45c
RH
5607/* Helper for move_stmt_r. Given an EH region number for the source
5608   function, map that to the duplicate EH region number in the dest.  */
5609
5610static int
5611move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5612{
5613 eh_region old_r, new_r;
5614 void **slot;
5615
5616 old_r = get_eh_region_from_number (old_nr);
5617 slot = pointer_map_contains (p->eh_map, old_r);
5618 new_r = (eh_region) *slot;
5619
5620 return new_r->index;
5621}
5622
5623/* Similar, but operate on INTEGER_CSTs. */
5624
5625static tree
5626move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5627{
5628 int old_nr, new_nr;
5629
5630 old_nr = tree_low_cst (old_t_nr, 0);
5631 new_nr = move_stmt_eh_region_nr (old_nr, p);
5632
5633 return build_int_cst (NULL, new_nr);
5634}
5635
726a989a
RB
5636/* Like move_stmt_op, but for gimple statements.
5637
5638 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5639 contained in the current statement in *GSI_P and change the
5640 DECL_CONTEXT of every local variable referenced in the current
5641 statement. */
5642
5643static tree
5644move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5645 struct walk_stmt_info *wi)
5646{
5647 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5648 gimple stmt = gsi_stmt (*gsi_p);
5649 tree block = gimple_block (stmt);
5650
5651 if (p->orig_block == NULL_TREE
5652 || block == p->orig_block
5653 || block == NULL_TREE)
5654 gimple_set_block (stmt, p->new_block);
5655#ifdef ENABLE_CHECKING
5656 else if (block != p->new_block)
5657 {
5658 while (block && block != p->orig_block)
5659 block = BLOCK_SUPERCONTEXT (block);
5660 gcc_assert (block);
5661 }
5662#endif
5663
1d65f45c 5664 switch (gimple_code (stmt))
726a989a 5665 {
1d65f45c
RH
5666 case GIMPLE_CALL:
5667 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
5668 {
5669 tree r, fndecl = gimple_call_fndecl (stmt);
5670 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5671 switch (DECL_FUNCTION_CODE (fndecl))
5672 {
5673 case BUILT_IN_EH_COPY_VALUES:
5674 r = gimple_call_arg (stmt, 1);
5675 r = move_stmt_eh_region_tree_nr (r, p);
5676 gimple_call_set_arg (stmt, 1, r);
5677 /* FALLTHRU */
5678
5679 case BUILT_IN_EH_POINTER:
5680 case BUILT_IN_EH_FILTER:
5681 r = gimple_call_arg (stmt, 0);
5682 r = move_stmt_eh_region_tree_nr (r, p);
5683 gimple_call_set_arg (stmt, 0, r);
5684 break;
726a989a 5685
1d65f45c
RH
5686 default:
5687 break;
5688 }
5689 }
5690 break;
5691
5692 case GIMPLE_RESX:
5693 {
5694 int r = gimple_resx_region (stmt);
5695 r = move_stmt_eh_region_nr (r, p);
5696 gimple_resx_set_region (stmt, r);
5697 }
5698 break;
726a989a 5699
1d65f45c
RH
5700 case GIMPLE_EH_DISPATCH:
5701 {
5702 int r = gimple_eh_dispatch_region (stmt);
5703 r = move_stmt_eh_region_nr (r, p);
5704 gimple_eh_dispatch_set_region (stmt, r);
5705 }
5706 break;
5707
5708 case GIMPLE_OMP_RETURN:
5709 case GIMPLE_OMP_CONTINUE:
5710 break;
5711 default:
5712 if (is_gimple_omp (stmt))
5713 {
5714 /* Do not remap variables inside OMP directives. Variables
5715 referenced in clauses and directive header belong to the
5716 parent function and should not be moved into the child
5717 function. */
5718 bool save_remap_decls_p = p->remap_decls_p;
5719 p->remap_decls_p = false;
5720 *handled_ops_p = true;
5721
5722 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
5723 move_stmt_op, wi);
5724
5725 p->remap_decls_p = save_remap_decls_p;
5726 }
5727 break;
726a989a
RB
5728 }
5729
5730 return NULL_TREE;
5731}
5732
917948d3
ZD
5733/* Marks virtual operands of all statements in basic block BB for
5734   renaming.  */
5735
dea61d92
SP
5736void
5737mark_virtual_ops_in_bb (basic_block bb)
917948d3 5738{
726a989a 5739 gimple_stmt_iterator gsi;
dea61d92 5740
726a989a
RB
5741 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5742 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
dea61d92 5743
726a989a
RB
5744 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5745 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
dea61d92
SP
5746}
5747
50674e96
DN
5748/* Move basic block BB from function CFUN to function DEST_FN. The
5749 block is moved out of the original linked list and placed after
5750 block AFTER in the new list. Also, the block is removed from the
5751 original array of blocks and placed in DEST_FN's array of blocks.
5752   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
5753 updated to reflect the moved edges.
6531d1be 5754
917948d3
ZD
5755 The local variables are remapped to new instances, VARS_MAP is used
5756 to record the mapping. */
50674e96
DN
5757
5758static void
5759move_block_to_fn (struct function *dest_cfun, basic_block bb,
5760 basic_block after, bool update_edge_count_p,
1d65f45c 5761 struct move_stmt_d *d)
50674e96
DN
5762{
5763 struct control_flow_graph *cfg;
5764 edge_iterator ei;
5765 edge e;
726a989a 5766 gimple_stmt_iterator si;
728b26bb 5767 unsigned old_len, new_len;
50674e96 5768
3722506a
ZD
5769 /* Remove BB from dominance structures. */
5770 delete_from_dominance_info (CDI_DOMINATORS, bb);
5f40b3cb
ZD
5771 if (current_loops)
5772 remove_bb_from_loops (bb);
3722506a 5773
50674e96
DN
5774 /* Link BB to the new linked list. */
5775 move_block_after (bb, after);
5776
5777 /* Update the edge count in the corresponding flowgraphs. */
5778 if (update_edge_count_p)
5779 FOR_EACH_EDGE (e, ei, bb->succs)
5780 {
5781 cfun->cfg->x_n_edges--;
5782 dest_cfun->cfg->x_n_edges++;
5783 }
5784
5785 /* Remove BB from the original basic block array. */
5786 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5787 cfun->cfg->x_n_basic_blocks--;
5788
5789 /* Grow DEST_CFUN's basic block array if needed. */
5790 cfg = dest_cfun->cfg;
5791 cfg->x_n_basic_blocks++;
3722506a
ZD
5792 if (bb->index >= cfg->x_last_basic_block)
5793 cfg->x_last_basic_block = bb->index + 1;
50674e96 5794
728b26bb
DN
5795 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5796 if ((unsigned) cfg->x_last_basic_block >= old_len)
50674e96 5797 {
728b26bb 5798 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
a590ac65
KH
5799 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5800 new_len);
50674e96
DN
5801 }
5802
5803 VEC_replace (basic_block, cfg->x_basic_block_info,
e0310afb 5804 bb->index, bb);
50674e96 5805
917948d3 5806 /* Remap the variables in phi nodes. */
726a989a 5807 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
917948d3 5808 {
726a989a 5809 gimple phi = gsi_stmt (si);
917948d3
ZD
5810 use_operand_p use;
5811 tree op = PHI_RESULT (phi);
5812 ssa_op_iter oi;
5813
5814 if (!is_gimple_reg (op))
5f40b3cb
ZD
5815 {
5816 /* Remove the phi nodes for virtual operands (alias analysis will be
5817 run for the new function, anyway). */
726a989a 5818 remove_phi_node (&si, true);
5f40b3cb
ZD
5819 continue;
5820 }
917948d3 5821
b357f682
JJ
5822 SET_PHI_RESULT (phi,
5823 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
917948d3
ZD
5824 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5825 {
5826 op = USE_FROM_PTR (use);
5827 if (TREE_CODE (op) == SSA_NAME)
b357f682 5828 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
917948d3 5829 }
726a989a
RB
5830
5831 gsi_next (&si);
917948d3
ZD
5832 }
5833
726a989a 5834 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
50674e96 5835 {
726a989a 5836 gimple stmt = gsi_stmt (si);
726a989a 5837 struct walk_stmt_info wi;
50674e96 5838
726a989a
RB
5839 memset (&wi, 0, sizeof (wi));
5840 wi.info = d;
5841 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
50674e96 5842
726a989a 5843 if (gimple_code (stmt) == GIMPLE_LABEL)
50674e96 5844 {
726a989a 5845 tree label = gimple_label_label (stmt);
50674e96
DN
5846 int uid = LABEL_DECL_UID (label);
5847
5848 gcc_assert (uid > -1);
5849
5850 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5851 if (old_len <= (unsigned) uid)
5852 {
5006671f 5853 new_len = 3 * uid / 2 + 1;
a590ac65
KH
5854 VEC_safe_grow_cleared (basic_block, gc,
5855 cfg->x_label_to_block_map, new_len);
50674e96
DN
5856 }
5857
5858 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5859 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5860
5861 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5862
cb91fab0
JH
5863 if (uid >= dest_cfun->cfg->last_label_uid)
5864 dest_cfun->cfg->last_label_uid = uid + 1;
50674e96 5865 }
fad41cd7 5866
1d65f45c
RH
5867 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
5868 remove_stmt_from_eh_lp_fn (cfun, stmt);
5869
5870 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5871 gimple_remove_stmt_histograms (cfun, stmt);
917948d3 5872
5f40b3cb
ZD
5873 /* We cannot leave any operands allocated from the operand caches of
5874 the current function. */
5875 free_stmt_operands (stmt);
5876 push_cfun (dest_cfun);
917948d3 5877 update_stmt (stmt);
5f40b3cb 5878 pop_cfun ();
fad41cd7 5879 }
7241571e
JJ
5880
5881 FOR_EACH_EDGE (e, ei, bb->succs)
5882 if (e->goto_locus)
5883 {
5884 tree block = e->goto_block;
5885 if (d->orig_block == NULL_TREE
5886 || block == d->orig_block)
5887 e->goto_block = d->new_block;
5888#ifdef ENABLE_CHECKING
5889 else if (block != d->new_block)
5890 {
5891 while (block && block != d->orig_block)
5892 block = BLOCK_SUPERCONTEXT (block);
5893 gcc_assert (block);
5894 }
5895#endif
5896 }
fad41cd7
RH
5897}
5898
5899/* Examine the statements in BB (which is in SRC_CFUN); find and return
5900 the outermost EH region. Use REGION as the incoming base EH region. */
5901
1d65f45c 5902static eh_region
fad41cd7 5903find_outermost_region_in_block (struct function *src_cfun,
1d65f45c 5904 basic_block bb, eh_region region)
fad41cd7 5905{
726a989a 5906 gimple_stmt_iterator si;
6531d1be 5907
726a989a 5908 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
fad41cd7 5909 {
726a989a 5910 gimple stmt = gsi_stmt (si);
1d65f45c
RH
5911 eh_region stmt_region;
5912 int lp_nr;
1799e5d5 5913
1d65f45c
RH
5914 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
5915 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
5916 if (stmt_region)
7e2df4a1 5917 {
1d65f45c 5918 if (region == NULL)
7e2df4a1
JJ
5919 region = stmt_region;
5920 else if (stmt_region != region)
5921 {
5922 region = eh_region_outermost (src_cfun, stmt_region, region);
1d65f45c 5923 gcc_assert (region != NULL);
7e2df4a1
JJ
5924 }
5925 }
50674e96 5926 }
fad41cd7
RH
5927
5928 return region;
50674e96
DN
5929}
5930
fad41cd7
RH
5931static tree
5932new_label_mapper (tree decl, void *data)
5933{
5934 htab_t hash = (htab_t) data;
5935 struct tree_map *m;
5936 void **slot;
5937
5938 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5939
3d9a9f94 5940 m = XNEW (struct tree_map);
fad41cd7 5941 m->hash = DECL_UID (decl);
fc8600f9 5942 m->base.from = decl;
c2255bc4 5943 m->to = create_artificial_label (UNKNOWN_LOCATION);
fad41cd7 5944 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
cb91fab0
JH
5945 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
5946 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
fad41cd7
RH
5947
5948 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5949 gcc_assert (*slot == NULL);
5950
5951 *slot = m;
5952
5953 return m->to;
5954}
50674e96 5955
b357f682
JJ
5956/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
5957 subblocks. */
5958
5959static void
5960replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
5961 tree to_context)
5962{
5963 tree *tp, t;
5964
5965 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
5966 {
5967 t = *tp;
e1e2bac4
JJ
5968 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
5969 continue;
b357f682
JJ
5970 replace_by_duplicate_decl (&t, vars_map, to_context);
5971 if (t != *tp)
5972 {
5973 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
5974 {
5975 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
5976 DECL_HAS_VALUE_EXPR_P (t) = 1;
5977 }
5978 TREE_CHAIN (t) = TREE_CHAIN (*tp);
5979 *tp = t;
5980 }
5981 }
5982
5983 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
5984 replace_block_vars_by_duplicates (block, vars_map, to_context);
5985}
5986
50674e96
DN
5987/* Move a single-entry, single-exit region delimited by ENTRY_BB and
5988 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
5989 single basic block in the original CFG and the new basic block is
5990 returned. DEST_CFUN must not have a CFG yet.
5991
5992 Note that the region need not be a pure SESE region. Blocks inside
5993 the region may contain calls to abort/exit. The only restriction
5994 is that ENTRY_BB should be the only entry point and it must
5995 dominate EXIT_BB.
5996
b357f682
JJ
5997 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
5998   function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
5999 to the new function.
6000
50674e96
DN
6001 All local variables referenced in the region are assumed to be in
6002 the corresponding BLOCK_VARS and unexpanded variable lists
6003 associated with DEST_CFUN. */
6004
6005basic_block
6006move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
b357f682 6007 basic_block exit_bb, tree orig_block)
50674e96 6008{
917948d3
ZD
6009 VEC(basic_block,heap) *bbs, *dom_bbs;
6010 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6011 basic_block after, bb, *entry_pred, *exit_succ, abb;
6012 struct function *saved_cfun = cfun;
1d65f45c 6013 int *entry_flag, *exit_flag;
917948d3 6014 unsigned *entry_prob, *exit_prob;
50674e96
DN
6015 unsigned i, num_entry_edges, num_exit_edges;
6016 edge e;
6017 edge_iterator ei;
fad41cd7 6018 htab_t new_label_map;
1d65f45c 6019 struct pointer_map_t *vars_map, *eh_map;
5f40b3cb 6020 struct loop *loop = entry_bb->loop_father;
b357f682 6021 struct move_stmt_d d;
50674e96
DN
6022
6023 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6024 region. */
6025 gcc_assert (entry_bb != exit_bb
2aee3e57
JJ
6026 && (!exit_bb
6027 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
50674e96 6028
917948d3
ZD
6029 /* Collect all the blocks in the region. Manually add ENTRY_BB
6030     because it won't be added by gather_blocks_in_sese_region.  */
50674e96
DN
6031 bbs = NULL;
6032 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6033 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6034
917948d3
ZD
6035 /* The blocks that used to be dominated by something in BBS will now be
6036 dominated by the new block. */
6037 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6038 VEC_address (basic_block, bbs),
6039 VEC_length (basic_block, bbs));
6040
50674e96
DN
6041 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6042 the predecessor edges to ENTRY_BB and the successor edges to
6043 EXIT_BB so that we can re-attach them to the new basic block that
6044 will replace the region. */
6045 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6046 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6047 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
917948d3 6048 entry_prob = XNEWVEC (unsigned, num_entry_edges);
50674e96
DN
6049 i = 0;
6050 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6051 {
917948d3 6052 entry_prob[i] = e->probability;
50674e96
DN
6053 entry_flag[i] = e->flags;
6054 entry_pred[i++] = e->src;
6055 remove_edge (e);
6056 }
6057
2aee3e57 6058 if (exit_bb)
50674e96 6059 {
2aee3e57
JJ
6060 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6061 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6062 sizeof (basic_block));
6063 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
917948d3 6064 exit_prob = XNEWVEC (unsigned, num_exit_edges);
2aee3e57
JJ
6065 i = 0;
6066 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6067 {
917948d3 6068 exit_prob[i] = e->probability;
2aee3e57
JJ
6069 exit_flag[i] = e->flags;
6070 exit_succ[i++] = e->dest;
6071 remove_edge (e);
6072 }
6073 }
6074 else
6075 {
6076 num_exit_edges = 0;
6077 exit_succ = NULL;
6078 exit_flag = NULL;
917948d3 6079 exit_prob = NULL;
50674e96
DN
6080 }
6081
6082 /* Switch context to the child function to initialize DEST_FN's CFG. */
6083 gcc_assert (dest_cfun->cfg == NULL);
917948d3 6084 push_cfun (dest_cfun);
fad41cd7 6085
50674e96 6086 init_empty_tree_cfg ();
fad41cd7
RH
6087
6088 /* Initialize EH information for the new function. */
1d65f45c 6089 eh_map = NULL;
fad41cd7
RH
6090 new_label_map = NULL;
6091 if (saved_cfun->eh)
6092 {
1d65f45c 6093 eh_region region = NULL;
fad41cd7
RH
6094
6095 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6096 region = find_outermost_region_in_block (saved_cfun, bb, region);
6097
6098 init_eh_for_function ();
1d65f45c 6099 if (region != NULL)
fad41cd7
RH
6100 {
6101 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
1d65f45c
RH
6102 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6103 new_label_mapper, new_label_map);
fad41cd7
RH
6104 }
6105 }
6106
917948d3
ZD
6107 pop_cfun ();
6108
50674e96
DN
6109 /* Move blocks from BBS into DEST_CFUN. */
6110 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6111 after = dest_cfun->cfg->x_entry_block_ptr;
917948d3 6112 vars_map = pointer_map_create ();
b357f682
JJ
6113
6114 memset (&d, 0, sizeof (d));
1d65f45c
RH
6115 d.orig_block = orig_block;
6116 d.new_block = DECL_INITIAL (dest_cfun->decl);
b357f682
JJ
6117 d.from_context = cfun->decl;
6118 d.to_context = dest_cfun->decl;
1d65f45c 6119 d.vars_map = vars_map;
b357f682 6120 d.new_label_map = new_label_map;
1d65f45c 6121 d.eh_map = eh_map;
b357f682 6122 d.remap_decls_p = true;
b357f682 6123
50674e96
DN
6124 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6125 {
6126 /* No need to update edge counts on the last block. It has
6127 already been updated earlier when we detached the region from
6128 the original CFG. */
1d65f45c 6129 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
50674e96
DN
6130 after = bb;
6131 }
6132
b357f682
JJ
6133 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6134 if (orig_block)
6135 {
6136 tree block;
6137 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6138 == NULL_TREE);
6139 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6140 = BLOCK_SUBBLOCKS (orig_block);
6141 for (block = BLOCK_SUBBLOCKS (orig_block);
6142 block; block = BLOCK_CHAIN (block))
6143 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6144 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6145 }
6146
6147 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6148 vars_map, dest_cfun->decl);
6149
fad41cd7
RH
6150 if (new_label_map)
6151 htab_delete (new_label_map);
1d65f45c
RH
6152 if (eh_map)
6153 pointer_map_destroy (eh_map);
917948d3 6154 pointer_map_destroy (vars_map);
50674e96
DN
6155
6156 /* Rewire the entry and exit blocks. The successor to the entry
6157 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6158 the child function. Similarly, the predecessor of DEST_FN's
6159 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6160 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6161 various CFG manipulation function get to the right CFG.
6162
6163 FIXME, this is silly. The CFG ought to become a parameter to
6164 these helpers. */
917948d3 6165 push_cfun (dest_cfun);
50674e96 6166 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
2aee3e57
JJ
6167 if (exit_bb)
6168 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
917948d3 6169 pop_cfun ();
50674e96
DN
6170
6171 /* Back in the original function, the SESE region has disappeared,
6172 create a new basic block in its place. */
6173 bb = create_empty_bb (entry_pred[0]);
5f40b3cb
ZD
6174 if (current_loops)
6175 add_bb_to_loop (bb, loop);
50674e96 6176 for (i = 0; i < num_entry_edges; i++)
917948d3
ZD
6177 {
6178 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6179 e->probability = entry_prob[i];
6180 }
50674e96
DN
6181
6182 for (i = 0; i < num_exit_edges; i++)
917948d3
ZD
6183 {
6184 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6185 e->probability = exit_prob[i];
6186 }
6187
6188 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6189 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6190 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6191 VEC_free (basic_block, heap, dom_bbs);
50674e96 6192
2aee3e57
JJ
6193 if (exit_bb)
6194 {
917948d3 6195 free (exit_prob);
2aee3e57
JJ
6196 free (exit_flag);
6197 free (exit_succ);
6198 }
917948d3 6199 free (entry_prob);
50674e96
DN
6200 free (entry_flag);
6201 free (entry_pred);
50674e96
DN
6202 VEC_free (basic_block, heap, bbs);
6203
6204 return bb;
6205}
6206
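A hedged sketch of a typical call, in the style of the OMP outlining code; CHILD_FN, ENTRY_BB, EXIT_BB and ORIG_BLOCK are assumed to be set up by the caller.

  /* Outline the SESE region ENTRY_BB..EXIT_BB into CHILD_FN, whose
     struct function must not have a CFG yet.  ORIG_BLOCK is the BLOCK
     in the parent that scopes the region's statements.  */
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  basic_block new_bb;

  new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, orig_block);
  /* NEW_BB now stands in for the whole region in the parent's CFG.  */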
84d65814 6207
726a989a
RB
6208/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6209   tree-pass.h).  */
6de9cd9a
DN
6210
6211void
6212dump_function_to_file (tree fn, FILE *file, int flags)
6213{
6214 tree arg, vars, var;
459ffad3 6215 struct function *dsf;
6de9cd9a
DN
6216 bool ignore_topmost_bind = false, any_var = false;
6217 basic_block bb;
6218 tree chain;
6531d1be 6219
673fda6b 6220 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6de9cd9a
DN
6221
6222 arg = DECL_ARGUMENTS (fn);
6223 while (arg)
6224 {
2f9ea521
RG
6225 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6226 fprintf (file, " ");
6de9cd9a 6227 print_generic_expr (file, arg, dump_flags);
3e894af1
KZ
6228 if (flags & TDF_VERBOSE)
6229 print_node (file, "", arg, 4);
6de9cd9a
DN
6230 if (TREE_CHAIN (arg))
6231 fprintf (file, ", ");
6232 arg = TREE_CHAIN (arg);
6233 }
6234 fprintf (file, ")\n");
6235
3e894af1
KZ
6236 if (flags & TDF_VERBOSE)
6237 print_node (file, "", fn, 2);
6238
459ffad3 6239 dsf = DECL_STRUCT_FUNCTION (fn);
feb4e5ba 6240 if (dsf && (flags & TDF_EH))
459ffad3
EB
6241 dump_eh_tree (file, dsf);
6242
39ecc018 6243 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6de9cd9a
DN
6244 {
6245 dump_node (fn, TDF_SLIM | flags, file);
6246 return;
6247 }
6248
953ff289 6249 /* Switch CFUN to point to FN. */
db2960f4 6250 push_cfun (DECL_STRUCT_FUNCTION (fn));
953ff289 6251
6de9cd9a
DN
6252 /* When GIMPLE is lowered, the variables are no longer available in
6253 BIND_EXPRs, so display them separately. */
cb91fab0 6254 if (cfun && cfun->decl == fn && cfun->local_decls)
6de9cd9a
DN
6255 {
6256 ignore_topmost_bind = true;
6257
6258 fprintf (file, "{\n");
cb91fab0 6259 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6de9cd9a
DN
6260 {
6261 var = TREE_VALUE (vars);
6262
6263 print_generic_decl (file, var, flags);
3e894af1
KZ
6264 if (flags & TDF_VERBOSE)
6265 print_node (file, "", var, 4);
6de9cd9a
DN
6266 fprintf (file, "\n");
6267
6268 any_var = true;
6269 }
6270 }
6271
32a87d45 6272 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6de9cd9a 6273 {
726a989a 6274 /* If the CFG has been built, emit a CFG-based dump. */
878f99d2 6275 check_bb_profile (ENTRY_BLOCK_PTR, file);
6de9cd9a
DN
6276 if (!ignore_topmost_bind)
6277 fprintf (file, "{\n");
6278
6279 if (any_var && n_basic_blocks)
6280 fprintf (file, "\n");
6281
6282 FOR_EACH_BB (bb)
726a989a 6283 gimple_dump_bb (bb, file, 2, flags);
6531d1be 6284
6de9cd9a 6285 fprintf (file, "}\n");
878f99d2 6286 check_bb_profile (EXIT_BLOCK_PTR, file);
6de9cd9a 6287 }
726a989a
RB
6288 else if (DECL_SAVED_TREE (fn) == NULL)
6289 {
6290 /* The function is now in GIMPLE form but the CFG has not been
6291 built yet. Emit the single sequence of GIMPLE statements
6292 that make up its body. */
6293 gimple_seq body = gimple_body (fn);
6294
6295 if (gimple_seq_first_stmt (body)
6296 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6297 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6298 print_gimple_seq (file, body, 0, flags);
6299 else
6300 {
6301 if (!ignore_topmost_bind)
6302 fprintf (file, "{\n");
6303
6304 if (any_var)
6305 fprintf (file, "\n");
6306
6307 print_gimple_seq (file, body, 2, flags);
6308 fprintf (file, "}\n");
6309 }
6310 }
6de9cd9a
DN
6311 else
6312 {
6313 int indent;
6314
6315 /* Make a tree based dump. */
6316 chain = DECL_SAVED_TREE (fn);
6317
953ff289 6318 if (chain && TREE_CODE (chain) == BIND_EXPR)
6de9cd9a
DN
6319 {
6320 if (ignore_topmost_bind)
6321 {
6322 chain = BIND_EXPR_BODY (chain);
6323 indent = 2;
6324 }
6325 else
6326 indent = 0;
6327 }
6328 else
6329 {
6330 if (!ignore_topmost_bind)
6331 fprintf (file, "{\n");
6332 indent = 2;
6333 }
6334
6335 if (any_var)
6336 fprintf (file, "\n");
6337
6338 print_generic_stmt_indented (file, chain, flags, indent);
6339 if (ignore_topmost_bind)
6340 fprintf (file, "}\n");
6341 }
6342
6343 fprintf (file, "\n\n");
953ff289
DN
6344
6345 /* Restore CFUN. */
db2960f4 6346 pop_cfun ();
953ff289
DN
6347}
6348
6349
6350/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6351
24e47c76 6352DEBUG_FUNCTION void
953ff289
DN
6353debug_function (tree fn, int flags)
6354{
6355 dump_function_to_file (fn, stderr, flags);
6de9cd9a
DN
6356}
6357
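A hypothetical one-liner, e.g. from a debugger session or temporary instrumentation; TDF_VOPS is one of the TDF_* flags referred to above.

  /* Dump the current function, including virtual operands, to stderr.  */
  debug_function (cfun->decl, TDF_VOPS);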
6358
d7770457 6359/* Print on FILE the indexes for the predecessors of basic_block BB. */
6de9cd9a
DN
6360
6361static void
628f6a4e 6362print_pred_bbs (FILE *file, basic_block bb)
6de9cd9a 6363{
628f6a4e
BE
6364 edge e;
6365 edge_iterator ei;
6366
6367 FOR_EACH_EDGE (e, ei, bb->preds)
d7770457 6368 fprintf (file, "bb_%d ", e->src->index);
6de9cd9a
DN
6369}
6370
6371
d7770457 6372/* Print on FILE the indexes for the successors of basic_block BB. */
6de9cd9a
DN
6373
6374static void
628f6a4e 6375print_succ_bbs (FILE *file, basic_block bb)
6de9cd9a 6376{
628f6a4e
BE
6377 edge e;
6378 edge_iterator ei;
6379
6380 FOR_EACH_EDGE (e, ei, bb->succs)
d7770457 6381 fprintf (file, "bb_%d ", e->dest->index);
6de9cd9a
DN
6382}
6383
0c8efed8
SP
6384/* Print to FILE the basic block BB, according to the VERBOSITY level.  */
6385
b8698a0f 6386void
0c8efed8
SP
6387print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6388{
6389 char *s_indent = (char *) alloca ((size_t) indent + 1);
6390 memset ((void *) s_indent, ' ', (size_t) indent);
6391 s_indent[indent] = '\0';
6392
6393 /* Print basic_block's header. */
6394 if (verbosity >= 2)
6395 {
6396 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6397 print_pred_bbs (file, bb);
6398 fprintf (file, "}, succs = {");
6399 print_succ_bbs (file, bb);
6400 fprintf (file, "})\n");
6401 }
6402
6403 /* Print basic_block's body. */
6404 if (verbosity >= 3)
6405 {
6406 fprintf (file, "%s {\n", s_indent);
726a989a 6407 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
0c8efed8
SP
6408 fprintf (file, "%s }\n", s_indent);
6409 }
6410}
6411
6412static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6de9cd9a 6413
0c8efed8
SP
6414/* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on the
6415   VERBOSITY level, this outputs the contents of the loop, or just its
6416   structure.  */
6de9cd9a
DN
6417
6418static void
0c8efed8 6419print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6de9cd9a
DN
6420{
6421 char *s_indent;
6422 basic_block bb;
6531d1be 6423
6de9cd9a
DN
6424 if (loop == NULL)
6425 return;
6426
6427 s_indent = (char *) alloca ((size_t) indent + 1);
6428 memset ((void *) s_indent, ' ', (size_t) indent);
6429 s_indent[indent] = '\0';
6430
0c8efed8 6431 /* Print loop's header. */
b8698a0f 6432 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
0c8efed8
SP
6433 loop->num, loop->header->index, loop->latch->index);
6434 fprintf (file, ", niter = ");
6435 print_generic_expr (file, loop->nb_iterations, 0);
6531d1be 6436
0c8efed8
SP
6437 if (loop->any_upper_bound)
6438 {
6439 fprintf (file, ", upper_bound = ");
6440 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6441 }
6531d1be 6442
0c8efed8
SP
6443 if (loop->any_estimate)
6444 {
6445 fprintf (file, ", estimate = ");
6446 dump_double_int (file, loop->nb_iterations_estimate, true);
6447 }
6448 fprintf (file, ")\n");
6449
6450 /* Print loop's body. */
6451 if (verbosity >= 1)
6452 {
6453 fprintf (file, "%s{\n", s_indent);
6454 FOR_EACH_BB (bb)
6455 if (bb->loop_father == loop)
6456 print_loops_bb (file, bb, indent, verbosity);
6457
6458 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6459 fprintf (file, "%s}\n", s_indent);
6460 }
6de9cd9a
DN
6461}
6462
0c8efed8
SP
6463/* Print the LOOP and its sibling loops on FILE, indented INDENT
6464   spaces.  Depending on the VERBOSITY level, this outputs the contents
6465   of the loop, or just its structure.  */
6466
6467static void
6468print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6469{
6470 if (loop == NULL)
6471 return;
6472
6473 print_loop (file, loop, indent, verbosity);
6474 print_loop_and_siblings (file, loop->next, indent, verbosity);
6475}
6de9cd9a
DN
6476
6477/* Follow a CFG edge from the entry point of the program, and on entry
6478 of a loop, pretty print the loop structure on FILE. */
6479
6531d1be 6480void
0c8efed8 6481print_loops (FILE *file, int verbosity)
6de9cd9a
DN
6482{
6483 basic_block bb;
6531d1be 6484
f8bf9252 6485 bb = ENTRY_BLOCK_PTR;
6de9cd9a 6486 if (bb && bb->loop_father)
0c8efed8 6487 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6de9cd9a
DN
6488}
6489
6490
0c8efed8
SP
6491/* Debugging loops structure at tree level, at some VERBOSITY level. */
6492
24e47c76 6493DEBUG_FUNCTION void
0c8efed8
SP
6494debug_loops (int verbosity)
6495{
6496 print_loops (stderr, verbosity);
6497}
6498
6499/* Print on stderr the code of LOOP, at some VERBOSITY level. */
6de9cd9a 6500
24e47c76 6501DEBUG_FUNCTION void
0c8efed8 6502debug_loop (struct loop *loop, int verbosity)
6de9cd9a 6503{
0c8efed8 6504 print_loop (stderr, loop, 0, verbosity);
6de9cd9a
DN
6505}
6506
0c8efed8
SP
6507/* Print on stderr the code of loop number NUM, at some VERBOSITY
6508 level. */
6509
24e47c76 6510DEBUG_FUNCTION void
0c8efed8
SP
6511debug_loop_num (unsigned num, int verbosity)
6512{
6513 debug_loop (get_loop (num), verbosity);
6514}
6de9cd9a
DN
6515
6516/* Return true if BB ends with a call, possibly followed by some
6517 instructions that must stay with the call. Return false,
6518 otherwise. */
6519
6520static bool
726a989a 6521gimple_block_ends_with_call_p (basic_block bb)
6de9cd9a 6522{
b5b8b0ac 6523 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
726a989a 6524 return is_gimple_call (gsi_stmt (gsi));
6de9cd9a
DN
6525}
6526
6527
6528/* Return true if BB ends with a conditional branch. Return false,
6529 otherwise. */
6530
6531static bool
726a989a 6532gimple_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a 6533{
726a989a
RB
6534 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6535 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6de9cd9a
DN
6536}
6537
6538
6539/* Return true if we need to add a fake edge to the exit at statement T.
726a989a 6540 Helper function for gimple_flow_call_edges_add. */
6de9cd9a
DN
6541
6542static bool
726a989a 6543need_fake_edge_p (gimple t)
6de9cd9a 6544{
726a989a
RB
6545 tree fndecl = NULL_TREE;
6546 int call_flags = 0;
6de9cd9a
DN
6547
6548 /* NORETURN and LONGJMP calls already have an edge to exit.
321cf1f2 6549 CONST and PURE calls do not need one.
6de9cd9a
DN
6550 We don't currently check for CONST and PURE here, although
6551 it would be a good idea, because those attributes are
6552 figured out from the RTL in mark_constant_function, and
6553 the counter incrementation code from -fprofile-arcs
6554 leads to different results from -fbranch-probabilities. */
726a989a 6555 if (is_gimple_call (t))
23ef6d21 6556 {
726a989a
RB
6557 fndecl = gimple_call_fndecl (t);
6558 call_flags = gimple_call_flags (t);
23ef6d21
BE
6559 }
6560
726a989a
RB
6561 if (is_gimple_call (t)
6562 && fndecl
6563 && DECL_BUILT_IN (fndecl)
23ef6d21 6564 && (call_flags & ECF_NOTHROW)
3cfa762b
RG
6565 && !(call_flags & ECF_RETURNS_TWICE)
6566 /* fork() doesn't really return twice, but the effect of
6567 wrapping it in __gcov_fork() which calls __gcov_flush()
6568 and clears the counters before forking has the same
6569 effect as returning twice. Force a fake edge. */
6570 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6571 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6572 return false;
23ef6d21 6573
726a989a
RB
6574 if (is_gimple_call (t)
6575 && !(call_flags & ECF_NORETURN))
6de9cd9a
DN
6576 return true;
6577
e0c68ce9 6578 if (gimple_code (t) == GIMPLE_ASM
726a989a 6579 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6de9cd9a
DN
6580 return true;
6581
6582 return false;
6583}
6584
6585
6586/* Add fake edges to the function exit for any non-constant and
6587   non-noreturn calls, or volatile inline assembly, in the bitmap of blocks
6588   specified by BLOCKS, or to the whole CFG if BLOCKS is zero.  Return
6589 the number of blocks that were split.
6590
6591 The goal is to expose cases in which entering a basic block does
6592 not imply that all subsequent instructions must be executed. */
6593
6594static int
726a989a 6595gimple_flow_call_edges_add (sbitmap blocks)
6de9cd9a
DN
6596{
6597 int i;
6598 int blocks_split = 0;
6599 int last_bb = last_basic_block;
6600 bool check_last_block = false;
6601
24bd1a0b 6602 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
6603 return 0;
6604
6605 if (! blocks)
6606 check_last_block = true;
6607 else
6608 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6609
6610 /* In the last basic block, before epilogue generation, there will be
6611 a fallthru edge to EXIT. Special care is required if the last insn
6612 of the last basic block is a call because make_edge folds duplicate
6613 edges, which would result in the fallthru edge also being marked
6614 fake, which would result in the fallthru edge being removed by
6615 remove_fake_edges, which would result in an invalid CFG.
6616
6617 Moreover, we can't elide the outgoing fake edge, since the block
6618 profiler needs to take this into account in order to solve the minimal
6619 spanning tree in the case that the call doesn't return.
6620
6621 Handle this by adding a dummy instruction in a new last basic block. */
6622 if (check_last_block)
6623 {
6624 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
726a989a
RB
6625 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6626 gimple t = NULL;
6627
6628 if (!gsi_end_p (gsi))
6629 t = gsi_stmt (gsi);
6de9cd9a 6630
6a60530d 6631 if (t && need_fake_edge_p (t))
6de9cd9a
DN
6632 {
6633 edge e;
6634
9ff3d2de
JL
6635 e = find_edge (bb, EXIT_BLOCK_PTR);
6636 if (e)
6637 {
726a989a
RB
6638 gsi_insert_on_edge (e, gimple_build_nop ());
6639 gsi_commit_edge_inserts ();
9ff3d2de 6640 }
6de9cd9a
DN
6641 }
6642 }
6643
6644 /* Now add fake edges to the function exit for any non constant
6645 calls since there is no way that we can determine if they will
6646 return or not... */
6647 for (i = 0; i < last_bb; i++)
6648 {
6649 basic_block bb = BASIC_BLOCK (i);
726a989a
RB
6650 gimple_stmt_iterator gsi;
6651 gimple stmt, last_stmt;
6de9cd9a
DN
6652
6653 if (!bb)
6654 continue;
6655
6656 if (blocks && !TEST_BIT (blocks, i))
6657 continue;
6658
726a989a
RB
6659 gsi = gsi_last_bb (bb);
6660 if (!gsi_end_p (gsi))
6de9cd9a 6661 {
726a989a 6662 last_stmt = gsi_stmt (gsi);
6de9cd9a
DN
6663 do
6664 {
726a989a 6665 stmt = gsi_stmt (gsi);
6de9cd9a
DN
6666 if (need_fake_edge_p (stmt))
6667 {
6668 edge e;
726a989a 6669
6de9cd9a
DN
6670 /* The handling above of the final block before the
6671 epilogue should be enough to verify that there is
6672 no edge to the exit block in CFG already.
6673 Calling make_edge in such case would cause us to
6674 mark that edge as fake and remove it later. */
6675#ifdef ENABLE_CHECKING
6676 if (stmt == last_stmt)
628f6a4e 6677 {
9ff3d2de
JL
6678 e = find_edge (bb, EXIT_BLOCK_PTR);
6679 gcc_assert (e == NULL);
628f6a4e 6680 }
6de9cd9a
DN
6681#endif
6682
6683 /* Note that the following may create a new basic block
6684 and renumber the existing basic blocks. */
6685 if (stmt != last_stmt)
6686 {
6687 e = split_block (bb, stmt);
6688 if (e)
6689 blocks_split++;
6690 }
6691 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6692 }
726a989a 6693 gsi_prev (&gsi);
6de9cd9a 6694 }
726a989a 6695 while (!gsi_end_p (gsi));
6de9cd9a
DN
6696 }
6697 }
6698
6699 if (blocks_split)
6700 verify_flow_info ();
6701
6702 return blocks_split;
6703}
6704
4f6c2131
EB
6705/* Purge dead abnormal call edges from basic block BB. */
6706
6707bool
726a989a 6708gimple_purge_dead_abnormal_call_edges (basic_block bb)
4f6c2131 6709{
726a989a 6710 bool changed = gimple_purge_dead_eh_edges (bb);
4f6c2131 6711
e3b5732b 6712 if (cfun->has_nonlocal_label)
4f6c2131 6713 {
726a989a 6714 gimple stmt = last_stmt (bb);
4f6c2131
EB
6715 edge_iterator ei;
6716 edge e;
6717
726a989a 6718 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
4f6c2131
EB
6719 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6720 {
6721 if (e->flags & EDGE_ABNORMAL)
6722 {
6723 remove_edge (e);
6724 changed = true;
6725 }
6726 else
6727 ei_next (&ei);
6728 }
6729
726a989a 6730 /* See gimple_purge_dead_eh_edges below. */
4f6c2131
EB
6731 if (changed)
6732 free_dominance_info (CDI_DOMINATORS);
6733 }
6734
6735 return changed;
6736}
6737
672987e8
ZD
6738/* Removes edge E and all the blocks dominated by it, and updates dominance
6739 information. The IL in E->src needs to be updated separately.
6740   If dominance info is not available, only the edge E is removed.  */
6741
6742void
6743remove_edge_and_dominated_blocks (edge e)
6744{
6745 VEC (basic_block, heap) *bbs_to_remove = NULL;
6746 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6747 bitmap df, df_idom;
6748 edge f;
6749 edge_iterator ei;
6750 bool none_removed = false;
6751 unsigned i;
6752 basic_block bb, dbb;
6753 bitmap_iterator bi;
6754
2b28c07a 6755 if (!dom_info_available_p (CDI_DOMINATORS))
672987e8
ZD
6756 {
6757 remove_edge (e);
6758 return;
6759 }
6760
6761 /* No updating is needed for edges to exit. */
6762 if (e->dest == EXIT_BLOCK_PTR)
6763 {
6764 if (cfgcleanup_altered_bbs)
6765 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6766 remove_edge (e);
6767 return;
6768 }
6769
6770 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6771 that is not dominated by E->dest, then this set is empty. Otherwise,
6772 all the basic blocks dominated by E->dest are removed.
6773
6774 Also, to DF_IDOM we store the immediate dominators of the blocks in
6775 the dominance frontier of E (i.e., of the successors of the
6776 removed blocks, if there are any, and of E->dest otherwise). */
6777 FOR_EACH_EDGE (f, ei, e->dest->preds)
6778 {
6779 if (f == e)
6780 continue;
6781
6782 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6783 {
6784 none_removed = true;
6785 break;
6786 }
6787 }
6788
6789 df = BITMAP_ALLOC (NULL);
6790 df_idom = BITMAP_ALLOC (NULL);
6791
6792 if (none_removed)
6793 bitmap_set_bit (df_idom,
6794 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6795 else
6796 {
438c239d 6797 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
672987e8
ZD
6798 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6799 {
6800 FOR_EACH_EDGE (f, ei, bb->succs)
6801 {
6802 if (f->dest != EXIT_BLOCK_PTR)
6803 bitmap_set_bit (df, f->dest->index);
6804 }
6805 }
6806 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6807 bitmap_clear_bit (df, bb->index);
6808
6809 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6810 {
6811 bb = BASIC_BLOCK (i);
6812 bitmap_set_bit (df_idom,
6813 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6814 }
6815 }
6816
6817 if (cfgcleanup_altered_bbs)
6818 {
6819 /* Record the set of the altered basic blocks. */
6820 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6821 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6822 }
6823
6824 /* Remove E and the cancelled blocks. */
6825 if (none_removed)
6826 remove_edge (e);
6827 else
6828 {
b5b8b0ac
AO
6829 /* Walk backwards so as to get a chance to substitute all
6830 released DEFs into debug stmts. See
6831 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
6832 details. */
6833 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
6834 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
672987e8
ZD
6835 }
6836
6837 /* Update the dominance information. The immediate dominator may change only
6838 for blocks whose immediate dominator belongs to DF_IDOM:
b8698a0f 6839
672987e8
ZD
6840 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6841     removal.  Let Z be a block such that idom(Z) = Y and
6842 Z dominates X after the removal. Before removal, there exists a path P
6843 from Y to X that avoids Z. Let F be the last edge on P that is
6844 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6845 dominates W, and because of P, Z does not dominate W), and W belongs to
b8698a0f 6846 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
672987e8
ZD
6847 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6848 {
6849 bb = BASIC_BLOCK (i);
6850 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6851 dbb;
6852 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6853 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6854 }
6855
66f97d31 6856 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
672987e8
ZD
6857
6858 BITMAP_FREE (df);
6859 BITMAP_FREE (df_idom);
6860 VEC_free (basic_block, heap, bbs_to_remove);
6861 VEC_free (basic_block, heap, bbs_to_fix_dom);
6862}
6863
4f6c2131
EB
6864/* Purge dead EH edges from basic block BB. */
6865
1eaba2f2 6866bool
726a989a 6867gimple_purge_dead_eh_edges (basic_block bb)
1eaba2f2
RH
6868{
6869 bool changed = false;
628f6a4e
BE
6870 edge e;
6871 edge_iterator ei;
726a989a 6872 gimple stmt = last_stmt (bb);
1eaba2f2 6873
726a989a 6874 if (stmt && stmt_can_throw_internal (stmt))
1eaba2f2
RH
6875 return false;
6876
628f6a4e 6877 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1eaba2f2 6878 {
1eaba2f2
RH
6879 if (e->flags & EDGE_EH)
6880 {
672987e8 6881 remove_edge_and_dominated_blocks (e);
1eaba2f2
RH
6882 changed = true;
6883 }
628f6a4e
BE
6884 else
6885 ei_next (&ei);
1eaba2f2
RH
6886 }
6887
6888 return changed;
6889}
6890
6891bool
726a989a 6892gimple_purge_all_dead_eh_edges (const_bitmap blocks)
1eaba2f2
RH
6893{
6894 bool changed = false;
3cd8c58a 6895 unsigned i;
87c476a2 6896 bitmap_iterator bi;
1eaba2f2 6897
87c476a2
ZD
6898 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6899 {
833ee764
JJ
6900 basic_block bb = BASIC_BLOCK (i);
6901
6902 /* Earlier gimple_purge_dead_eh_edges could have removed
6903 this basic block already. */
6904 gcc_assert (bb || changed);
6905 if (bb != NULL)
6906 changed |= gimple_purge_dead_eh_edges (bb);
87c476a2 6907 }
1eaba2f2
RH
6908
6909 return changed;
6910}
6de9cd9a 6911
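A hedged sketch of a caller: record the indexes of blocks whose EH statements were touched, then purge them in one pass.  TO_PURGE and SOME_BB are illustrative names, not part of the original source.

  /* Purge dead EH edges from a set of candidate blocks.  */
  bitmap to_purge = BITMAP_ALLOC (NULL);
  bool cfg_changed;

  bitmap_set_bit (to_purge, some_bb->index);
  cfg_changed = gimple_purge_all_dead_eh_edges (to_purge);

  BITMAP_FREE (to_purge);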
a100ac1e
KH
6912/* This function is called whenever a new edge is created or
6913 redirected. */
6914
6915static void
726a989a 6916gimple_execute_on_growing_pred (edge e)
a100ac1e
KH
6917{
6918 basic_block bb = e->dest;
6919
8eacd016 6920 if (!gimple_seq_empty_p (phi_nodes (bb)))
a100ac1e
KH
6921 reserve_phi_args_for_new_edge (bb);
6922}
6923
e51546f8
KH
6924/* This function is called immediately before edge E is removed from
6925 the edge vector E->dest->preds. */
6926
6927static void
726a989a 6928gimple_execute_on_shrinking_pred (edge e)
e51546f8 6929{
8eacd016 6930 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
e51546f8
KH
6931 remove_phi_args (e);
6932}
6933
1cb7dfc3
MH
6934/*---------------------------------------------------------------------------
6935 Helper functions for Loop versioning
6936 ---------------------------------------------------------------------------*/
6937
6938/* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
6939   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
6940   'new_head' was created by splitting 'second's incoming edge, it received
6941   phi arguments on that edge from split_edge ().  Later, an additional edge
6531d1be
BF
6942   'e' was created to connect 'new_head' and 'first'.  Now this routine adds
6943   to this additional edge 'e' the phi args that the new_head-to-second edge
726a989a 6944   received as part of the edge splitting.  */
1cb7dfc3
MH
6945
6946static void
726a989a
RB
6947gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6948 basic_block new_head, edge e)
1cb7dfc3 6949{
726a989a
RB
6950 gimple phi1, phi2;
6951 gimple_stmt_iterator psi1, psi2;
6952 tree def;
d0e12fc6
KH
6953 edge e2 = find_edge (new_head, second);
6954
6955 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6956 edge, we should always have an edge from NEW_HEAD to SECOND. */
6957 gcc_assert (e2 != NULL);
1cb7dfc3
MH
6958
6959 /* Browse all 'second' basic block phi nodes and add phi args to
6960 edge 'e' for 'first' head. PHI args are always in correct order. */
6961
726a989a
RB
6962 for (psi2 = gsi_start_phis (second),
6963 psi1 = gsi_start_phis (first);
6964 !gsi_end_p (psi2) && !gsi_end_p (psi1);
6965 gsi_next (&psi2), gsi_next (&psi1))
1cb7dfc3 6966 {
726a989a
RB
6967 phi1 = gsi_stmt (psi1);
6968 phi2 = gsi_stmt (psi2);
6969 def = PHI_ARG_DEF (phi2, e2->dest_idx);
f5045c96 6970 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
1cb7dfc3
MH
6971 }
6972}
6973
726a989a 6974
6531d1be
BF
6975/* Adds an if-else statement to COND_BB with condition COND_EXPR.
6976   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
1cb7dfc3 6977   the destination of the ELSE part.  */
726a989a 6978
1cb7dfc3 6979static void
726a989a
RB
6980gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6981 basic_block second_head ATTRIBUTE_UNUSED,
6982 basic_block cond_bb, void *cond_e)
1cb7dfc3 6983{
726a989a
RB
6984 gimple_stmt_iterator gsi;
6985 gimple new_cond_expr;
1cb7dfc3
MH
6986 tree cond_expr = (tree) cond_e;
6987 edge e0;
6988
6989 /* Build new conditional expr */
726a989a
RB
6990 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
6991 NULL_TREE, NULL_TREE);
1cb7dfc3 6992
6531d1be 6993 /* Add new cond in cond_bb. */
726a989a
RB
6994 gsi = gsi_last_bb (cond_bb);
6995 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
6996
1cb7dfc3
MH
6997 /* Adjust edges appropriately to connect new head with first head
6998 as well as second head. */
6999 e0 = single_succ_edge (cond_bb);
7000 e0->flags &= ~EDGE_FALLTHRU;
7001 e0->flags |= EDGE_FALSE_VALUE;
7002}
7003
726a989a
RB
7004struct cfg_hooks gimple_cfg_hooks = {
7005 "gimple",
7006 gimple_verify_flow_info,
7007 gimple_dump_bb, /* dump_bb */
6de9cd9a 7008 create_bb, /* create_basic_block */
726a989a
RB
7009 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7010 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7011 gimple_can_remove_branch_p, /* can_remove_branch_p */
6de9cd9a 7012 remove_bb, /* delete_basic_block */
726a989a
RB
7013 gimple_split_block, /* split_block */
7014 gimple_move_block_after, /* move_block_after */
7015 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7016 gimple_merge_blocks, /* merge_blocks */
7017 gimple_predict_edge, /* predict_edge */
7018 gimple_predicted_by_p, /* predicted_by_p */
7019 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7020 gimple_duplicate_bb, /* duplicate_block */
7021 gimple_split_edge, /* split_edge */
7022  gimple_make_forwarder_block,	/* make_forwarder_block */
6de9cd9a 7023 NULL, /* tidy_fallthru_edge */
726a989a
RB
7024 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7025 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7026 gimple_flow_call_edges_add, /* flow_call_edges_add */
7027 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7028 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7029 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7030 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7031 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
1cb7dfc3 7032 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
6531d1be 7033 flush_pending_stmts /* flush_pending_stmts */
6de9cd9a
DN
7034};
7035
7036
7037/* Split all critical edges. */
7038
c2924966 7039static unsigned int
6de9cd9a
DN
7040split_critical_edges (void)
7041{
7042 basic_block bb;
7043 edge e;
628f6a4e 7044 edge_iterator ei;
6de9cd9a 7045
d6be0d7f
JL
7046 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7047 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7048 mappings around the calls to split_edge. */
7049 start_recording_case_labels ();
6de9cd9a
DN
7050 FOR_ALL_BB (bb)
7051 {
628f6a4e 7052 FOR_EACH_EDGE (e, ei, bb->succs)
496a4ef5
JH
7053 {
7054 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
6de9cd9a 7055 split_edge (e);
b8698a0f 7056 /* PRE inserts statements on edges and expects that
496a4ef5
JH
7057 since split_critical_edges was done beforehand, committing edge
7058 insertions will not split more edges. In addition to critical
7059 edges we must split edges whose source block has multiple successors
b8698a0f 7060 and ends with a control flow statement, such as RESX.
496a4ef5
JH
7061 Go ahead and split them too. This matches the logic in
7062 gimple_find_edge_insert_loc. */
7063 else if ((!single_pred_p (e->dest)
671f9f30 7064 || !gimple_seq_empty_p (phi_nodes (e->dest))
496a4ef5
JH
7065 || e->dest == EXIT_BLOCK_PTR)
7066 && e->src != ENTRY_BLOCK_PTR
7067 && !(e->flags & EDGE_ABNORMAL))
7068 {
7069 gimple_stmt_iterator gsi;
7070
7071 gsi = gsi_last_bb (e->src);
7072 if (!gsi_end_p (gsi)
7073 && stmt_ends_bb_p (gsi_stmt (gsi))
c54c785d
JH
7074 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7075 && !gimple_call_builtin_p (gsi_stmt (gsi),
7076 BUILT_IN_RETURN)))
496a4ef5
JH
7077 split_edge (e);
7078 }
7079 }
6de9cd9a 7080 }
d6be0d7f 7081 end_recording_case_labels ();
c2924966 7082 return 0;
6de9cd9a
DN
7083}
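/* Editor's sketch (illustrative only, not part of GCC): the condition that
   EDGE_CRITICAL_P tests, spelled out.  An edge is critical when its source has
   more than one successor and its destination has more than one predecessor,
   so nothing can be inserted on it without creating a new block -- which is
   what split_edge does above.  The helper name is hypothetical.  */
#if 0
static bool
example_edge_is_critical (edge e)
{
  return EDGE_COUNT (e->src->succs) > 1
	 && EDGE_COUNT (e->dest->preds) > 1;
}
#endif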
7084
8ddbbcae 7085struct gimple_opt_pass pass_split_crit_edges =
6de9cd9a 7086{
8ddbbcae
JH
7087 {
7088 GIMPLE_PASS,
5d44aeed 7089 "crited", /* name */
6de9cd9a
DN
7090 NULL, /* gate */
7091 split_critical_edges, /* execute */
7092 NULL, /* sub */
7093 NULL, /* next */
7094 0, /* static_pass_number */
7095 TV_TREE_SPLIT_EDGES, /* tv_id */
7096 PROP_cfg, /* properties required */
7097 PROP_no_crit_edges, /* properties_provided */
7098 0, /* properties_destroyed */
7099 0, /* todo_flags_start */
9187e02d 7100 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
8ddbbcae 7101 }
6de9cd9a 7102};
26277d41 7103
26277d41 7104
726a989a 7105/* Build a ternary operation and gimplify it. Emit code before GSI.
26277d41
PB
7106 Return the gimple_val holding the result. */
7107
7108tree
726a989a 7109gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
26277d41
PB
7110 tree type, tree a, tree b, tree c)
7111{
7112 tree ret;
db3927fb 7113 location_t loc = gimple_location (gsi_stmt (*gsi));
26277d41 7114
db3927fb 7115 ret = fold_build3_loc (loc, code, type, a, b, c);
26277d41
PB
7116 STRIP_NOPS (ret);
7117
726a989a
RB
7118 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7119 GSI_SAME_STMT);
26277d41
PB
7120}
7121
726a989a 7122/* Build a binary operation and gimplify it. Emit code before GSI.
26277d41
PB
7123 Return the gimple_val holding the result. */
7124
7125tree
726a989a 7126gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
26277d41
PB
7127 tree type, tree a, tree b)
7128{
7129 tree ret;
7130
db3927fb 7131 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
26277d41
PB
7132 STRIP_NOPS (ret);
7133
726a989a
RB
7134 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7135 GSI_SAME_STMT);
26277d41
PB
7136}
7137
726a989a 7138/* Build a unary operation and gimplify it. Emit code before GSI.
26277d41
PB
7139 Return the gimple_val holding the result. */
7140
7141tree
726a989a 7142gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
26277d41
PB
7143 tree a)
7144{
7145 tree ret;
7146
db3927fb 7147 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
26277d41
PB
7148 STRIP_NOPS (ret);
7149
726a989a
RB
7150 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7151 GSI_SAME_STMT);
26277d41
PB
7152}
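/* Editor's sketch (illustrative only, not part of GCC): typical use of the
   gimplify_build* helpers above.  It emits the gimplified form of (A + B) * C
   before the statement at *GSI and returns the gimple_val (SSA name or
   constant) holding the product.  The helper name is hypothetical.  */
#if 0
static tree
example_build_mul_add (gimple_stmt_iterator *gsi, tree a, tree b, tree c)
{
  tree sum = gimplify_build2 (gsi, PLUS_EXPR, TREE_TYPE (a), a, b);
  return gimplify_build2 (gsi, MULT_EXPR, TREE_TYPE (a), sum, c);
}
#endif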
7153
7154
6de9cd9a
DN
7155\f
7156/* Emit return warnings. */
7157
c2924966 7158static unsigned int
6de9cd9a
DN
7159execute_warn_function_return (void)
7160{
9506ac2b 7161 source_location location;
726a989a 7162 gimple last;
6de9cd9a 7163 edge e;
628f6a4e 7164 edge_iterator ei;
6de9cd9a 7165
6de9cd9a
DN
7166 /* If we have a path to EXIT, then we do return. */
7167 if (TREE_THIS_VOLATILE (cfun->decl)
628f6a4e 7168 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
6de9cd9a 7169 {
9506ac2b 7170 location = UNKNOWN_LOCATION;
628f6a4e 7171 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
7172 {
7173 last = last_stmt (e->src);
c54c785d
JH
7174 if ((gimple_code (last) == GIMPLE_RETURN
7175 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
726a989a 7176 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
6de9cd9a
DN
7177 break;
7178 }
9506ac2b
PB
7179 if (location == UNKNOWN_LOCATION)
7180 location = cfun->function_end_locus;
fab922b1 7181 warning_at (location, 0, "%<noreturn%> function does return");
6de9cd9a
DN
7182 }
7183
7184 /* If we see "return;" in some basic block, then we do reach the end
7185 without returning a value. */
7186 else if (warn_return_type
089efaa4 7187 && !TREE_NO_WARNING (cfun->decl)
628f6a4e 7188 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
6de9cd9a
DN
7189 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7190 {
628f6a4e 7191 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a 7192 {
726a989a
RB
7193 gimple last = last_stmt (e->src);
7194 if (gimple_code (last) == GIMPLE_RETURN
7195 && gimple_return_retval (last) == NULL
7196 && !gimple_no_warning_p (last))
6de9cd9a 7197 {
726a989a 7198 location = gimple_location (last);
9506ac2b
PB
7199 if (location == UNKNOWN_LOCATION)
7200 location = cfun->function_end_locus;
aa14403d 7201 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
089efaa4 7202 TREE_NO_WARNING (cfun->decl) = 1;
6de9cd9a
DN
7203 break;
7204 }
7205 }
7206 }
c2924966 7207 return 0;
6de9cd9a
DN
7208}
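/* Editor's sketch (illustrative only, not part of GCC): user code that
   triggers the first diagnostic above.  EXAMPLE_DIE is declared noreturn, yet
   the path where X is zero falls off the end, so EXIT_BLOCK_PTR has a
   predecessor and "noreturn function does return" is emitted.  The function
   name is hypothetical.  */
#if 0
extern void example_die (int) __attribute__ ((noreturn));

void
example_die (int x)
{
  if (x)
    __builtin_exit (1);
  /* Falling off the end here creates a path to EXIT.  */
}
#endif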
7209
7210
7211/* Given a basic block B which ends with a conditional and has
7212 precisely two successors, determine which of the edges is taken if
7213 the conditional is true and which is taken if the conditional is
7214 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7215
7216void
7217extract_true_false_edges_from_block (basic_block b,
7218 edge *true_edge,
7219 edge *false_edge)
7220{
628f6a4e 7221 edge e = EDGE_SUCC (b, 0);
6de9cd9a
DN
7222
7223 if (e->flags & EDGE_TRUE_VALUE)
7224 {
7225 *true_edge = e;
628f6a4e 7226 *false_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7227 }
7228 else
7229 {
7230 *false_edge = e;
628f6a4e 7231 *true_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7232 }
7233}
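/* Editor's sketch (illustrative only, not part of GCC): typical use of
   extract_true_false_edges_from_block on a block BB that ends in a
   GIMPLE_COND, e.g. to find the block reached when the condition holds.
   The helper name is hypothetical.  */
#if 0
static basic_block
example_then_destination (basic_block bb)
{
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  return true_edge->dest;
}
#endif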
7234
8ddbbcae 7235struct gimple_opt_pass pass_warn_function_return =
6de9cd9a 7236{
8ddbbcae
JH
7237 {
7238 GIMPLE_PASS,
e0a42b0f 7239 "*warn_function_return", /* name */
6de9cd9a
DN
7240 NULL, /* gate */
7241 execute_warn_function_return, /* execute */
7242 NULL, /* sub */
7243 NULL, /* next */
7244 0, /* static_pass_number */
7072a650 7245 TV_NONE, /* tv_id */
00bfee6f 7246 PROP_cfg, /* properties_required */
6de9cd9a
DN
7247 0, /* properties_provided */
7248 0, /* properties_destroyed */
7249 0, /* todo_flags_start */
8ddbbcae
JH
7250 0 /* todo_flags_finish */
7251 }
6de9cd9a 7252};
aa313ed4
JH
7253
7254/* Emit noreturn warnings. */
7255
c2924966 7256static unsigned int
aa313ed4
JH
7257execute_warn_function_noreturn (void)
7258{
7259 if (warn_missing_noreturn
7260 && !TREE_THIS_VOLATILE (cfun->decl)
7261 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
e8924938 7262 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
c5d75364
MLI
7263 warning_at (DECL_SOURCE_LOCATION (cfun->decl), OPT_Wmissing_noreturn,
7264 "function might be possible candidate "
7265 "for attribute %<noreturn%>");
c2924966 7266 return 0;
aa313ed4
JH
7267}
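/* Editor's sketch (illustrative only, not part of GCC): user code that
   -Wmissing-noreturn flags.  Every path calls abort (), so the function has
   no edge into EXIT and is suggested as a candidate for the noreturn
   attribute.  The function name is hypothetical.  */
#if 0
extern void abort (void) __attribute__ ((noreturn));

static void
example_fatal (void)
{
  abort ();
}
#endif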
7268
8ddbbcae 7269struct gimple_opt_pass pass_warn_function_noreturn =
aa313ed4 7270{
8ddbbcae
JH
7271 {
7272 GIMPLE_PASS,
e0a42b0f 7273 "*warn_function_noreturn", /* name */
aa313ed4
JH
7274 NULL, /* gate */
7275 execute_warn_function_noreturn, /* execute */
7276 NULL, /* sub */
7277 NULL, /* next */
7278 0, /* static_pass_number */
7072a650 7279 TV_NONE, /* tv_id */
aa313ed4
JH
7280 PROP_cfg, /* properties_required */
7281 0, /* properties_provided */
7282 0, /* properties_destroyed */
7283 0, /* todo_flags_start */
8ddbbcae
JH
7284 0 /* todo_flags_finish */
7285 }
aa313ed4 7286};
a406865a
RG
7287
7288
7289/* Walk a gimplified function and warn about calls whose return value is
7290 ignored although attribute((warn_unused_result)) is set on the callee.
7291 This is done before inlining, so we don't have to worry about that. */
7292
7293static void
7294do_warn_unused_result (gimple_seq seq)
7295{
7296 tree fdecl, ftype;
7297 gimple_stmt_iterator i;
7298
7299 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7300 {
7301 gimple g = gsi_stmt (i);
7302
7303 switch (gimple_code (g))
7304 {
7305 case GIMPLE_BIND:
7306 do_warn_unused_result (gimple_bind_body (g));
7307 break;
7308 case GIMPLE_TRY:
7309 do_warn_unused_result (gimple_try_eval (g));
7310 do_warn_unused_result (gimple_try_cleanup (g));
7311 break;
7312 case GIMPLE_CATCH:
7313 do_warn_unused_result (gimple_catch_handler (g));
7314 break;
7315 case GIMPLE_EH_FILTER:
7316 do_warn_unused_result (gimple_eh_filter_failure (g));
7317 break;
7318
7319 case GIMPLE_CALL:
7320 if (gimple_call_lhs (g))
7321 break;
7322
7323 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7324 LHS. All calls whose value is ignored should be
7325 represented like this. Look for the attribute. */
7326 fdecl = gimple_call_fndecl (g);
7327 ftype = TREE_TYPE (TREE_TYPE (gimple_call_fn (g)));
7328
7329 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7330 {
7331 location_t loc = gimple_location (g);
7332
7333 if (fdecl)
7334 warning_at (loc, OPT_Wunused_result,
7335 "ignoring return value of %qD, "
7336 "declared with attribute warn_unused_result",
7337 fdecl);
7338 else
7339 warning_at (loc, OPT_Wunused_result,
7340 "ignoring return value of function "
7341 "declared with attribute warn_unused_result");
7342 }
7343 break;
7344
7345 default:
7346 /* Not a container, not a call, or a call whose value is used. */
7347 break;
7348 }
7349 }
7350}
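/* Editor's sketch (illustrative only, not part of GCC): the kind of user code
   the walk above diagnoses.  The call statement has no LHS, so the GIMPLE_CALL
   is "naked" and the attribute on the callee's type triggers -Wunused-result.
   The identifiers are hypothetical.  */
#if 0
extern int example_must_check (void) __attribute__ ((warn_unused_result));

void
example_caller (void)
{
  example_must_check ();	/* warning: ignoring return value ... */
}
#endif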
7351
7352static unsigned int
7353run_warn_unused_result (void)
7354{
7355 do_warn_unused_result (gimple_body (current_function_decl));
7356 return 0;
7357}
7358
7359static bool
7360gate_warn_unused_result (void)
7361{
7362 return flag_warn_unused_result;
7363}
7364
7365struct gimple_opt_pass pass_warn_unused_result =
7366{
7367 {
7368 GIMPLE_PASS,
2329c6f5 7369 "*warn_unused_result", /* name */
a406865a
RG
7370 gate_warn_unused_result, /* gate */
7371 run_warn_unused_result, /* execute */
7372 NULL, /* sub */
7373 NULL, /* next */
7374 0, /* static_pass_number */
7375 TV_NONE, /* tv_id */
7376 PROP_gimple_any, /* properties_required */
7377 0, /* properties_provided */
7378 0, /* properties_destroyed */
7379 0, /* todo_flags_start */
7380 0, /* todo_flags_finish */
7381 }
7382};
7383