/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"
#include "target.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
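  /* For instance, several 'goto *p;' statements are each rewritten to
     store their destination into a shared temporary and to jump to a
     single new block containing one 'goto *gotovar;' (see
     factor_computed_gotos below).  */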
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (basic_block_info->length () < (size_t) n_basic_blocks)
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg	/* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */
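/* That is, a GIMPLE_GOTO whose destination is not a LABEL_DECL, such as
   the jump emitted for 'goto *ptr;'.  */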

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
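      /* Grow the array by roughly a quarter of its current size (and by
         at least one element) so repeated block creation does not
         reallocate on every call.  */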
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
                                  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
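/* For example, in

     if (p) foo (); else bar ();

   the calls to foo and bar fall into different basic blocks that share
   one source line, so giving them distinct discriminators lets a
   sample-based profiler attribute samples to the correct block.  */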

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                              (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  location_t stmt_loc = gimple_location (stmt);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
      assign_discriminator (stmt_loc, label_bb);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}
1497
9845d120 1498/* Return true if the var whose chain of uses starts at PTR has no
1499 nondebug uses. */
1500bool
1501has_zero_uses_1 (const ssa_use_operand_t *head)
1502{
1503 const ssa_use_operand_t *ptr;
1504
1505 for (ptr = head->next; ptr != head; ptr = ptr->next)
1506 if (!is_gimple_debug (USE_STMT (ptr)))
1507 return false;
1508
1509 return true;
1510}
1511
1512/* Return true if the var whose chain of uses starts at PTR has a
1513 single nondebug use. Set USE_P and STMT to that single nondebug
1514 use, if so, or to NULL otherwise. */
1515bool
1516single_imm_use_1 (const ssa_use_operand_t *head,
1517 use_operand_p *use_p, gimple *stmt)
1518{
1519 ssa_use_operand_t *ptr, *single_use = 0;
1520
1521 for (ptr = head->next; ptr != head; ptr = ptr->next)
1522 if (!is_gimple_debug (USE_STMT (ptr)))
1523 {
1524 if (single_use)
1525 {
1526 single_use = NULL;
1527 break;
1528 }
1529 single_use = ptr;
1530 }
1531
1532 if (use_p)
1533 *use_p = single_use;
1534
1535 if (stmt)
1536 *stmt = single_use ? single_use->loc.stmt : NULL;
1537
1538 return !!single_use;
1539}
1540
8171a1dd 1541/* Replaces all uses of NAME by VAL. */
1542
10fec820 1543void
8171a1dd 1544replace_uses_by (tree name, tree val)
1545{
1546 imm_use_iterator imm_iter;
1547 use_operand_p use;
75a70cf9 1548 gimple stmt;
8171a1dd 1549 edge e;
8171a1dd 1550
09aca5bc 1551 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
8171a1dd 1552 {
09aca5bc 1553 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1554 {
1555 replace_exp (use, val);
8171a1dd 1556
75a70cf9 1557 if (gimple_code (stmt) == GIMPLE_PHI)
8171a1dd 1558 {
75a70cf9 1559 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
09aca5bc 1560 if (e->flags & EDGE_ABNORMAL)
1561 {
1562 /* This can only occur for virtual operands, since
1563 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1564 would prevent replacement. */
7c782c9b 1565 gcc_checking_assert (virtual_operand_p (name));
09aca5bc 1566 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1567 }
8171a1dd 1568 }
1569 }
de6ed584 1570
75a70cf9 1571 if (gimple_code (stmt) != GIMPLE_PHI)
09aca5bc 1572 {
de215c53 1573 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
9f3977d9 1574 gimple orig_stmt = stmt;
75a70cf9 1575 size_t i;
fcd2418b 1576
b03f58fa 1577 /* Mark the block if we changed the last stmt in it. */
1578 if (cfgcleanup_altered_bbs
1579 && stmt_ends_bb_p (stmt))
75a70cf9 1580 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
de6ed584 1581
b03f58fa 1582 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1583 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1584 only change sth from non-invariant to invariant, and only
f86a7907 1585 when propagating constants. */
1586 if (is_gimple_min_invariant (val))
b03f58fa 1587 for (i = 0; i < gimple_num_ops (stmt); i++)
1588 {
1589 tree op = gimple_op (stmt, i);
1590 /* Operands may be empty here. For example, the labels
1591 of a GIMPLE_COND are nulled out following the creation
1592 of the corresponding CFG edges. */
1593 if (op && TREE_CODE (op) == ADDR_EXPR)
1594 recompute_tree_invariant_for_addr_expr (op);
1595 }
1596
1597 if (fold_stmt (&gsi))
313b3101 1598 stmt = gsi_stmt (gsi);
1599
a79e8e8c 1600 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1601 gimple_purge_dead_eh_edges (gimple_bb (stmt));
fcd2418b 1602
4c5fd53c 1603 update_stmt (stmt);
09aca5bc 1604 }
8171a1dd 1605 }
13b96211 1606
b03f58fa 1607 gcc_checking_assert (has_zero_uses (name));
f3830c7c 1608
1609 /* Also update the trees stored in loop structures. */
1610 if (current_loops)
1611 {
1612 struct loop *loop;
17519ba0 1613 loop_iterator li;
f3830c7c 1614
17519ba0 1615 FOR_EACH_LOOP (li, loop, 0)
f3830c7c 1616 {
17519ba0 1617 substitute_in_loop_info (loop, name, val);
f3830c7c 1618 }
1619 }
8171a1dd 1620}
4ee9c684 1621
1622/* Merge block B into block A. */
1623
1624static void
75a70cf9 1625gimple_merge_blocks (basic_block a, basic_block b)
4ee9c684 1626{
75a70cf9 1627 gimple_stmt_iterator last, gsi, psi;
4ee9c684 1628
1629 if (dump_file)
1630 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1631
ae79515f 1632 /* Remove all single-valued PHI nodes from block B of the form
1633 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
75a70cf9 1634 gsi = gsi_last_bb (a);
e3a19533 1635 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
8171a1dd 1636 {
75a70cf9 1637 gimple phi = gsi_stmt (psi);
1638 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1639 gimple copy;
7c782c9b 1640 bool may_replace_uses = (virtual_operand_p (def)
1641 || may_propagate_copy (def, use));
4f62605b 1642
fd859561 1643 /* In case we maintain loop closed ssa form, do not propagate arguments
1644 of loop exit phi nodes. */
4f62605b 1645 if (current_loops
f24ec26f 1646 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
7c782c9b 1647 && !virtual_operand_p (def)
4f62605b 1648 && TREE_CODE (use) == SSA_NAME
1649 && a->loop_father != b->loop_father)
1650 may_replace_uses = false;
1651
1652 if (!may_replace_uses)
8171a1dd 1653 {
7c782c9b 1654 gcc_assert (!virtual_operand_p (def));
8171a1dd 1655
c78cbec8 1656 /* Note that just emitting the copies is fine -- there is no problem
8171a1dd 1657 with ordering of phi nodes. This is because A is the single
1658 predecessor of B, therefore results of the phi nodes cannot
1659 appear as arguments of the phi nodes. */
75a70cf9 1660 copy = gimple_build_assign (def, use);
1661 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1662 remove_phi_node (&psi, false);
8171a1dd 1663 }
1664 else
18a524af 1665 {
5c6e4974 1666 /* If we deal with a PHI for virtual operands, we can simply
1667 propagate these without fussing with folding or updating
1668 the stmt. */
7c782c9b 1669 if (virtual_operand_p (def))
5c6e4974 1670 {
1671 imm_use_iterator iter;
1672 use_operand_p use_p;
75a70cf9 1673 gimple stmt;
5c6e4974 1674
1675 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1676 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1677 SET_USE (use_p, use);
ef6748ff 1678
1679 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1680 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
5c6e4974 1681 }
1682 else
1683 replace_uses_by (def, use);
75a70cf9 1684
1685 remove_phi_node (&psi, true);
18a524af 1686 }
8171a1dd 1687 }
1688
4ee9c684 1689 /* Ensure that B follows A. */
1690 move_block_after (b, a);
1691
ea091dfd 1692 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
8c0963c4 1693 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
4ee9c684 1694
75a70cf9 1695 /* Remove labels from B and set gimple_bb to A for other statements. */
1696 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
4ee9c684 1697 {
e38def9c 1698 gimple stmt = gsi_stmt (gsi);
1699 if (gimple_code (stmt) == GIMPLE_LABEL)
6d7413d8 1700 {
e38def9c 1701 tree label = gimple_label_label (stmt);
1702 int lp_nr;
75a70cf9 1703
1704 gsi_remove (&gsi, false);
6d7413d8 1705
6d7413d8 1706 /* Now that we can thread computed gotos, we might have
1707 a situation where we have a forced label in block B
1708 However, the label at the start of block B might still be
1709 used in other ways (think about the runtime checking for
1710 Fortran assigned gotos). So we can not just delete the
1711 label. Instead we move the label to the start of block A. */
e38def9c 1712 if (FORCED_LABEL (label))
6d7413d8 1713 {
75a70cf9 1714 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
e38def9c 1715 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1716 }
63f5ad44 1717 /* Other user labels keep around in a form of a debug stmt. */
1718 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1719 {
1720 gimple dbg = gimple_build_debug_bind (label,
1721 integer_zero_node,
1722 stmt);
1723 gimple_debug_bind_reset_value (dbg);
1724 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1725 }
e38def9c 1726
1727 lp_nr = EH_LANDING_PAD_NR (label);
1728 if (lp_nr)
1729 {
1730 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1731 lp->post_landing_pad = NULL;
6d7413d8 1732 }
1733 }
4ee9c684 1734 else
1735 {
e38def9c 1736 gimple_set_bb (stmt, a);
75a70cf9 1737 gsi_next (&gsi);
4ee9c684 1738 }
1739 }
1740
75a70cf9 1741 /* Merge the sequences. */
1742 last = gsi_last_bb (a);
1743 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1744 set_bb_seq (b, NULL);
31a8456e 1745
1746 if (cfgcleanup_altered_bbs)
1747 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
4ee9c684 1748}
1749
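/* As a small illustration of the PHI handling above (SSA names below are
   hypothetical):  when merging block B into its single predecessor A and
   B starts with

       V_7 = PHI <V_5 (A)>

   the code either propagates V_5 into every use of V_7 (when
   may_propagate_copy allows it, or when V_7 is a virtual operand) or,
   failing that, appends the plain copy

       V_7 = V_5;

   at the end of A.  In both cases the PHI node itself is removed before
   the statement sequences of A and B are concatenated.  */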
1750
ac70caad 1751/* Return the one of BB's two successors that is not reachable by a
2c2eb7fa 1752   complex edge, if there is one.  Else, return BB.  We use
ac70caad 1753 this in optimizations that use post-dominators for their heuristics,
1754 to catch the cases in C++ where function calls are involved. */
13b96211 1755
ac70caad 1756basic_block
13b96211 1757single_noncomplex_succ (basic_block bb)
ac70caad 1758{
1759 edge e0, e1;
1760 if (EDGE_COUNT (bb->succs) != 2)
1761 return bb;
13b96211 1762
ac70caad 1763 e0 = EDGE_SUCC (bb, 0);
1764 e1 = EDGE_SUCC (bb, 1);
1765 if (e0->flags & EDGE_COMPLEX)
1766 return e1->dest;
1767 if (e1->flags & EDGE_COMPLEX)
1768 return e0->dest;
13b96211 1769
ac70caad 1770 return bb;
13b96211 1771}
ac70caad 1772
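/* Typical situation (hypothetical):  BB ends in a call that can throw,
   so it has a normal fallthru edge and an EH edge marked EDGE_COMPLEX;
   single_noncomplex_succ then returns the fallthru destination.  A
   minimal usage sketch:

       basic_block succ = single_noncomplex_succ (bb);
       if (succ != bb)
         ...  use SUCC for the post-dominator heuristic ...

   If BB does not have exactly two successors, or neither edge is
   complex, BB itself comes back.  */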
4ee9c684 1773/* CALL is a GIMPLE_CALL. Set current_function_calls_* flags. */
1774
1775void
75a70cf9 1776notice_special_calls (gimple call)
4ee9c684 1777{
75a70cf9 1778 int flags = gimple_call_flags (call);
4ee9c684 1779
1780 if (flags & ECF_MAY_BE_ALLOCA)
18d50ae6 1781 cfun->calls_alloca = true;
4ee9c684 1782 if (flags & ECF_RETURNS_TWICE)
18d50ae6 1783 cfun->calls_setjmp = true;
4ee9c684 1784}
1785
1786
1787/* Clear flags set by notice_special_calls. Used by dead code removal
1788 to update the flags. */
1789
1790void
1791clear_special_calls (void)
1792{
18d50ae6 1793 cfun->calls_alloca = false;
1794 cfun->calls_setjmp = false;
4ee9c684 1795}
1796
4ee9c684 1797/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1798
1799static void
1800remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1801{
4ee9c684 1802 /* Since this block is no longer reachable, we can just delete all
1803 of its PHI nodes. */
899e6126 1804 remove_phi_nodes (bb);
4ee9c684 1805
1806 /* Remove edges to BB's successors. */
cd665a06 1807 while (EDGE_COUNT (bb->succs) > 0)
0891994d 1808 remove_edge (EDGE_SUCC (bb, 0));
4ee9c684 1809}
1810
1811
1812/* Remove statements of basic block BB. */
1813
1814static void
1815remove_bb (basic_block bb)
1816{
75a70cf9 1817 gimple_stmt_iterator i;
4ee9c684 1818
1819 if (dump_file)
1820 {
1821 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1822 if (dump_flags & TDF_DETAILS)
1823 {
bec2cf98 1824 dump_bb (dump_file, bb, 0, dump_flags);
4ee9c684 1825 fprintf (dump_file, "\n");
1826 }
1827 }
1828
053fdd99 1829 if (current_loops)
1830 {
1831 struct loop *loop = bb->loop_father;
1832
88e6f696 1833 /* If a loop gets removed, clean up the information associated
1834 with it. */
053fdd99 1835 if (loop->latch == bb
1836 || loop->header == bb)
88e6f696 1837 free_numbers_of_iterations_estimates_loop (loop);
053fdd99 1838 }
1839
4ee9c684 1840 /* Remove all the instructions in the block. */
75a70cf9 1841 if (bb_seq (bb) != NULL)
4ee9c684 1842 {
9845d120 1843 /* Walk backwards so as to get a chance to substitute all
1844 released DEFs into debug stmts. See
1845 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1846 details. */
1847 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
4110ed3a 1848 {
75a70cf9 1849 gimple stmt = gsi_stmt (i);
1850 if (gimple_code (stmt) == GIMPLE_LABEL
1851 && (FORCED_LABEL (gimple_label_label (stmt))
1852 || DECL_NONLOCAL (gimple_label_label (stmt))))
c23dad79 1853 {
1854 basic_block new_bb;
75a70cf9 1855 gimple_stmt_iterator new_gsi;
c23dad79 1856
1857 /* A non-reachable non-local label may still be referenced.
1858 But it no longer needs to carry the extra semantics of
1859 non-locality. */
75a70cf9 1860 if (DECL_NONLOCAL (gimple_label_label (stmt)))
c23dad79 1861 {
75a70cf9 1862 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1863 FORCED_LABEL (gimple_label_label (stmt)) = 1;
c23dad79 1864 }
2fc7d87c 1865
c23dad79 1866 new_bb = bb->prev_bb;
75a70cf9 1867 new_gsi = gsi_start_bb (new_bb);
1868 gsi_remove (&i, false);
1869 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
c23dad79 1870 }
1871 else
2fc7d87c 1872 {
c23dad79 1873 /* Release SSA definitions if we are in SSA. Note that we
1874 may be called when not in SSA. For example,
1875 final_cleanup calls this function via
1876 cleanup_tree_cfg. */
1877 if (gimple_in_ssa_p (cfun))
1878 release_defs (stmt);
1879
75a70cf9 1880 gsi_remove (&i, true);
2fc7d87c 1881 }
13b96211 1882
9845d120 1883 if (gsi_end_p (i))
1884 i = gsi_last_bb (bb);
1885 else
1886 gsi_prev (&i);
ba821eb1 1887 }
4ee9c684 1888 }
1889
4ee9c684 1890 remove_phi_nodes_and_edges_for_unreachable_block (bb);
924c4c71 1891 bb->il.gimple.seq = NULL;
1892 bb->il.gimple.phi_nodes = NULL;
4ee9c684 1893}
1894
4ee9c684 1895
1740345a 1896/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1897 predicate VAL, return the edge that will be taken out of the block.
1898 If VAL does not match a unique edge, NULL is returned. */
4ee9c684 1899
1900edge
1901find_taken_edge (basic_block bb, tree val)
1902{
75a70cf9 1903 gimple stmt;
4ee9c684 1904
1905 stmt = last_stmt (bb);
1906
8c0963c4 1907 gcc_assert (stmt);
1908 gcc_assert (is_ctrl_stmt (stmt));
4ee9c684 1909
75a70cf9 1910 if (val == NULL)
1911 return NULL;
1912
1913 if (!is_gimple_min_invariant (val))
4ee9c684 1914 return NULL;
1915
75a70cf9 1916 if (gimple_code (stmt) == GIMPLE_COND)
4ee9c684 1917 return find_taken_edge_cond_expr (bb, val);
1918
75a70cf9 1919 if (gimple_code (stmt) == GIMPLE_SWITCH)
4ee9c684 1920 return find_taken_edge_switch_expr (bb, val);
1921
6d7413d8 1922 if (computed_goto_p (stmt))
94ca4916 1923 {
 1924      /* Only optimize if the argument is a label; if the argument is
 1925	 not a label then we cannot construct a proper CFG.
1926
1927 It may be the case that we only need to allow the LABEL_REF to
1928 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1929 appear inside a LABEL_EXPR just to be safe. */
1930 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1931 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1932 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1933 return NULL;
1934 }
6d7413d8 1935
1740345a 1936 gcc_unreachable ();
4ee9c684 1937}
1938
6d7413d8 1939/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1940 statement, determine which of the outgoing edges will be taken out of the
1941 block. Return NULL if either edge may be taken. */
1942
1943static edge
1944find_taken_edge_computed_goto (basic_block bb, tree val)
1945{
1946 basic_block dest;
1947 edge e = NULL;
1948
1949 dest = label_to_block (val);
1950 if (dest)
1951 {
1952 e = find_edge (bb, dest);
1953 gcc_assert (e != NULL);
1954 }
1955
1956 return e;
1957}
4ee9c684 1958
1959/* Given a constant value VAL and the entry block BB to a COND_EXPR
1960 statement, determine which of the two edges will be taken out of the
1961 block. Return NULL if either edge may be taken. */
1962
1963static edge
1964find_taken_edge_cond_expr (basic_block bb, tree val)
1965{
1966 edge true_edge, false_edge;
1967
1968 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
13b96211 1969
36204c30 1970 gcc_assert (TREE_CODE (val) == INTEGER_CST);
cd743a11 1971 return (integer_zerop (val) ? false_edge : true_edge);
4ee9c684 1972}
1973
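/* Example (hypothetical GIMPLE):  for a block ending in

       if (i_2 > 10) goto <L1>; else goto <L2>;

   a caller that has determined the condition value to be the constant 0
   gets the false edge (to <L2>) back, while any nonzero INTEGER_CST
   selects the true edge.  */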
9677695f 1974/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
4ee9c684 1975 statement, determine which edge will be taken out of the block. Return
1976 NULL if any edge may be taken. */
1977
1978static edge
1979find_taken_edge_switch_expr (basic_block bb, tree val)
1980{
4ee9c684 1981 basic_block dest_bb;
1982 edge e;
75a70cf9 1983 gimple switch_stmt;
1984 tree taken_case;
4ee9c684 1985
75a70cf9 1986 switch_stmt = last_stmt (bb);
1987 taken_case = find_case_label_for_value (switch_stmt, val);
4ee9c684 1988 dest_bb = label_to_block (CASE_LABEL (taken_case));
1989
1990 e = find_edge (bb, dest_bb);
8c0963c4 1991 gcc_assert (e);
4ee9c684 1992 return e;
1993}
1994
1995
75a70cf9 1996/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
b624a250 1997 We can make optimal use here of the fact that the case labels are
1998 sorted: We can do a binary search for a case matching VAL. */
4ee9c684 1999
2000static tree
75a70cf9 2001find_case_label_for_value (gimple switch_stmt, tree val)
4ee9c684 2002{
75a70cf9 2003 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2004 tree default_case = gimple_switch_default_label (switch_stmt);
4ee9c684 2005
75a70cf9 2006 for (low = 0, high = n; high - low > 1; )
4ee9c684 2007 {
b624a250 2008 size_t i = (high + low) / 2;
75a70cf9 2009 tree t = gimple_switch_label (switch_stmt, i);
b624a250 2010 int cmp;
2011
2012 /* Cache the result of comparing CASE_LOW and val. */
2013 cmp = tree_int_cst_compare (CASE_LOW (t), val);
4ee9c684 2014
b624a250 2015 if (cmp > 0)
2016 high = i;
2017 else
2018 low = i;
2019
2020 if (CASE_HIGH (t) == NULL)
4ee9c684 2021 {
b624a250 2022	  /* A single-valued case label. */
2023 if (cmp == 0)
4ee9c684 2024 return t;
2025 }
2026 else
2027 {
2028 /* A case range. We can only handle integer ranges. */
b624a250 2029 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
4ee9c684 2030 return t;
2031 }
2032 }
2033
4ee9c684 2034 return default_case;
2035}
2036
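/* Worked example of the binary search above, using a hypothetical
   switch whose label vector (index 0 is the default label) is

       [0] default:   [1] case 1:   [2] case 3 ... 5:   [3] case 9:

   For VAL == 4:  low = 0, high = 4; the probe at i = 2 compares
   CASE_LOW == 3 against 4 (cmp < 0) and, since CASE_HIGH == 5 >= 4,
   label [2] is returned.  For VAL == 7 the same probe narrows low to 2,
   the next probe at i = 3 narrows high to 3, the loop exits with
   high - low == 1, and the default label is returned.  */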
2037
4ee9c684 2038/* Dump a basic block on stderr. */
2039
2040void
75a70cf9 2041gimple_debug_bb (basic_block bb)
4ee9c684 2042{
bec2cf98 2043 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
4ee9c684 2044}
2045
2046
2047/* Dump basic block with index N on stderr. */
2048
2049basic_block
75a70cf9 2050gimple_debug_bb_n (int n)
4ee9c684 2051{
75a70cf9 2052 gimple_debug_bb (BASIC_BLOCK (n));
4ee9c684 2053 return BASIC_BLOCK (n);
13b96211 2054}
4ee9c684 2055
2056
2057/* Dump the CFG on stderr.
2058
2059 FLAGS are the same used by the tree dumping functions
0cd02a19 2060 (see TDF_* in dumpfile.h). */
4ee9c684 2061
2062void
75a70cf9 2063gimple_debug_cfg (int flags)
4ee9c684 2064{
75a70cf9 2065 gimple_dump_cfg (stderr, flags);
4ee9c684 2066}
2067
2068
2069/* Dump the program showing basic block boundaries on the given FILE.
2070
2071 FLAGS are the same used by the tree dumping functions (see TDF_* in
2072 tree.h). */
2073
2074void
75a70cf9 2075gimple_dump_cfg (FILE *file, int flags)
4ee9c684 2076{
2077 if (flags & TDF_DETAILS)
2078 {
55b028fe 2079 dump_function_header (file, current_function_decl, flags);
4ee9c684 2080 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2081 n_basic_blocks, n_edges, last_basic_block);
2082
bec2cf98 2083 brief_dump_cfg (file, flags | TDF_COMMENT);
4ee9c684 2084 fprintf (file, "\n");
2085 }
2086
2087 if (flags & TDF_STATS)
2088 dump_cfg_stats (file);
2089
2090 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2091}
2092
2093
2094/* Dump CFG statistics on FILE. */
2095
2096void
2097dump_cfg_stats (FILE *file)
2098{
2099 static long max_num_merged_labels = 0;
2100 unsigned long size, total = 0;
c75de2f7 2101 long num_edges;
4ee9c684 2102 basic_block bb;
2103 const char * const fmt_str = "%-30s%-13s%12s\n";
56d16a1a 2104 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
69c638bc 2105 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
4ee9c684 2106 const char * const fmt_str_3 = "%-43s%11lu%c\n";
b9ed1410 2107 const char *funcname = current_function_name ();
4ee9c684 2108
2109 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2110
2111 fprintf (file, "---------------------------------------------------------\n");
2112 fprintf (file, fmt_str, "", " Number of ", "Memory");
2113 fprintf (file, fmt_str, "", " instances ", "used ");
2114 fprintf (file, "---------------------------------------------------------\n");
2115
2116 size = n_basic_blocks * sizeof (struct basic_block_def);
2117 total += size;
56d16a1a 2118 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2119 SCALE (size), LABEL (size));
4ee9c684 2120
c75de2f7 2121 num_edges = 0;
4ee9c684 2122 FOR_EACH_BB (bb)
c75de2f7 2123 num_edges += EDGE_COUNT (bb->succs);
2124 size = num_edges * sizeof (struct edge_def);
4ee9c684 2125 total += size;
69c638bc 2126 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
4ee9c684 2127
4ee9c684 2128 fprintf (file, "---------------------------------------------------------\n");
2129 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2130 LABEL (total));
2131 fprintf (file, "---------------------------------------------------------\n");
2132 fprintf (file, "\n");
2133
2134 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2135 max_num_merged_labels = cfg_stats.num_merged_labels;
2136
2137 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2138 cfg_stats.num_merged_labels, max_num_merged_labels);
2139
2140 fprintf (file, "\n");
2141}
2142
2143
2144/* Dump CFG statistics on stderr. Keep extern so that it's always
2145 linked in the final executable. */
2146
4b987fac 2147DEBUG_FUNCTION void
4ee9c684 2148debug_cfg_stats (void)
2149{
2150 dump_cfg_stats (stderr);
2151}
2152
2153
2154/* Dump the flowgraph to a .vcg FILE. */
2155
2156static void
75a70cf9 2157gimple_cfg2vcg (FILE *file)
4ee9c684 2158{
2159 edge e;
cd665a06 2160 edge_iterator ei;
4ee9c684 2161 basic_block bb;
b9ed1410 2162 const char *funcname = current_function_name ();
4ee9c684 2163
2164 /* Write the file header. */
2165 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2166 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2167 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2168
2169 /* Write blocks and edges. */
cd665a06 2170 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4ee9c684 2171 {
2172 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2173 e->dest->index);
2174
2175 if (e->flags & EDGE_FAKE)
2176 fprintf (file, " linestyle: dotted priority: 10");
2177 else
2178 fprintf (file, " linestyle: solid priority: 100");
2179
2180 fprintf (file, " }\n");
2181 }
2182 fputc ('\n', file);
2183
2184 FOR_EACH_BB (bb)
2185 {
75a70cf9 2186 enum gimple_code head_code, end_code;
4ee9c684 2187 const char *head_name, *end_name;
2188 int head_line = 0;
2189 int end_line = 0;
75a70cf9 2190 gimple first = first_stmt (bb);
2191 gimple last = last_stmt (bb);
4ee9c684 2192
2193 if (first)
2194 {
75a70cf9 2195 head_code = gimple_code (first);
2196 head_name = gimple_code_name[head_code];
4ee9c684 2197 head_line = get_lineno (first);
2198 }
2199 else
2200 head_name = "no-statement";
2201
2202 if (last)
2203 {
75a70cf9 2204 end_code = gimple_code (last);
2205 end_name = gimple_code_name[end_code];
4ee9c684 2206 end_line = get_lineno (last);
2207 }
2208 else
2209 end_name = "no-statement";
2210
2211 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2212 bb->index, bb->index, head_name, head_line, end_name,
2213 end_line);
2214
cd665a06 2215 FOR_EACH_EDGE (e, ei, bb->succs)
4ee9c684 2216 {
2217 if (e->dest == EXIT_BLOCK_PTR)
2218 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2219 else
2220 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2221
2222 if (e->flags & EDGE_FAKE)
2223 fprintf (file, " priority: 10 linestyle: dotted");
2224 else
2225 fprintf (file, " priority: 100 linestyle: solid");
2226
2227 fprintf (file, " }\n");
2228 }
2229
2230 if (bb->next_bb != EXIT_BLOCK_PTR)
2231 fputc ('\n', file);
2232 }
2233
2234 fputs ("}\n\n", file);
2235}
2236
2237
2238
2239/*---------------------------------------------------------------------------
2240 Miscellaneous helpers
2241---------------------------------------------------------------------------*/
2242
822dbdbc 2243/* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2244 flow. Transfers of control flow associated with EH are excluded. */
2245
2246static bool
2247call_can_make_abnormal_goto (gimple t)
2248{
2249 /* If the function has no non-local labels, then a call cannot make an
2250 abnormal transfer of control. */
2251 if (!cfun->has_nonlocal_label)
2252 return false;
2253
2254 /* Likewise if the call has no side effects. */
2255 if (!gimple_has_side_effects (t))
2256 return false;
2257
2258 /* Likewise if the called function is leaf. */
2259 if (gimple_call_flags (t) & ECF_LEAF)
2260 return false;
2261
2262 return true;
2263}
2264
2265
2266/* Return true if T can make an abnormal transfer of control flow.
2267 Transfers of control flow associated with EH are excluded. */
2268
2269bool
2270stmt_can_make_abnormal_goto (gimple t)
2271{
2272 if (computed_goto_p (t))
2273 return true;
2274 if (is_gimple_call (t))
2275 return call_can_make_abnormal_goto (t);
2276 return false;
2277}
2278
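/* For instance, a computed jump such as

       goto *p_3;

   always qualifies, whereas a plain call

       foo ();

   qualifies only if the current function contains a nonlocal label that
   foo might branch back to, the call has side effects, and foo is not
   declared leaf (ECF_LEAF).  Transfers of control via EH are handled
   separately and are not reported here.  */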
2279
4ee9c684 2280/* Return true if T represents a stmt that always transfers control. */
2281
2282bool
75a70cf9 2283is_ctrl_stmt (gimple t)
4ee9c684 2284{
977411a6 2285 switch (gimple_code (t))
2286 {
2287 case GIMPLE_COND:
2288 case GIMPLE_SWITCH:
2289 case GIMPLE_GOTO:
2290 case GIMPLE_RETURN:
2291 case GIMPLE_RESX:
2292 return true;
2293 default:
2294 return false;
2295 }
4ee9c684 2296}
2297
2298
2299/* Return true if T is a statement that may alter the flow of control
2300 (e.g., a call to a non-returning function). */
2301
2302bool
75a70cf9 2303is_ctrl_altering_stmt (gimple t)
4ee9c684 2304{
8c0963c4 2305 gcc_assert (t);
75a70cf9 2306
1a9a4436 2307 switch (gimple_code (t))
4ee9c684 2308 {
1a9a4436 2309 case GIMPLE_CALL:
2310 {
2311 int flags = gimple_call_flags (t);
75a70cf9 2312
822dbdbc 2313 /* A call alters control flow if it can make an abnormal goto. */
2314 if (call_can_make_abnormal_goto (t))
1a9a4436 2315 return true;
4ee9c684 2316
1a9a4436 2317 /* A call also alters control flow if it does not return. */
4569d18c 2318 if (flags & ECF_NORETURN)
1a9a4436 2319 return true;
3ea38c1f 2320
4c0315d0 2321 /* TM ending statements have backedges out of the transaction.
2322 Return true so we split the basic block containing them.
2323 Note that the TM_BUILTIN test is merely an optimization. */
2324 if ((flags & ECF_TM_BUILTIN)
2325 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2326 return true;
2327
3ea38c1f 2328 /* BUILT_IN_RETURN call is same as return statement. */
2329 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2330 return true;
1a9a4436 2331 }
2332 break;
4ee9c684 2333
e38def9c 2334 case GIMPLE_EH_DISPATCH:
2335 /* EH_DISPATCH branches to the individual catch handlers at
2336 this level of a try or allowed-exceptions region. It can
2337 fallthru to the next statement as well. */
2338 return true;
2339
78f55ca8 2340 case GIMPLE_ASM:
2341 if (gimple_asm_nlabels (t) > 0)
2342 return true;
2343 break;
2344
1a9a4436 2345 CASE_GIMPLE_OMP:
2346 /* OpenMP directives alter control flow. */
2347 return true;
2348
4c0315d0 2349 case GIMPLE_TRANSACTION:
2350 /* A transaction start alters control flow. */
2351 return true;
2352
1a9a4436 2353 default:
2354 break;
2355 }
773c5ba7 2356
4ee9c684 2357 /* If a statement can throw, it alters control flow. */
75a70cf9 2358 return stmt_can_throw_internal (t);
4ee9c684 2359}
2360
2361
2c8a1497 2362/* Return true if T is a simple local goto. */
4ee9c684 2363
2364bool
75a70cf9 2365simple_goto_p (gimple t)
4ee9c684 2366{
75a70cf9 2367 return (gimple_code (t) == GIMPLE_GOTO
2368 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2c8a1497 2369}
2370
2371
75a70cf9 2372/* Return true if STMT should start a new basic block. PREV_STMT is
2373 the statement preceding STMT. It is used when STMT is a label or a
2374 case label. Labels should only start a new basic block if their
 2375   previous statement wasn't a label.  Otherwise, a sequence of labels
2376 would generate unnecessary basic blocks that only contain a single
2377 label. */
4ee9c684 2378
2379static inline bool
75a70cf9 2380stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
4ee9c684 2381{
75a70cf9 2382 if (stmt == NULL)
4ee9c684 2383 return false;
2384
75a70cf9 2385 /* Labels start a new basic block only if the preceding statement
2386 wasn't a label of the same type. This prevents the creation of
2387 consecutive blocks that have nothing but a single label. */
2388 if (gimple_code (stmt) == GIMPLE_LABEL)
4ee9c684 2389 {
2390 /* Nonlocal and computed GOTO targets always start a new block. */
75a70cf9 2391 if (DECL_NONLOCAL (gimple_label_label (stmt))
2392 || FORCED_LABEL (gimple_label_label (stmt)))
4ee9c684 2393 return true;
2394
75a70cf9 2395 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
4ee9c684 2396 {
75a70cf9 2397 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
4ee9c684 2398 return true;
2399
2400 cfg_stats.num_merged_labels++;
2401 return false;
2402 }
2403 else
2404 return true;
2405 }
2406
2407 return false;
2408}
2409
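/* Example:  in the sequence

       L1:
       L2:
       x_1 = x_2 + 1;

   only L1 starts a new basic block; L2 immediately follows another
   ordinary label, so it is folded into the same block and counted in
   cfg_stats.num_merged_labels.  A nonlocal or FORCED_LABEL label in
   L2's position would still start a block of its own.  */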
2410
2411/* Return true if T should end a basic block. */
2412
2413bool
75a70cf9 2414stmt_ends_bb_p (gimple t)
4ee9c684 2415{
2416 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2417}
2418
75a70cf9 2419/* Remove block annotations and other data structures. */
4ee9c684 2420
2421void
0ec80471 2422delete_tree_cfg_annotations (void)
4ee9c684 2423{
f1f41a6c 2424 vec_free (label_to_block_map);
4ee9c684 2425}
2426
2427
2428/* Return the first statement in basic block BB. */
2429
75a70cf9 2430gimple
4ee9c684 2431first_stmt (basic_block bb)
2432{
75a70cf9 2433 gimple_stmt_iterator i = gsi_start_bb (bb);
9845d120 2434 gimple stmt = NULL;
2435
2436 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2437 {
2438 gsi_next (&i);
2439 stmt = NULL;
2440 }
2441 return stmt;
4ee9c684 2442}
2443
d01c707b 2444/* Return the first non-label statement in basic block BB. */
2445
2446static gimple
2447first_non_label_stmt (basic_block bb)
2448{
2449 gimple_stmt_iterator i = gsi_start_bb (bb);
2450 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2451 gsi_next (&i);
2452 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2453}
2454
4ee9c684 2455/* Return the last statement in basic block BB. */
2456
75a70cf9 2457gimple
4ee9c684 2458last_stmt (basic_block bb)
2459{
9845d120 2460 gimple_stmt_iterator i = gsi_last_bb (bb);
2461 gimple stmt = NULL;
2462
2463 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2464 {
2465 gsi_prev (&i);
2466 stmt = NULL;
2467 }
2468 return stmt;
4ee9c684 2469}
2470
4ee9c684 2471/* Return the last statement of an otherwise empty block. Return NULL
2472 if the block is totally empty, or if it contains more than one
2473 statement. */
2474
75a70cf9 2475gimple
4ee9c684 2476last_and_only_stmt (basic_block bb)
2477{
9845d120 2478 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
75a70cf9 2479 gimple last, prev;
4ee9c684 2480
75a70cf9 2481 if (gsi_end_p (i))
2482 return NULL;
4ee9c684 2483
75a70cf9 2484 last = gsi_stmt (i);
9845d120 2485 gsi_prev_nondebug (&i);
75a70cf9 2486 if (gsi_end_p (i))
4ee9c684 2487 return last;
2488
2489 /* Empty statements should no longer appear in the instruction stream.
2490 Everything that might have appeared before should be deleted by
75a70cf9 2491 remove_useless_stmts, and the optimizers should just gsi_remove
4ee9c684 2492 instead of smashing with build_empty_stmt.
2493
2494 Thus the only thing that should appear here in a block containing
2495 one executable statement is a label. */
75a70cf9 2496 prev = gsi_stmt (i);
2497 if (gimple_code (prev) == GIMPLE_LABEL)
4ee9c684 2498 return last;
2499 else
75a70cf9 2500 return NULL;
bb445479 2501}
4ee9c684 2502
5f20a306 2503/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2504
2505static void
2506reinstall_phi_args (edge new_edge, edge old_edge)
2507{
f1f41a6c 2508 edge_var_map_vector *v;
d03ba86f 2509 edge_var_map *vm;
2510 int i;
75a70cf9 2511 gimple_stmt_iterator phis;
48e1416a 2512
d03ba86f 2513 v = redirect_edge_var_map_vector (old_edge);
2514 if (!v)
5f20a306 2515 return;
48e1416a 2516
75a70cf9 2517 for (i = 0, phis = gsi_start_phis (new_edge->dest);
f1f41a6c 2518 v->iterate (i, &vm) && !gsi_end_p (phis);
75a70cf9 2519 i++, gsi_next (&phis))
5f20a306 2520 {
75a70cf9 2521 gimple phi = gsi_stmt (phis);
d03ba86f 2522 tree result = redirect_edge_var_map_result (vm);
2523 tree arg = redirect_edge_var_map_def (vm);
48e1416a 2524
75a70cf9 2525 gcc_assert (result == gimple_phi_result (phi));
48e1416a 2526
60d535d2 2527 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
5f20a306 2528 }
48e1416a 2529
d03ba86f 2530 redirect_edge_var_map_clear (old_edge);
5f20a306 2531}
2532
588ce679 2533/* Returns the basic block after which the new basic block created
c4d867e0 2534 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2535 near its "logical" location. This is of most help to humans looking
2536 at debugging dumps. */
2537
2538static basic_block
2539split_edge_bb_loc (edge edge_in)
2540{
2541 basic_block dest = edge_in->dest;
0882aafd 2542 basic_block dest_prev = dest->prev_bb;
c4d867e0 2543
0882aafd 2544 if (dest_prev)
2545 {
2546 edge e = find_edge (dest_prev, dest);
2547 if (e && !(e->flags & EDGE_COMPLEX))
2548 return edge_in->src;
2549 }
2550 return dest_prev;
c4d867e0 2551}
2552
4ee9c684 2553/* Split a (typically critical) edge EDGE_IN. Return the new block.
2554 Abort on abnormal edges. */
2555
2556static basic_block
75a70cf9 2557gimple_split_edge (edge edge_in)
4ee9c684 2558{
0055dbde 2559 basic_block new_bb, after_bb, dest;
4ee9c684 2560 edge new_edge, e;
4ee9c684 2561
2562 /* Abnormal edges cannot be split. */
8c0963c4 2563 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
4ee9c684 2564
4ee9c684 2565 dest = edge_in->dest;
2566
c4d867e0 2567 after_bb = split_edge_bb_loc (edge_in);
4ee9c684 2568
2569 new_bb = create_empty_bb (after_bb);
856b3326 2570 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2571 new_bb->count = edge_in->count;
4ee9c684 2572 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
856b3326 2573 new_edge->probability = REG_BR_PROB_BASE;
2574 new_edge->count = edge_in->count;
4ee9c684 2575
8c0963c4 2576 e = redirect_edge_and_branch (edge_in, new_bb);
d8a0d6b8 2577 gcc_assert (e == edge_in);
5f20a306 2578 reinstall_phi_args (new_edge, e);
4ee9c684 2579
2580 return new_bb;
2581}
2582
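/* Sketch of the effect (basic block numbers are hypothetical):
   splitting the critical edge 2->5 turns

       BB2 ---> BB5        into        BB2 ---> BB7 ---> BB5

   where BB7 is the new empty block that is returned.  It inherits the
   count and frequency of the original edge, its single outgoing edge is
   a fallthru, and PHI arguments that used to flow along 2->5 are
   reinstalled on the new edge 7->5 by reinstall_phi_args.  */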
182cf5a9 2583
2584/* Verify properties of the address expression T with base object BASE. */
2585
2586static tree
2587verify_address (tree t, tree base)
2588{
2589 bool old_constant;
2590 bool old_side_effects;
2591 bool new_constant;
2592 bool new_side_effects;
2593
2594 old_constant = TREE_CONSTANT (t);
2595 old_side_effects = TREE_SIDE_EFFECTS (t);
2596
2597 recompute_tree_invariant_for_addr_expr (t);
2598 new_side_effects = TREE_SIDE_EFFECTS (t);
2599 new_constant = TREE_CONSTANT (t);
2600
2601 if (old_constant != new_constant)
2602 {
2603 error ("constant not recomputed when ADDR_EXPR changed");
2604 return t;
2605 }
2606 if (old_side_effects != new_side_effects)
2607 {
2608 error ("side effects not recomputed when ADDR_EXPR changed");
2609 return t;
2610 }
2611
2612 if (!(TREE_CODE (base) == VAR_DECL
2613 || TREE_CODE (base) == PARM_DECL
2614 || TREE_CODE (base) == RESULT_DECL))
2615 return NULL_TREE;
2616
2617 if (DECL_GIMPLE_REG_P (base))
2618 {
2619 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2620 return base;
2621 }
2622
2623 return NULL_TREE;
2624}
2625
4ee9c684 2626/* Callback for walk_tree; check that all elements with address taken are
8b592de0 2627   properly noticed as such.  The DATA argument is not used by this
 2628   callback (note the ATTRIBUTE_UNUSED on the parameter).  */
4ee9c684 2629
2630static tree
e44658e5 2631verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4ee9c684 2632{
2633 tree t = *tp, x;
2634
2635 if (TYPE_P (t))
2636 *walk_subtrees = 0;
13b96211 2637
260e7e11 2638 /* Check operand N for being valid GIMPLE and give error MSG if not. */
e44658e5 2639#define CHECK_OP(N, MSG) \
260e7e11 2640 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
e44658e5 2641 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
4ee9c684 2642
2643 switch (TREE_CODE (t))
2644 {
2645 case SSA_NAME:
2646 if (SSA_NAME_IN_FREE_LIST (t))
2647 {
2648 error ("SSA name in freelist but still referenced");
2649 return *tp;
2650 }
2651 break;
2652
d3a42176 2653 case INDIRECT_REF:
182cf5a9 2654 error ("INDIRECT_REF in gimple IL");
2655 return t;
2656
2657 case MEM_REF:
d3a42176 2658 x = TREE_OPERAND (t, 0);
a34ee42c 2659 if (!POINTER_TYPE_P (TREE_TYPE (x))
2660 || !is_gimple_mem_ref_addr (x))
d3a42176 2661 {
bf776685 2662 error ("invalid first operand of MEM_REF");
d3a42176 2663 return x;
2664 }
182cf5a9 2665 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2666 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2667 {
bf776685 2668 error ("invalid offset operand of MEM_REF");
182cf5a9 2669 return TREE_OPERAND (t, 1);
2670 }
2671 if (TREE_CODE (x) == ADDR_EXPR
2672 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2673 return x;
2674 *walk_subtrees = 0;
d3a42176 2675 break;
2676
88dbf20f 2677 case ASSERT_EXPR:
2678 x = fold (ASSERT_EXPR_COND (t));
2679 if (x == boolean_false_node)
2680 {
2681 error ("ASSERT_EXPR with an always-false condition");
2682 return *tp;
2683 }
2684 break;
2685
4ee9c684 2686 case MODIFY_EXPR:
bf776685 2687 error ("MODIFY_EXPR not expected while having tuples");
7262b179 2688 return *tp;
4ee9c684 2689
2690 case ADDR_EXPR:
2f96475c 2691 {
182cf5a9 2692 tree tem;
2f96475c 2693
c7d4e749 2694 gcc_assert (is_gimple_address (t));
2695
2f96475c 2696 /* Skip any references (they will be checked when we recurse down the
2697 tree) and ensure that any variable used as a prefix is marked
2698 addressable. */
2699 for (x = TREE_OPERAND (t, 0);
2700 handled_component_p (x);
2701 x = TREE_OPERAND (x, 0))
2702 ;
2703
182cf5a9 2704 if ((tem = verify_address (t, x)))
2705 return tem;
2706
dd277d48 2707 if (!(TREE_CODE (x) == VAR_DECL
2708 || TREE_CODE (x) == PARM_DECL
2709 || TREE_CODE (x) == RESULT_DECL))
2f96475c 2710 return NULL;
182cf5a9 2711
2f96475c 2712 if (!TREE_ADDRESSABLE (x))
2713 {
2714 error ("address taken, but ADDRESSABLE bit not set");
2715 return x;
2716 }
cd3f90e2 2717
2f96475c 2718 break;
2719 }
4ee9c684 2720
2721 case COND_EXPR:
58f52dd4 2722 x = COND_EXPR_COND (t);
79307120 2723 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
4ee9c684 2724 {
79307120 2725 error ("non-integral used in condition");
4ee9c684 2726 return x;
2727 }
3a45c5eb 2728 if (!is_gimple_condexpr (x))
2729 {
0a81f5a0 2730 error ("invalid conditional operand");
3a45c5eb 2731 return x;
2732 }
4ee9c684 2733 break;
2734
6a2b2394 2735 case NON_LVALUE_EXPR:
569b4ac8 2736 case TRUTH_NOT_EXPR:
2737 gcc_unreachable ();
6a2b2394 2738
72dd6141 2739 CASE_CONVERT:
4ee9c684 2740 case FIX_TRUNC_EXPR:
4ee9c684 2741 case FLOAT_EXPR:
2742 case NEGATE_EXPR:
2743 case ABS_EXPR:
2744 case BIT_NOT_EXPR:
0a81f5a0 2745 CHECK_OP (0, "invalid operand to unary operator");
4ee9c684 2746 break;
2747
2748 case REALPART_EXPR:
2749 case IMAGPART_EXPR:
e44658e5 2750 case COMPONENT_REF:
2751 case ARRAY_REF:
2752 case ARRAY_RANGE_REF:
2753 case BIT_FIELD_REF:
2754 case VIEW_CONVERT_EXPR:
2755 /* We have a nest of references. Verify that each of the operands
2756 that determine where to reference is either a constant or a variable,
2757 verify that the base is valid, and then show we've already checked
2758 the subtrees. */
1f9b622b 2759 while (handled_component_p (t))
e44658e5 2760 {
2761 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
0a81f5a0 2762 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
e44658e5 2763 else if (TREE_CODE (t) == ARRAY_REF
2764 || TREE_CODE (t) == ARRAY_RANGE_REF)
2765 {
0a81f5a0 2766 CHECK_OP (1, "invalid array index");
e44658e5 2767 if (TREE_OPERAND (t, 2))
0a81f5a0 2768 CHECK_OP (2, "invalid array lower bound");
e44658e5 2769 if (TREE_OPERAND (t, 3))
0a81f5a0 2770 CHECK_OP (3, "invalid array stride");
e44658e5 2771 }
2772 else if (TREE_CODE (t) == BIT_FIELD_REF)
2773 {
70d2daf7 2774 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2775 || !host_integerp (TREE_OPERAND (t, 2), 1))
2776 {
2777 error ("invalid position or size operand to BIT_FIELD_REF");
2778 return t;
2779 }
7f23b9c0 2780 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2781 && (TYPE_PRECISION (TREE_TYPE (t))
2782 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
70337474 2783 {
2784 error ("integral result type precision does not match "
2785 "field size of BIT_FIELD_REF");
2786 return t;
2787 }
7f23b9c0 2788 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2789 && !AGGREGATE_TYPE_P (TREE_TYPE (t))
2790 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2791 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2792 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
70337474 2793 {
2794 error ("mode precision of non-integral result does not "
2795 "match field size of BIT_FIELD_REF");
2796 return t;
2797 }
e44658e5 2798 }
2799
2800 t = TREE_OPERAND (t, 0);
2801 }
2802
2a2aef73 2803 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
e44658e5 2804 {
0a81f5a0 2805 error ("invalid reference prefix");
e44658e5 2806 return t;
2807 }
2808 *walk_subtrees = 0;
4ee9c684 2809 break;
0de36bdb 2810 case PLUS_EXPR:
2811 case MINUS_EXPR:
 2812      /* PLUS_EXPR and MINUS_EXPR don't work on pointers; they should be done using
2813 POINTER_PLUS_EXPR. */
2814 if (POINTER_TYPE_P (TREE_TYPE (t)))
2815 {
2816 error ("invalid operand to plus/minus, type is a pointer");
2817 return t;
2818 }
2819 CHECK_OP (0, "invalid operand to binary operator");
2820 CHECK_OP (1, "invalid operand to binary operator");
2821 break;
4ee9c684 2822
0de36bdb 2823 case POINTER_PLUS_EXPR:
2824 /* Check to make sure the first operand is a pointer or reference type. */
2825 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2826 {
2827 error ("invalid operand to pointer plus, first operand is not a pointer");
2828 return t;
2829 }
a845d317 2830 /* Check to make sure the second operand is a ptrofftype. */
2831 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
0de36bdb 2832 {
2833 error ("invalid operand to pointer plus, second operand is not an "
a845d317 2834 "integer type of appropriate width");
0de36bdb 2835 return t;
2836 }
2837 /* FALLTHROUGH */
4ee9c684 2838 case LT_EXPR:
2839 case LE_EXPR:
2840 case GT_EXPR:
2841 case GE_EXPR:
2842 case EQ_EXPR:
2843 case NE_EXPR:
2844 case UNORDERED_EXPR:
2845 case ORDERED_EXPR:
2846 case UNLT_EXPR:
2847 case UNLE_EXPR:
2848 case UNGT_EXPR:
2849 case UNGE_EXPR:
2850 case UNEQ_EXPR:
318a728f 2851 case LTGT_EXPR:
4ee9c684 2852 case MULT_EXPR:
2853 case TRUNC_DIV_EXPR:
2854 case CEIL_DIV_EXPR:
2855 case FLOOR_DIV_EXPR:
2856 case ROUND_DIV_EXPR:
2857 case TRUNC_MOD_EXPR:
2858 case CEIL_MOD_EXPR:
2859 case FLOOR_MOD_EXPR:
2860 case ROUND_MOD_EXPR:
2861 case RDIV_EXPR:
2862 case EXACT_DIV_EXPR:
2863 case MIN_EXPR:
2864 case MAX_EXPR:
2865 case LSHIFT_EXPR:
2866 case RSHIFT_EXPR:
2867 case LROTATE_EXPR:
2868 case RROTATE_EXPR:
2869 case BIT_IOR_EXPR:
2870 case BIT_XOR_EXPR:
2871 case BIT_AND_EXPR:
0a81f5a0 2872 CHECK_OP (0, "invalid operand to binary operator");
2873 CHECK_OP (1, "invalid operand to binary operator");
4ee9c684 2874 break;
2875
96d5c2e2 2876 case CONSTRUCTOR:
2877 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2878 *walk_subtrees = 0;
2879 break;
2880
afec0a77 2881 case CASE_LABEL_EXPR:
2882 if (CASE_CHAIN (t))
2883 {
2884 error ("invalid CASE_CHAIN");
2885 return t;
2886 }
2887 break;
2888
4ee9c684 2889 default:
2890 break;
2891 }
2892 return NULL;
e44658e5 2893
2894#undef CHECK_OP
4ee9c684 2895}
2896
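/* Examples of trees the checks above reject (all operands hypothetical):
   a PLUS_EXPR or MINUS_EXPR whose type is a pointer type, i.e. the
   equivalent of

       p_2 = p_1 + 4;

   must be expressed as a POINTER_PLUS_EXPR in GIMPLE, and a
   POINTER_PLUS_EXPR whose second operand does not have pointer-offset
   width triggers the corresponding error.  Likewise an ADDR_EXPR of a
   VAR_DECL whose TREE_ADDRESSABLE bit is not set is flagged.  */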
f03e0ae4 2897
2898/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2899 Returns true if there is an error, otherwise false. */
2900
2901static bool
75a70cf9 2902verify_types_in_gimple_min_lval (tree expr)
f03e0ae4 2903{
2904 tree op;
2905
2906 if (is_gimple_id (expr))
2907 return false;
2908
5d9de213 2909 if (TREE_CODE (expr) != TARGET_MEM_REF
182cf5a9 2910 && TREE_CODE (expr) != MEM_REF)
f03e0ae4 2911 {
2912 error ("invalid expression for min lvalue");
2913 return true;
2914 }
2915
ec6b51ed 2916 /* TARGET_MEM_REFs are strange beasts. */
2917 if (TREE_CODE (expr) == TARGET_MEM_REF)
2918 return false;
2919
f03e0ae4 2920 op = TREE_OPERAND (expr, 0);
2921 if (!is_gimple_val (op))
2922 {
2923 error ("invalid operand in indirect reference");
2924 debug_generic_stmt (op);
2925 return true;
2926 }
182cf5a9 2927 /* Memory references now generally can involve a value conversion. */
f03e0ae4 2928
2929 return false;
2930}
2931
b898ce29 2932/* Verify if EXPR is a valid GIMPLE reference expression. If
2933 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
f03e0ae4 2934 if there is an error, otherwise false. */
2935
2936static bool
b898ce29 2937verify_types_in_gimple_reference (tree expr, bool require_lvalue)
f03e0ae4 2938{
2939 while (handled_component_p (expr))
2940 {
2941 tree op = TREE_OPERAND (expr, 0);
2942
2943 if (TREE_CODE (expr) == ARRAY_REF
2944 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2945 {
2946 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2947 || (TREE_OPERAND (expr, 2)
2948 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2949 || (TREE_OPERAND (expr, 3)
2950 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2951 {
2952 error ("invalid operands to array reference");
2953 debug_generic_stmt (expr);
2954 return true;
2955 }
2956 }
2957
2958 /* Verify if the reference array element types are compatible. */
2959 if (TREE_CODE (expr) == ARRAY_REF
2960 && !useless_type_conversion_p (TREE_TYPE (expr),
2961 TREE_TYPE (TREE_TYPE (op))))
2962 {
2963 error ("type mismatch in array reference");
2964 debug_generic_stmt (TREE_TYPE (expr));
2965 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2966 return true;
2967 }
2968 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2969 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2970 TREE_TYPE (TREE_TYPE (op))))
2971 {
2972 error ("type mismatch in array range reference");
2973 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2974 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2975 return true;
2976 }
2977
2978 if ((TREE_CODE (expr) == REALPART_EXPR
2979 || TREE_CODE (expr) == IMAGPART_EXPR)
2980 && !useless_type_conversion_p (TREE_TYPE (expr),
2981 TREE_TYPE (TREE_TYPE (op))))
2982 {
2983 error ("type mismatch in real/imagpart reference");
2984 debug_generic_stmt (TREE_TYPE (expr));
2985 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2986 return true;
2987 }
2988
2989 if (TREE_CODE (expr) == COMPONENT_REF
2990 && !useless_type_conversion_p (TREE_TYPE (expr),
2991 TREE_TYPE (TREE_OPERAND (expr, 1))))
2992 {
2993 error ("type mismatch in component reference");
2994 debug_generic_stmt (TREE_TYPE (expr));
2995 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2996 return true;
2997 }
2998
66b8b09f 2999 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3000 {
3001 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3002 that their operand is not an SSA name or an invariant when
3003 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3004 bug). Otherwise there is nothing to verify, gross mismatches at
3005 most invoke undefined behavior. */
3006 if (require_lvalue
3007 && (TREE_CODE (op) == SSA_NAME
3008 || is_gimple_min_invariant (op)))
3009 {
bf776685 3010 error ("conversion of an SSA_NAME on the left hand side");
66b8b09f 3011 debug_generic_stmt (expr);
3012 return true;
3013 }
182cf5a9 3014 else if (TREE_CODE (op) == SSA_NAME
3015 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3016 {
bf776685 3017 error ("conversion of register to a different size");
182cf5a9 3018 debug_generic_stmt (expr);
3019 return true;
3020 }
66b8b09f 3021 else if (!handled_component_p (op))
3022 return false;
3023 }
f03e0ae4 3024
3025 expr = op;
3026 }
3027
182cf5a9 3028 if (TREE_CODE (expr) == MEM_REF)
3029 {
3030 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3031 {
bf776685 3032 error ("invalid address operand in MEM_REF");
182cf5a9 3033 debug_generic_stmt (expr);
3034 return true;
3035 }
3036 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3037 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3038 {
bf776685 3039 error ("invalid offset operand in MEM_REF");
182cf5a9 3040 debug_generic_stmt (expr);
3041 return true;
3042 }
3043 }
9a14ba4f 3044 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3045 {
28daba6f 3046 if (!TMR_BASE (expr)
3047 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
e077c79b 3048 {
e66d204f 3049 error ("invalid address operand in TARGET_MEM_REF");
e077c79b 3050 return true;
3051 }
9a14ba4f 3052 if (!TMR_OFFSET (expr)
3053 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3054 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3055 {
bf776685 3056 error ("invalid offset operand in TARGET_MEM_REF");
9a14ba4f 3057 debug_generic_stmt (expr);
3058 return true;
3059 }
3060 }
182cf5a9 3061
b898ce29 3062 return ((require_lvalue || !is_gimple_min_invariant (expr))
3063 && verify_types_in_gimple_min_lval (expr));
f03e0ae4 3064}
3065
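/* A well-formed MEM_REF in this IL has an address operand satisfying
   is_gimple_mem_ref_addr (an SSA name, an invariant address, or an
   ADDR_EXPR) and an INTEGER_CST offset of pointer type, e.g. the
   reference printed as

       MEM[(int *)p_1 + 4B]

   (dump syntax shown for illustration only).  Anything else, including
   a non-constant offset, is reported as invalid by the checks above.  */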
c3a8498b 3066/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3067 list of pointer-to types that is trivially convertible to DEST. */
3068
3069static bool
3070one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3071{
3072 tree src;
3073
3074 if (!TYPE_POINTER_TO (src_obj))
3075 return true;
3076
3077 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3078 if (useless_type_conversion_p (dest, src))
3079 return true;
3080
3081 return false;
3082}
3083
75a70cf9 3084/* Return true if TYPE1 is a fixed-point type and if conversions to and
3085 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3086
3087static bool
3088valid_fixed_convert_types_p (tree type1, tree type2)
3089{
3090 return (FIXED_POINT_TYPE_P (type1)
3091 && (INTEGRAL_TYPE_P (type2)
3092 || SCALAR_FLOAT_TYPE_P (type2)
3093 || FIXED_POINT_TYPE_P (type2)));
3094}
3095
75a70cf9 3096/* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3097 is a problem, otherwise false. */
3098
3099static bool
d266354c 3100verify_gimple_call (gimple stmt)
75a70cf9 3101{
d266354c 3102 tree fn = gimple_call_fn (stmt);
fd7ad53b 3103 tree fntype, fndecl;
2727c872 3104 unsigned i;
3105
fb049fba 3106 if (gimple_call_internal_p (stmt))
3107 {
3108 if (fn)
3109 {
3110 error ("gimple call has two targets");
3111 debug_generic_stmt (fn);
3112 return true;
3113 }
3114 }
3115 else
3116 {
3117 if (!fn)
3118 {
3119 error ("gimple call has no target");
3120 return true;
3121 }
3122 }
3123
3124 if (fn && !is_gimple_call_addr (fn))
2727c872 3125 {
3126 error ("invalid function in gimple call");
3127 debug_generic_stmt (fn);
3128 return true;
3129 }
75a70cf9 3130
fb049fba 3131 if (fn
3132 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3133 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3134 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
d266354c 3135 {
3136 error ("non-function in gimple call");
3137 return true;
3138 }
75a70cf9 3139
fd7ad53b 3140 fndecl = gimple_call_fndecl (stmt);
3141 if (fndecl
3142 && TREE_CODE (fndecl) == FUNCTION_DECL
3143 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3144 && !DECL_PURE_P (fndecl)
3145 && !TREE_READONLY (fndecl))
3146 {
3147 error ("invalid pure const state for function");
3148 return true;
3149 }
3150
d266354c 3151 if (gimple_call_lhs (stmt)
66b8b09f 3152 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3153 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
d266354c 3154 {
3155 error ("invalid LHS in gimple call");
3156 return true;
3157 }
75a70cf9 3158
4569d18c 3159 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3160 {
3161 error ("LHS in noreturn call");
3162 return true;
3163 }
3164
2de00a2d 3165 fntype = gimple_call_fntype (stmt);
fb049fba 3166 if (fntype
3167 && gimple_call_lhs (stmt)
d266354c 3168 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3169 TREE_TYPE (fntype))
3170 /* ??? At least C++ misses conversions at assignments from
3171 void * call results.
3172 ??? Java is completely off. Especially with functions
3173 returning java.lang.Object.
3174 For now simply allow arbitrary pointer type conversions. */
3175 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3176 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3177 {
3178 error ("invalid conversion in gimple call");
3179 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3180 debug_generic_stmt (TREE_TYPE (fntype));
3181 return true;
3182 }
75a70cf9 3183
2727c872 3184 if (gimple_call_chain (stmt)
3185 && !is_gimple_val (gimple_call_chain (stmt)))
3186 {
3187 error ("invalid static chain in gimple call");
3188 debug_generic_stmt (gimple_call_chain (stmt));
3189 return true;
3190 }
3191
cfcaff92 3192 /* If there is a static chain argument, this should not be an indirect
3bb63aeb 3193 call, and the decl should have DECL_STATIC_CHAIN set. */
cfcaff92 3194 if (gimple_call_chain (stmt))
3195 {
491dbbe1 3196 if (!gimple_call_fndecl (stmt))
cfcaff92 3197 {
3198 error ("static chain in indirect gimple call");
3199 return true;
3200 }
3201 fn = TREE_OPERAND (fn, 0);
3202
3bb63aeb 3203 if (!DECL_STATIC_CHAIN (fn))
cfcaff92 3204 {
bf776685 3205 error ("static chain with function that doesn%'t use one");
cfcaff92 3206 return true;
3207 }
3208 }
3209
d266354c 3210 /* ??? The C frontend passes unpromoted arguments in case it
3211 didn't see a function declaration before the call. So for now
2727c872 3212 leave the call arguments mostly unverified. Once we gimplify
d266354c 3213 unit-at-a-time we have a chance to fix this. */
75a70cf9 3214
2727c872 3215 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3216 {
3217 tree arg = gimple_call_arg (stmt, i);
251e7603 3218 if ((is_gimple_reg_type (TREE_TYPE (arg))
3219 && !is_gimple_val (arg))
3220 || (!is_gimple_reg_type (TREE_TYPE (arg))
3221 && !is_gimple_lvalue (arg)))
2727c872 3222 {
3223 error ("invalid argument to gimple call");
3224 debug_generic_expr (arg);
c2b7e854 3225 return true;
2727c872 3226 }
3227 }
3228
d266354c 3229 return false;
75a70cf9 3230}
3231
d266354c 3232/* Verifies the gimple comparison with the result type TYPE and
3233 the operands OP0 and OP1. */
9ac19c3a 3234
3235static bool
d266354c 3236verify_gimple_comparison (tree type, tree op0, tree op1)
9ac19c3a 3237{
d266354c 3238 tree op0_type = TREE_TYPE (op0);
3239 tree op1_type = TREE_TYPE (op1);
75a70cf9 3240
d266354c 3241 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3242 {
3243 error ("invalid operands in gimple comparison");
3244 return true;
3245 }
9ac19c3a 3246
d266354c 3247  /* For comparisons we do not have the operation's type as the
3248 effective type the comparison is carried out in. Instead
3249 we require that either the first operand is trivially
3250 convertible into the second, or the other way around.
d266354c 3251 Because we special-case pointers to void we allow
3252 comparisons of pointers with the same mode as well. */
3a82f2b4 3253 if (!useless_type_conversion_p (op0_type, op1_type)
3254 && !useless_type_conversion_p (op1_type, op0_type)
3255 && (!POINTER_TYPE_P (op0_type)
3256 || !POINTER_TYPE_P (op1_type)
3257 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3258 {
3259 error ("mismatching comparison operand types");
d266354c 3260 debug_generic_expr (op0_type);
3261 debug_generic_expr (op1_type);
3262 return true;
3263 }
3264
3a82f2b4 3265 /* The resulting type of a comparison may be an effective boolean type. */
3266 if (INTEGRAL_TYPE_P (type)
3267 && (TREE_CODE (type) == BOOLEAN_TYPE
3268 || TYPE_PRECISION (type) == 1))
3269 ;
3270 /* Or an integer vector type with the same size and element count
3271 as the comparison operand types. */
3272 else if (TREE_CODE (type) == VECTOR_TYPE
3273 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3274 {
3275 if (TREE_CODE (op0_type) != VECTOR_TYPE
3276 || TREE_CODE (op1_type) != VECTOR_TYPE)
3277 {
3278 error ("non-vector operands in vector comparison");
3279 debug_generic_expr (op0_type);
3280 debug_generic_expr (op1_type);
3281 return true;
3282 }
3283
3284 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
96baa0d7 3285 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3286 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type)))))
3a82f2b4 3287 {
3288 error ("invalid vector comparison resulting type");
3289 debug_generic_expr (type);
3290 return true;
3291 }
3292 }
3293 else
3294 {
3295 error ("bogus comparison result type");
3296 debug_generic_expr (type);
3297 return true;
3298 }
3299
d266354c 3300 return false;
3301}
75a70cf9 3302
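/* Examples for the rules above:  comparing an "int *" with a "void *"
   operand is accepted via the same-mode pointer special case, and the
   scalar result may be a BOOLEAN_TYPE or any one-bit integral type.  A
   comparison of two V4SI vectors (hypothetical), on the other hand,
   must produce an integer vector type with the same number of elements
   and the same element size as the operands, not a scalar boolean.  */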
ec6b51ed 3303/* Verify a gimple assignment statement STMT with a unary rhs.
3304 Returns true if anything is wrong. */
f03e0ae4 3305
3306static bool
ec6b51ed 3307verify_gimple_assign_unary (gimple stmt)
f03e0ae4 3308{
75a70cf9 3309 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3310 tree lhs = gimple_assign_lhs (stmt);
75a70cf9 3311 tree lhs_type = TREE_TYPE (lhs);
ec6b51ed 3312 tree rhs1 = gimple_assign_rhs1 (stmt);
75a70cf9 3313 tree rhs1_type = TREE_TYPE (rhs1);
f03e0ae4 3314
5717187b 3315 if (!is_gimple_reg (lhs))
ec6b51ed 3316 {
3317 error ("non-register as LHS of unary operation");
3318 return true;
3319 }
3320
3321 if (!is_gimple_val (rhs1))
3322 {
3323 error ("invalid operand in unary operation");
3324 return true;
3325 }
3326
3327 /* First handle conversions. */
75a70cf9 3328 switch (rhs_code)
f03e0ae4 3329 {
72dd6141 3330 CASE_CONVERT:
f03e0ae4 3331 {
5c332a23 3332 /* Allow conversions from pointer type to integral type only if
ec6b51ed 3333 there is no sign or zero extension involved.
a0553bff 3334	 For targets where the precision of ptrofftype doesn't match that
5c332a23 3335 of pointers we need to allow arbitrary conversions to ptrofftype. */
ec6b51ed 3336 if ((POINTER_TYPE_P (lhs_type)
5c332a23 3337 && INTEGRAL_TYPE_P (rhs1_type))
ec6b51ed 3338 || (POINTER_TYPE_P (rhs1_type)
3339 && INTEGRAL_TYPE_P (lhs_type)
3340 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
a0553bff 3341 || ptrofftype_p (sizetype))))
f03e0ae4 3342 return false;
3343
3f55c2b5 3344 /* Allow conversion from integral to offset type and vice versa. */
75a70cf9 3345 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3f55c2b5 3346 && INTEGRAL_TYPE_P (rhs1_type))
22ba4121 3347 || (INTEGRAL_TYPE_P (lhs_type)
3348 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
f03e0ae4 3349 return false;
3350
3351 /* Otherwise assert we are converting between types of the
3352 same kind. */
75a70cf9 3353 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
f03e0ae4 3354 {
3355 error ("invalid types in nop conversion");
75a70cf9 3356 debug_generic_expr (lhs_type);
3357 debug_generic_expr (rhs1_type);
f03e0ae4 3358 return true;
3359 }
3360
3361 return false;
3362 }
3363
bd1a81f7 3364 case ADDR_SPACE_CONVERT_EXPR:
3365 {
3366 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3367 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3368 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3369 {
3370 error ("invalid types in address space conversion");
3371 debug_generic_expr (lhs_type);
3372 debug_generic_expr (rhs1_type);
3373 return true;
3374 }
3375
3376 return false;
3377 }
3378
9ac19c3a 3379 case FIXED_CONVERT_EXPR:
3380 {
75a70cf9 3381 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3382 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
9ac19c3a 3383 {
3384 error ("invalid types in fixed-point conversion");
75a70cf9 3385 debug_generic_expr (lhs_type);
3386 debug_generic_expr (rhs1_type);
9ac19c3a 3387 return true;
3388 }
3389
3390 return false;
3391 }
3392
f03e0ae4 3393 case FLOAT_EXPR:
3394 {
b423d9f7 3395 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3396 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3397 || !VECTOR_FLOAT_TYPE_P(lhs_type)))
f03e0ae4 3398 {
3399 error ("invalid types in conversion to floating point");
75a70cf9 3400 debug_generic_expr (lhs_type);
3401 debug_generic_expr (rhs1_type);
f03e0ae4 3402 return true;
3403 }
75a70cf9 3404
f03e0ae4 3405 return false;
3406 }
3407
3408 case FIX_TRUNC_EXPR:
3409 {
b423d9f7 3410 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3411 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3412 || !VECTOR_FLOAT_TYPE_P(rhs1_type)))
f03e0ae4 3413 {
3414 error ("invalid types in conversion to integer");
75a70cf9 3415 debug_generic_expr (lhs_type);
3416 debug_generic_expr (rhs1_type);
f03e0ae4 3417 return true;
3418 }
75a70cf9 3419
f03e0ae4 3420 return false;
3421 }
3422
87f9ffa4 3423 case VEC_UNPACK_HI_EXPR:
3424 case VEC_UNPACK_LO_EXPR:
3425 case REDUC_MAX_EXPR:
3426 case REDUC_MIN_EXPR:
3427 case REDUC_PLUS_EXPR:
3428 case VEC_UNPACK_FLOAT_HI_EXPR:
3429 case VEC_UNPACK_FLOAT_LO_EXPR:
3430 /* FIXME. */
3431 return false;
ec6b51ed 3432
3433 case NEGATE_EXPR:
3434 case ABS_EXPR:
3435 case BIT_NOT_EXPR:
3436 case PAREN_EXPR:
3437 case NON_LVALUE_EXPR:
3438 case CONJ_EXPR:
ec6b51ed 3439 break;
3440
3441 default:
3442 gcc_unreachable ();
3443 }
3444
3445 /* For the remaining codes assert there is no conversion involved. */
3446 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3447 {
3448 error ("non-trivial conversion in unary operation");
3449 debug_generic_expr (lhs_type);
3450 debug_generic_expr (rhs1_type);
3451 return true;
3452 }
3453
3454 return false;
3455}
3456
3457/* Verify a gimple assignment statement STMT with a binary rhs.
3458 Returns true if anything is wrong. */
3459
3460static bool
3461verify_gimple_assign_binary (gimple stmt)
3462{
3463 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3464 tree lhs = gimple_assign_lhs (stmt);
3465 tree lhs_type = TREE_TYPE (lhs);
3466 tree rhs1 = gimple_assign_rhs1 (stmt);
3467 tree rhs1_type = TREE_TYPE (rhs1);
3468 tree rhs2 = gimple_assign_rhs2 (stmt);
3469 tree rhs2_type = TREE_TYPE (rhs2);
3470
5717187b 3471 if (!is_gimple_reg (lhs))
ec6b51ed 3472 {
3473 error ("non-register as LHS of binary operation");
3474 return true;
3475 }
75a70cf9 3476
ec6b51ed 3477 if (!is_gimple_val (rhs1)
3478 || !is_gimple_val (rhs2))
3479 {
3480 error ("invalid operands in binary operation");
3481 return true;
3482 }
3483
3484 /* First handle operations that involve different types. */
3485 switch (rhs_code)
3486 {
3487 case COMPLEX_EXPR:
3488 {
3489 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3490 || !(INTEGRAL_TYPE_P (rhs1_type)
75a70cf9 3491 || SCALAR_FLOAT_TYPE_P (rhs1_type))
ec6b51ed 3492 || !(INTEGRAL_TYPE_P (rhs2_type)
75a70cf9 3493 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
f03e0ae4 3494 {
3495 error ("type mismatch in complex expression");
75a70cf9 3496 debug_generic_expr (lhs_type);
3497 debug_generic_expr (rhs1_type);
3498 debug_generic_expr (rhs2_type);
f03e0ae4 3499 return true;
3500 }
75a70cf9 3501
f03e0ae4 3502 return false;
3503 }
3504
f03e0ae4 3505 case LSHIFT_EXPR:
3506 case RSHIFT_EXPR:
3507 case LROTATE_EXPR:
3508 case RROTATE_EXPR:
3509 {
87f9ffa4 3510 /* Shifts and rotates are ok on integral types, fixed point
3511 types and integer vector types. */
3512 if ((!INTEGRAL_TYPE_P (rhs1_type)
3513 && !FIXED_POINT_TYPE_P (rhs1_type)
3514 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
9e1eb6c1 3515 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
87f9ffa4 3516 || (!INTEGRAL_TYPE_P (rhs2_type)
3517 /* Vector shifts of vectors are also ok. */
3518 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
9e1eb6c1 3519 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
87f9ffa4 3520 && TREE_CODE (rhs2_type) == VECTOR_TYPE
9e1eb6c1 3521 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
75a70cf9 3522 || !useless_type_conversion_p (lhs_type, rhs1_type))
f03e0ae4 3523 {
3524 error ("type mismatch in shift expression");
75a70cf9 3525 debug_generic_expr (lhs_type);
3526 debug_generic_expr (rhs1_type);
3527 debug_generic_expr (rhs2_type);
f03e0ae4 3528 return true;
3529 }
75a70cf9 3530
f03e0ae4 3531 return false;
3532 }
3533
ec6b51ed 3534 case VEC_LSHIFT_EXPR:
3535 case VEC_RSHIFT_EXPR:
f03e0ae4 3536 {
ec6b51ed 3537 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
f323bb78 3538 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
0d825687 3539 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
8f93c289 3540 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3541 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
ec6b51ed 3542 || (!INTEGRAL_TYPE_P (rhs2_type)
3543 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3544 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3545 || !useless_type_conversion_p (lhs_type, rhs1_type))
f03e0ae4 3546 {
ec6b51ed 3547 error ("type mismatch in vector shift expression");
3548 debug_generic_expr (lhs_type);
3549 debug_generic_expr (rhs1_type);
3550 debug_generic_expr (rhs2_type);
f03e0ae4 3551 return true;
3552 }
0d825687 3553 /* For shifting a vector of non-integral components we
8f93c289 3554 only allow shifting by a constant multiple of the element size. */
0d825687 3555 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
8f93c289 3556 && (TREE_CODE (rhs2) != INTEGER_CST
3557 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3558 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3559 {
3560 error ("non-element sized vector shift of floating point vector");
3561 return true;
3562 }
75a70cf9 3563
ec6b51ed 3564 return false;
f03e0ae4 3565 }
3566
6083c152 3567 case WIDEN_LSHIFT_EXPR:
3568 {
3569 if (!INTEGRAL_TYPE_P (lhs_type)
3570 || !INTEGRAL_TYPE_P (rhs1_type)
3571 || TREE_CODE (rhs2) != INTEGER_CST
3572 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3573 {
3574 error ("type mismatch in widening vector shift expression");
3575 debug_generic_expr (lhs_type);
3576 debug_generic_expr (rhs1_type);
3577 debug_generic_expr (rhs2_type);
3578 return true;
3579 }
3580
3581 return false;
3582 }
3583
3584 case VEC_WIDEN_LSHIFT_HI_EXPR:
3585 case VEC_WIDEN_LSHIFT_LO_EXPR:
3586 {
3587 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3588 || TREE_CODE (lhs_type) != VECTOR_TYPE
3589 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3590 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3591 || TREE_CODE (rhs2) != INTEGER_CST
3592 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3593 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3594 {
3595 error ("type mismatch in widening vector shift expression");
3596 debug_generic_expr (lhs_type);
3597 debug_generic_expr (rhs1_type);
3598 debug_generic_expr (rhs2_type);
3599 return true;
3600 }
3601
3602 return false;
3603 }
3604
3e10a823 3605 case PLUS_EXPR:
46b155e1 3606 case MINUS_EXPR:
3e10a823 3607 {
46b155e1 3608 /* We use regular PLUS_EXPR and MINUS_EXPR for vectors.
3e10a823 3609 ??? This just makes the checker happy and may not be what is
3610 intended. */
3611 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3612 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3613 {
3614 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3615 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3616 {
3617 error ("invalid non-vector operands to vector valued plus");
3618 return true;
3619 }
3620 lhs_type = TREE_TYPE (lhs_type);
3621 rhs1_type = TREE_TYPE (rhs1_type);
3622 rhs2_type = TREE_TYPE (rhs2_type);
3623 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3624 the pointer to 2nd place. */
3625 if (POINTER_TYPE_P (rhs2_type))
3626 {
3627 tree tem = rhs1_type;
3628 rhs1_type = rhs2_type;
3629 rhs2_type = tem;
3630 }
3631 goto do_pointer_plus_expr_check;
3632 }
3e10a823 3633 if (POINTER_TYPE_P (lhs_type)
3634 || POINTER_TYPE_P (rhs1_type)
3635 || POINTER_TYPE_P (rhs2_type))
3636 {
3637 error ("invalid (pointer) operands to plus/minus");
3638 return true;
3639 }
3640
3641 /* Continue with generic binary expression handling. */
3642 break;
3643 }
3644
f03e0ae4 3645 case POINTER_PLUS_EXPR:
3646 {
3e10a823 3647 do_pointer_plus_expr_check:
75a70cf9 3648 if (!POINTER_TYPE_P (rhs1_type)
3649 || !useless_type_conversion_p (lhs_type, rhs1_type)
a845d317 3650 || !ptrofftype_p (rhs2_type))
f03e0ae4 3651 {
3652 error ("type mismatch in pointer plus expression");
75a70cf9 3653 debug_generic_stmt (lhs_type);
3654 debug_generic_stmt (rhs1_type);
3655 debug_generic_stmt (rhs2_type);
f03e0ae4 3656 return true;
3657 }
f03e0ae4 3658
75a70cf9 3659 return false;
48e1416a 3660 }
f03e0ae4 3661
f03e0ae4 3662 case TRUTH_ANDIF_EXPR:
3663 case TRUTH_ORIF_EXPR:
3664 case TRUTH_AND_EXPR:
3665 case TRUTH_OR_EXPR:
3666 case TRUTH_XOR_EXPR:
f03e0ae4 3667
d515ee79 3668 gcc_unreachable ();
f03e0ae4 3669
ec6b51ed 3670 case LT_EXPR:
3671 case LE_EXPR:
3672 case GT_EXPR:
3673 case GE_EXPR:
3674 case EQ_EXPR:
3675 case NE_EXPR:
3676 case UNORDERED_EXPR:
3677 case ORDERED_EXPR:
3678 case UNLT_EXPR:
3679 case UNLE_EXPR:
3680 case UNGT_EXPR:
3681 case UNGE_EXPR:
3682 case UNEQ_EXPR:
3683 case LTGT_EXPR:
3684 /* Comparisons are also binary, but the result type is not
3685 connected to the operand types. */
3686 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
f03e0ae4 3687
87f9ffa4 3688 case WIDEN_MULT_EXPR:
62be004c 3689 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3690 return true;
aff5fb4d 3691 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
62be004c 3692 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3693
3694 case WIDEN_SUM_EXPR:
87f9ffa4 3695 case VEC_WIDEN_MULT_HI_EXPR:
3696 case VEC_WIDEN_MULT_LO_EXPR:
79a78f7f 3697 case VEC_WIDEN_MULT_EVEN_EXPR:
3698 case VEC_WIDEN_MULT_ODD_EXPR:
87f9ffa4 3699 case VEC_PACK_TRUNC_EXPR:
3700 case VEC_PACK_SAT_EXPR:
3701 case VEC_PACK_FIX_TRUNC_EXPR:
87f9ffa4 3702 /* FIXME. */
3703 return false;
3704
ec6b51ed 3705 case MULT_EXPR:
96504875 3706 case MULT_HIGHPART_EXPR:
ec6b51ed 3707 case TRUNC_DIV_EXPR:
3708 case CEIL_DIV_EXPR:
3709 case FLOOR_DIV_EXPR:
3710 case ROUND_DIV_EXPR:
3711 case TRUNC_MOD_EXPR:
3712 case CEIL_MOD_EXPR:
3713 case FLOOR_MOD_EXPR:
3714 case ROUND_MOD_EXPR:
3715 case RDIV_EXPR:
3716 case EXACT_DIV_EXPR:
3717 case MIN_EXPR:
3718 case MAX_EXPR:
3719 case BIT_IOR_EXPR:
3720 case BIT_XOR_EXPR:
3721 case BIT_AND_EXPR:
ec6b51ed 3722 /* Continue with generic binary expression handling. */
3723 break;
f03e0ae4 3724
ec6b51ed 3725 default:
3726 gcc_unreachable ();
3727 }
635b74da 3728
ec6b51ed 3729 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3730 || !useless_type_conversion_p (lhs_type, rhs2_type))
3731 {
3732 error ("type mismatch in binary expression");
3733 debug_generic_stmt (lhs_type);
3734 debug_generic_stmt (rhs1_type);
3735 debug_generic_stmt (rhs2_type);
3736 return true;
3737 }
3738
3739 return false;
3740}
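/* Editorial note: a minimal, hypothetical illustration (plain C, not GCC
   internals) of the POINTER_PLUS_EXPR typing rule the checker above
   enforces: the result and the first operand share a compatible pointer
   type, while the second operand is a plain offset of pointer width
   (a "sizetype"-like unsigned integer), never another pointer.  */

#include <stddef.h>

char *
advance (char *base, size_t byte_offset)
{
  /* At the GIMPLE level this addition becomes something like
       _1 = base + byte_offset;
     i.e. a POINTER_PLUS_EXPR whose rhs2 has an offset (integer) type.  */
  return base + byte_offset;
}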
3741
00f4f705 3742/* Verify a gimple assignment statement STMT with a ternary rhs.
3743 Returns true if anything is wrong. */
3744
3745static bool
3746verify_gimple_assign_ternary (gimple stmt)
3747{
3748 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3749 tree lhs = gimple_assign_lhs (stmt);
3750 tree lhs_type = TREE_TYPE (lhs);
3751 tree rhs1 = gimple_assign_rhs1 (stmt);
3752 tree rhs1_type = TREE_TYPE (rhs1);
3753 tree rhs2 = gimple_assign_rhs2 (stmt);
3754 tree rhs2_type = TREE_TYPE (rhs2);
3755 tree rhs3 = gimple_assign_rhs3 (stmt);
3756 tree rhs3_type = TREE_TYPE (rhs3);
3757
5717187b 3758 if (!is_gimple_reg (lhs))
00f4f705 3759 {
3760 error ("non-register as LHS of ternary operation");
3761 return true;
3762 }
3763
8a2caf10 3764 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3765 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
00f4f705 3766 || !is_gimple_val (rhs2)
3767 || !is_gimple_val (rhs3))
3768 {
3769 error ("invalid operands in ternary operation");
3770 return true;
3771 }
3772
3773 /* First handle operations that involve different types. */
3774 switch (rhs_code)
3775 {
3776 case WIDEN_MULT_PLUS_EXPR:
3777 case WIDEN_MULT_MINUS_EXPR:
3778 if ((!INTEGRAL_TYPE_P (rhs1_type)
3779 && !FIXED_POINT_TYPE_P (rhs1_type))
3780 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3781 || !useless_type_conversion_p (lhs_type, rhs3_type)
aff5fb4d 3782 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
00f4f705 3783 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3784 {
3785 error ("type mismatch in widening multiply-accumulate expression");
3786 debug_generic_expr (lhs_type);
3787 debug_generic_expr (rhs1_type);
3788 debug_generic_expr (rhs2_type);
3789 debug_generic_expr (rhs3_type);
3790 return true;
3791 }
3792 break;
3793
b9be572e 3794 case FMA_EXPR:
3795 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3796 || !useless_type_conversion_p (lhs_type, rhs2_type)
3797 || !useless_type_conversion_p (lhs_type, rhs3_type))
3798 {
3799 error ("type mismatch in fused multiply-add expression");
3800 debug_generic_expr (lhs_type);
3801 debug_generic_expr (rhs1_type);
3802 debug_generic_expr (rhs2_type);
3803 debug_generic_expr (rhs3_type);
3804 return true;
3805 }
3806 break;
3807
8a2caf10 3808 case COND_EXPR:
3809 case VEC_COND_EXPR:
3810 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3811 || !useless_type_conversion_p (lhs_type, rhs3_type))
3812 {
3813 error ("type mismatch in conditional expression");
3814 debug_generic_expr (lhs_type);
3815 debug_generic_expr (rhs2_type);
3816 debug_generic_expr (rhs3_type);
3817 return true;
3818 }
3819 break;
3820
f4803722 3821 case VEC_PERM_EXPR:
6cf89e04 3822 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3823 || !useless_type_conversion_p (lhs_type, rhs2_type))
3824 {
f4803722 3825 error ("type mismatch in vector permute expression");
6cf89e04 3826 debug_generic_expr (lhs_type);
3827 debug_generic_expr (rhs1_type);
3828 debug_generic_expr (rhs2_type);
3829 debug_generic_expr (rhs3_type);
3830 return true;
3831 }
3832
3833 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3834 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3835 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3836 {
f4803722 3837 error ("vector types expected in vector permute expression");
6cf89e04 3838 debug_generic_expr (lhs_type);
3839 debug_generic_expr (rhs1_type);
3840 debug_generic_expr (rhs2_type);
3841 debug_generic_expr (rhs3_type);
3842 return true;
3843 }
3844
3845 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3846 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3847 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3848 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3849 != TYPE_VECTOR_SUBPARTS (lhs_type))
3850 {
3851 error ("vectors with different element number found "
f4803722 3852 "in vector permute expression");
6cf89e04 3853 debug_generic_expr (lhs_type);
3854 debug_generic_expr (rhs1_type);
3855 debug_generic_expr (rhs2_type);
3856 debug_generic_expr (rhs3_type);
3857 return true;
3858 }
3859
3860 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3861 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3862 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3863 {
f4803722 3864 error ("invalid mask type in vector permute expression");
6cf89e04 3865 debug_generic_expr (lhs_type);
3866 debug_generic_expr (rhs1_type);
3867 debug_generic_expr (rhs2_type);
3868 debug_generic_expr (rhs3_type);
3869 return true;
3870 }
3871
3872 return false;
3873
c86930b0 3874 case DOT_PROD_EXPR:
3875 case REALIGN_LOAD_EXPR:
3876 /* FIXME. */
3877 return false;
3878
00f4f705 3879 default:
3880 gcc_unreachable ();
3881 }
3882 return false;
3883}
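/* Editorial note: a small, hypothetical illustration (GCC vector extension
   source, not part of this file) of the VEC_PERM_EXPR constraints checked
   above: all three operands are vectors with the same number of elements,
   and the selector is an integer vector whose element width matches the
   data element width.  __builtin_shuffle is one way such an expression
   commonly arises.  */

typedef int v4si __attribute__ ((vector_size (16)));

v4si
reverse_v4si (v4si x)
{
  v4si mask = { 3, 2, 1, 0 };
  return __builtin_shuffle (x, mask);  /* roughly VEC_PERM_EXPR <x, x, mask>  */
}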
3884
ec6b51ed 3885/* Verify a gimple assignment statement STMT with a single rhs.
3886 Returns true if anything is wrong. */
3887
3888static bool
3889verify_gimple_assign_single (gimple stmt)
3890{
3891 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3892 tree lhs = gimple_assign_lhs (stmt);
3893 tree lhs_type = TREE_TYPE (lhs);
3894 tree rhs1 = gimple_assign_rhs1 (stmt);
3895 tree rhs1_type = TREE_TYPE (rhs1);
3896 bool res = false;
3897
3898 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3899 {
3900 error ("non-trivial conversion at assignment");
3901 debug_generic_expr (lhs_type);
3902 debug_generic_expr (rhs1_type);
3903 return true;
f03e0ae4 3904 }
3905
eb54054d 3906 if (gimple_clobber_p (stmt)
3907 && !DECL_P (lhs))
3908 {
3909 error ("non-decl LHS in clobber statement");
3910 debug_generic_expr (lhs);
3911 return true;
3912 }
3913
ec6b51ed 3914 if (handled_component_p (lhs))
b898ce29 3915 res |= verify_types_in_gimple_reference (lhs, true);
ec6b51ed 3916
3917 /* Special codes we cannot handle via their class. */
3918 switch (rhs_code)
f03e0ae4 3919 {
ec6b51ed 3920 case ADDR_EXPR:
3921 {
3922 tree op = TREE_OPERAND (rhs1, 0);
3923 if (!is_gimple_addressable (op))
3924 {
3925 error ("invalid operand in unary expression");
3926 return true;
3927 }
767be488 3928
491dbbe1 3929 /* Technically there is no longer a need for matching types, but
3930 gimple hygiene asks for this check. In LTO we can end up
3931 combining incompatible units and thus end up with addresses
3932 of globals that change their type to a common one. */
3933 if (!in_lto_p
3934 && !types_compatible_p (TREE_TYPE (op),
3935 TREE_TYPE (TREE_TYPE (rhs1)))
a8cefe90 3936 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3937 TREE_TYPE (op)))
ec6b51ed 3938 {
3939 error ("type mismatch in address expression");
a8cefe90 3940 debug_generic_stmt (TREE_TYPE (rhs1));
3941 debug_generic_stmt (TREE_TYPE (op));
ec6b51ed 3942 return true;
3943 }
3944
b898ce29 3945 return verify_types_in_gimple_reference (op, true);
ec6b51ed 3946 }
3947
3948 /* tcc_reference */
182cf5a9 3949 case INDIRECT_REF:
3950 error ("INDIRECT_REF in gimple IL");
3951 return true;
3952
ec6b51ed 3953 case COMPONENT_REF:
3954 case BIT_FIELD_REF:
ec6b51ed 3955 case ARRAY_REF:
3956 case ARRAY_RANGE_REF:
3957 case VIEW_CONVERT_EXPR:
3958 case REALPART_EXPR:
3959 case IMAGPART_EXPR:
3960 case TARGET_MEM_REF:
182cf5a9 3961 case MEM_REF:
ec6b51ed 3962 if (!is_gimple_reg (lhs)
3963 && is_gimple_reg_type (TREE_TYPE (lhs)))
767be488 3964 {
ec6b51ed 3965 error ("invalid rhs for gimple memory store");
3966 debug_generic_stmt (lhs);
3967 debug_generic_stmt (rhs1);
75a70cf9 3968 return true;
3969 }
b898ce29 3970 return res || verify_types_in_gimple_reference (rhs1, false);
f03e0ae4 3971
ec6b51ed 3972 /* tcc_constant */
3973 case SSA_NAME:
3974 case INTEGER_CST:
3975 case REAL_CST:
3976 case FIXED_CST:
3977 case COMPLEX_CST:
3978 case VECTOR_CST:
3979 case STRING_CST:
3980 return res;
3981
3982 /* tcc_declaration */
3983 case CONST_DECL:
3984 return res;
3985 case VAR_DECL:
3986 case PARM_DECL:
3987 if (!is_gimple_reg (lhs)
3988 && !is_gimple_reg (rhs1)
3989 && is_gimple_reg_type (TREE_TYPE (lhs)))
23c07107 3990 {
ec6b51ed 3991 error ("invalid rhs for gimple memory store");
3992 debug_generic_stmt (lhs);
3993 debug_generic_stmt (rhs1);
23c07107 3994 return true;
3995 }
ec6b51ed 3996 return res;
f03e0ae4 3997
ec6b51ed 3998 case CONSTRUCTOR:
0ff8139c 3999 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4000 {
4001 unsigned int i;
4002 tree elt_i, elt_v, elt_t = NULL_TREE;
4003
4004 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4005 return res;
 4006 /* For vector CONSTRUCTORs we require that either it is an empty
 4007 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
 4008 (then the element count must be correct to cover the whole
 4009 outer vector and the index must be NULL on all elements), or it is
 4010 a CONSTRUCTOR of scalar elements, where we as an exception allow
 4011 a smaller number of elements (assuming zero filling) and
 4012 consecutive indexes as compared to NULL indexes (such
 4013 CONSTRUCTORs can appear in the IL from FEs). */
4014 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4015 {
4016 if (elt_t == NULL_TREE)
4017 {
4018 elt_t = TREE_TYPE (elt_v);
 4019 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4020 {
4021 tree elt_t = TREE_TYPE (elt_v);
4022 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4023 TREE_TYPE (elt_t)))
4024 {
4025 error ("incorrect type of vector CONSTRUCTOR"
4026 " elements");
4027 debug_generic_stmt (rhs1);
4028 return true;
4029 }
4030 else if (CONSTRUCTOR_NELTS (rhs1)
4031 * TYPE_VECTOR_SUBPARTS (elt_t)
4032 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4033 {
4034 error ("incorrect number of vector CONSTRUCTOR"
4035 " elements");
4036 debug_generic_stmt (rhs1);
4037 return true;
4038 }
4039 }
4040 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4041 elt_t))
4042 {
4043 error ("incorrect type of vector CONSTRUCTOR elements");
4044 debug_generic_stmt (rhs1);
4045 return true;
4046 }
4047 else if (CONSTRUCTOR_NELTS (rhs1)
4048 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4049 {
4050 error ("incorrect number of vector CONSTRUCTOR elements");
4051 debug_generic_stmt (rhs1);
4052 return true;
4053 }
4054 }
4055 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4056 {
4057 error ("incorrect type of vector CONSTRUCTOR elements");
4058 debug_generic_stmt (rhs1);
4059 return true;
4060 }
4061 if (elt_i != NULL_TREE
4062 && (TREE_CODE (elt_t) == VECTOR_TYPE
4063 || TREE_CODE (elt_i) != INTEGER_CST
4064 || compare_tree_int (elt_i, i) != 0))
4065 {
4066 error ("vector CONSTRUCTOR with non-NULL element index");
4067 debug_generic_stmt (rhs1);
4068 return true;
4069 }
4070 }
4071 }
4072 return res;
ec6b51ed 4073 case OBJ_TYPE_REF:
4074 case ASSERT_EXPR:
4075 case WITH_SIZE_EXPR:
ec6b51ed 4076 /* FIXME. */
4077 return res;
f03e0ae4 4078
75a70cf9 4079 default:;
f03e0ae4 4080 }
4081
ec6b51ed 4082 return res;
f03e0ae4 4083}
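/* Editorial note: a standalone sketch (hypothetical struct, no GCC trees)
   of the vector-CONSTRUCTOR shape rule verified above: either every
   element is a sub-vector and together they exactly tile the result, or
   every element is a scalar, in which case fewer elements than lanes is
   allowed (zero filling) but explicit indexes, when present, must be
   consecutive starting at zero.  */

#include <stdbool.h>
#include <stddef.h>

struct ctor_elt
{
  bool has_index;	/* explicit index present?  */
  size_t index;		/* its value, if so  */
  size_t lanes;		/* 1 for scalar elements, >1 for vector elements  */
};

bool
ctor_shape_ok (const struct ctor_elt *elts, size_t n, size_t result_lanes)
{
  if (n == 0)
    return true;			/* the empty CONSTRUCTOR is fine  */

  bool vector_elts = elts[0].lanes > 1;
  size_t covered = 0;

  for (size_t i = 0; i < n; i++)
    {
      if ((elts[i].lanes > 1) != vector_elts)
	return false;			/* no mixing of scalars and vectors  */
      if (vector_elts && elts[i].has_index)
	return false;			/* vector elements must use NULL indexes  */
      if (!vector_elts && elts[i].has_index && elts[i].index != i)
	return false;			/* scalar indexes must be consecutive  */
      covered += elts[i].lanes;
    }

  return vector_elts ? covered == result_lanes : covered <= result_lanes;
}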
4084
ec6b51ed 4085/* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4086 is a problem, otherwise false. */
4087
4088static bool
4089verify_gimple_assign (gimple stmt)
4090{
4091 switch (gimple_assign_rhs_class (stmt))
4092 {
4093 case GIMPLE_SINGLE_RHS:
4094 return verify_gimple_assign_single (stmt);
4095
4096 case GIMPLE_UNARY_RHS:
4097 return verify_gimple_assign_unary (stmt);
4098
4099 case GIMPLE_BINARY_RHS:
4100 return verify_gimple_assign_binary (stmt);
4101
00f4f705 4102 case GIMPLE_TERNARY_RHS:
4103 return verify_gimple_assign_ternary (stmt);
4104
ec6b51ed 4105 default:
4106 gcc_unreachable ();
4107 }
4108}
75a70cf9 4109
4110/* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4111 is a problem, otherwise false. */
f03e0ae4 4112
4113static bool
d266354c 4114verify_gimple_return (gimple stmt)
f03e0ae4 4115{
75a70cf9 4116 tree op = gimple_return_retval (stmt);
d266354c 4117 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
75a70cf9 4118
d266354c 4119 /* We cannot check that a return value is present, as we do not
 4120 fix up missing return values from the original source. */
75a70cf9 4121 if (op == NULL)
4122 return false;
48e1416a 4123
d266354c 4124 if (!is_gimple_val (op)
4125 && TREE_CODE (op) != RESULT_DECL)
4126 {
4127 error ("invalid operand in return statement");
4128 debug_generic_stmt (op);
4129 return true;
4130 }
4131
f5ec85fb 4132 if ((TREE_CODE (op) == RESULT_DECL
4133 && DECL_BY_REFERENCE (op))
4134 || (TREE_CODE (op) == SSA_NAME
ec11736b 4135 && SSA_NAME_VAR (op)
f5ec85fb 4136 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4137 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4138 op = TREE_TYPE (op);
4139
4140 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
d266354c 4141 {
4142 error ("invalid conversion in return statement");
4143 debug_generic_stmt (restype);
4144 debug_generic_stmt (TREE_TYPE (op));
4145 return true;
4146 }
4147
4148 return false;
75a70cf9 4149}
f03e0ae4 4150
f03e0ae4 4151
d266354c 4152/* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4153 is a problem, otherwise false. */
4154
4155static bool
4156verify_gimple_goto (gimple stmt)
4157{
4158 tree dest = gimple_goto_dest (stmt);
4159
4160 /* ??? We have two canonical forms of direct goto destinations, a
4161 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4162 if (TREE_CODE (dest) != LABEL_DECL
4163 && (!is_gimple_val (dest)
4164 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4165 {
4166 error ("goto destination is neither a label nor a pointer");
4167 return true;
4168 }
4169
4170 return false;
4171}
4172
75a70cf9 4173/* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4174 is a problem, otherwise false. */
4175
4176static bool
d266354c 4177verify_gimple_switch (gimple stmt)
75a70cf9 4178{
9a9033a9 4179 unsigned int i, n;
4180 tree elt, prev_upper_bound = NULL_TREE;
4181 tree index_type, elt_type = NULL_TREE;
4182
75a70cf9 4183 if (!is_gimple_val (gimple_switch_index (stmt)))
f03e0ae4 4184 {
75a70cf9 4185 error ("invalid operand to switch statement");
d266354c 4186 debug_generic_stmt (gimple_switch_index (stmt));
f03e0ae4 4187 return true;
4188 }
4189
9a9033a9 4190 index_type = TREE_TYPE (gimple_switch_index (stmt));
4191 if (! INTEGRAL_TYPE_P (index_type))
4192 {
4193 error ("non-integral type switch statement");
4194 debug_generic_expr (index_type);
4195 return true;
4196 }
4197
49a70175 4198 elt = gimple_switch_label (stmt, 0);
9a9033a9 4199 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4200 {
4201 error ("invalid default case label in switch statement");
4202 debug_generic_expr (elt);
4203 return true;
4204 }
4205
4206 n = gimple_switch_num_labels (stmt);
4207 for (i = 1; i < n; i++)
4208 {
4209 elt = gimple_switch_label (stmt, i);
4210
4211 if (! CASE_LOW (elt))
4212 {
4213 error ("invalid case label in switch statement");
4214 debug_generic_expr (elt);
4215 return true;
4216 }
4217 if (CASE_HIGH (elt)
4218 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4219 {
4220 error ("invalid case range in switch statement");
4221 debug_generic_expr (elt);
4222 return true;
4223 }
4224
4225 if (elt_type)
4226 {
4227 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4228 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4229 {
4230 error ("type mismatch for case label in switch statement");
4231 debug_generic_expr (elt);
4232 return true;
4233 }
4234 }
4235 else
4236 {
4237 elt_type = TREE_TYPE (CASE_LOW (elt));
4238 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4239 {
4240 error ("type precision mismatch in switch statement");
4241 return true;
4242 }
4243 }
4244
4245 if (prev_upper_bound)
4246 {
4247 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4248 {
4249 error ("case labels not sorted in switch statement");
4250 return true;
4251 }
4252 }
4253
4254 prev_upper_bound = CASE_HIGH (elt);
4255 if (! prev_upper_bound)
4256 prev_upper_bound = CASE_LOW (elt);
4257 }
4258
75a70cf9 4259 return false;
4260}
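/* Editorial note: a standalone sketch (plain integers instead of trees) of
   the case-label ordering rule checked above.  The default label (index 0,
   no bounds) is handled separately; for the remaining labels each range
   must have LOW < HIGH when HIGH is present, and each LOW must lie
   strictly above the previous label's upper bound.  */

#include <stdbool.h>
#include <stddef.h>

struct case_range
{
  long low;
  long high;
  bool has_high;
};

bool
case_ranges_ok (const struct case_range *c, size_t n)
{
  for (size_t i = 0; i < n; i++)
    {
      if (c[i].has_high && c[i].high <= c[i].low)
	return false;				/* invalid case range  */
      if (i > 0)
	{
	  long prev_upper = c[i - 1].has_high ? c[i - 1].high : c[i - 1].low;
	  if (c[i].low <= prev_upper)
	    return false;			/* labels not sorted  */
	}
    }
  return true;
}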
f03e0ae4 4261
9845d120 4262/* Verify a gimple debug statement STMT.
4263 Returns true if anything is wrong. */
4264
4265static bool
4266verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4267{
 4268 /* There isn't much that could be wrong in a gimple debug stmt. A
 4269 gimple debug bind stmt, for example, maps a tree that is usually
 4270 a VAR_DECL or a PARM_DECL (but could also be some scalarized
 4271 component or member of an aggregate type) to another tree, which
 4272 can be an arbitrary expression. These stmts expand into debug
 4273 insns, and are converted to debug notes by var-tracking.c. */
4274 return false;
4275}
4276
fd7ad53b 4277/* Verify a gimple label statement STMT.
4278 Returns true if anything is wrong. */
4279
4280static bool
4281verify_gimple_label (gimple stmt)
4282{
4283 tree decl = gimple_label_label (stmt);
4284 int uid;
4285 bool err = false;
4286
4287 if (TREE_CODE (decl) != LABEL_DECL)
4288 return true;
4289
4290 uid = LABEL_DECL_UID (decl);
4291 if (cfun->cfg
f1f41a6c 4292 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
fd7ad53b 4293 {
4294 error ("incorrect entry in label_to_block_map");
4295 err |= true;
4296 }
4297
4298 uid = EH_LANDING_PAD_NR (decl);
4299 if (uid)
4300 {
4301 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4302 if (decl != lp->post_landing_pad)
4303 {
4304 error ("incorrect setting of landing pad number");
4305 err |= true;
4306 }
4307 }
4308
4309 return err;
4310}
9845d120 4311
f03e0ae4 4312/* Verify the GIMPLE statement STMT. Returns true if there is an
4313 error, otherwise false. */
4314
4315static bool
fd7ad53b 4316verify_gimple_stmt (gimple stmt)
f03e0ae4 4317{
75a70cf9 4318 switch (gimple_code (stmt))
f03e0ae4 4319 {
75a70cf9 4320 case GIMPLE_ASSIGN:
ec6b51ed 4321 return verify_gimple_assign (stmt);
f03e0ae4 4322
75a70cf9 4323 case GIMPLE_LABEL:
fd7ad53b 4324 return verify_gimple_label (stmt);
f03e0ae4 4325
75a70cf9 4326 case GIMPLE_CALL:
d266354c 4327 return verify_gimple_call (stmt);
f03e0ae4 4328
75a70cf9 4329 case GIMPLE_COND:
2727c872 4330 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4331 {
4332 error ("invalid comparison code in gimple cond");
4333 return true;
4334 }
4335 if (!(!gimple_cond_true_label (stmt)
4336 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4337 || !(!gimple_cond_false_label (stmt)
4338 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4339 {
4340 error ("invalid labels in gimple cond");
4341 return true;
4342 }
4343
d266354c 4344 return verify_gimple_comparison (boolean_type_node,
4345 gimple_cond_lhs (stmt),
4346 gimple_cond_rhs (stmt));
f03e0ae4 4347
75a70cf9 4348 case GIMPLE_GOTO:
d266354c 4349 return verify_gimple_goto (stmt);
f03e0ae4 4350
75a70cf9 4351 case GIMPLE_SWITCH:
d266354c 4352 return verify_gimple_switch (stmt);
f03e0ae4 4353
75a70cf9 4354 case GIMPLE_RETURN:
d266354c 4355 return verify_gimple_return (stmt);
f03e0ae4 4356
75a70cf9 4357 case GIMPLE_ASM:
f03e0ae4 4358 return false;
4359
4c0315d0 4360 case GIMPLE_TRANSACTION:
4361 return verify_gimple_transaction (stmt);
4362
d266354c 4363 /* Tuples that do not have tree operands. */
4364 case GIMPLE_NOP:
d266354c 4365 case GIMPLE_PREDICT:
e38def9c 4366 case GIMPLE_RESX:
4367 case GIMPLE_EH_DISPATCH:
860a36d0 4368 case GIMPLE_EH_MUST_NOT_THROW:
d266354c 4369 return false;
75a70cf9 4370
1a9a4436 4371 CASE_GIMPLE_OMP:
4372 /* OpenMP directives are validated by the FE and never operated
4373 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4374 non-gimple expressions when the main index variable has had
4375 its address taken. This does not affect the loop itself
 4376 because the header of a GIMPLE_OMP_FOR is merely used to determine
 4377 how to set up the parallel iteration. */
4378 return false;
4379
9845d120 4380 case GIMPLE_DEBUG:
4381 return verify_gimple_debug (stmt);
4382
f03e0ae4 4383 default:
4384 gcc_unreachable ();
4385 }
4386}
4387
fd7ad53b 4388/* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4389 and false otherwise. */
4390
4391static bool
4392verify_gimple_phi (gimple phi)
4393{
4394 bool err = false;
4395 unsigned i;
4396 tree phi_result = gimple_phi_result (phi);
4397 bool virtual_p;
4398
4399 if (!phi_result)
4400 {
4401 error ("invalid PHI result");
4402 return true;
4403 }
4404
7c782c9b 4405 virtual_p = virtual_operand_p (phi_result);
fd7ad53b 4406 if (TREE_CODE (phi_result) != SSA_NAME
4407 || (virtual_p
4408 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4409 {
4410 error ("invalid PHI result");
4411 err = true;
4412 }
4413
4414 for (i = 0; i < gimple_phi_num_args (phi); i++)
4415 {
4416 tree t = gimple_phi_arg_def (phi, i);
4417
4418 if (!t)
4419 {
4420 error ("missing PHI def");
4421 err |= true;
4422 continue;
4423 }
4424 /* Addressable variables do have SSA_NAMEs but they
4425 are not considered gimple values. */
4426 else if ((TREE_CODE (t) == SSA_NAME
7c782c9b 4427 && virtual_p != virtual_operand_p (t))
fd7ad53b 4428 || (virtual_p
4429 && (TREE_CODE (t) != SSA_NAME
4430 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4431 || (!virtual_p
4432 && !is_gimple_val (t)))
4433 {
4434 error ("invalid PHI argument");
4435 debug_generic_expr (t);
4436 err |= true;
4437 }
4438#ifdef ENABLE_TYPES_CHECKING
4439 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4440 {
4441 error ("incompatible types in PHI argument %u", i);
4442 debug_generic_stmt (TREE_TYPE (phi_result));
4443 debug_generic_stmt (TREE_TYPE (t));
4444 err |= true;
4445 }
4446#endif
4447 }
4448
4449 return err;
4450}
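/* Editorial note: a hypothetical source-level example (plain C, not GCC
   internals) of where a PHI node comes from.  After the function below is
   put into SSA form, the join point after the if/else gets a node roughly
   like
       x_3 = PHI <x_1, x_2>
   and verify_gimple_phi checks that each such argument is a GIMPLE value
   whose type is compatible with the PHI result.  */

int
pick (int flag, int a, int b)
{
  int x;
  if (flag)
    x = a;
  else
    x = b;
  return x;
}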
4451
75a70cf9 4452/* Verify the GIMPLE statements inside the sequence STMTS. */
f03e0ae4 4453
2f519297 4454static bool
fd7ad53b 4455verify_gimple_in_seq_2 (gimple_seq stmts)
f03e0ae4 4456{
75a70cf9 4457 gimple_stmt_iterator ittr;
2f519297 4458 bool err = false;
f03e0ae4 4459
75a70cf9 4460 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
f03e0ae4 4461 {
75a70cf9 4462 gimple stmt = gsi_stmt (ittr);
f03e0ae4 4463
75a70cf9 4464 switch (gimple_code (stmt))
4465 {
d266354c 4466 case GIMPLE_BIND:
fd7ad53b 4467 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
d266354c 4468 break;
4469
4470 case GIMPLE_TRY:
fd7ad53b 4471 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4472 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
d266354c 4473 break;
4474
4475 case GIMPLE_EH_FILTER:
fd7ad53b 4476 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
d266354c 4477 break;
4478
4c0315d0 4479 case GIMPLE_EH_ELSE:
4480 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4481 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4482 break;
4483
d266354c 4484 case GIMPLE_CATCH:
fd7ad53b 4485 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
d266354c 4486 break;
f03e0ae4 4487
4c0315d0 4488 case GIMPLE_TRANSACTION:
4489 err |= verify_gimple_transaction (stmt);
4490 break;
4491
f03e0ae4 4492 default:
2f519297 4493 {
fd7ad53b 4494 bool err2 = verify_gimple_stmt (stmt);
2f519297 4495 if (err2)
75a70cf9 4496 debug_gimple_stmt (stmt);
2f519297 4497 err |= err2;
4498 }
f03e0ae4 4499 }
4500 }
2f519297 4501
4502 return err;
4503}
4504
4c0315d0 4505/* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4506 is a problem, otherwise false. */
4507
4508static bool
4509verify_gimple_transaction (gimple stmt)
4510{
4511 tree lab = gimple_transaction_label (stmt);
4512 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4513 return true;
4514 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4515}
4516
2f519297 4517
4518/* Verify the GIMPLE statements inside the statement list STMTS. */
4519
fd7ad53b 4520DEBUG_FUNCTION void
4521verify_gimple_in_seq (gimple_seq stmts)
2f519297 4522{
fd7ad53b 4523 timevar_push (TV_TREE_STMT_VERIFY);
4524 if (verify_gimple_in_seq_2 (stmts))
2f519297 4525 internal_error ("verify_gimple failed");
fd7ad53b 4526 timevar_pop (TV_TREE_STMT_VERIFY);
f03e0ae4 4527}
4528
4ee9c684 4529/* Return true when the T can be shared. */
4530
7bfefa9d 4531bool
4ee9c684 4532tree_node_can_be_shared (tree t)
4533{
ce45a448 4534 if (IS_TYPE_OR_DECL_P (t)
4ee9c684 4535 || is_gimple_min_invariant (t)
9b63f7c4 4536 || TREE_CODE (t) == SSA_NAME
1e8e9920 4537 || t == error_mark_node
4538 || TREE_CODE (t) == IDENTIFIER_NODE)
4ee9c684 4539 return true;
4540
194ae074 4541 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4542 return true;
4543
6374121b 4544 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1e8e9920 4545 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4546 || TREE_CODE (t) == COMPONENT_REF
4547 || TREE_CODE (t) == REALPART_EXPR
4548 || TREE_CODE (t) == IMAGPART_EXPR)
4ee9c684 4549 t = TREE_OPERAND (t, 0);
4550
4551 if (DECL_P (t))
4552 return true;
4553
4554 return false;
4555}
4556
75a70cf9 4557/* Called via walk_gimple_stmt. Verify tree sharing. */
4ee9c684 4558
4559static tree
75a70cf9 4560verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4ee9c684 4561{
75a70cf9 4562 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4563 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4ee9c684 4564
4565 if (tree_node_can_be_shared (*tp))
4566 {
4567 *walk_subtrees = false;
4568 return NULL;
4569 }
4570
836074a5 4571 if (pointer_set_insert (visited, *tp))
4572 return *tp;
4ee9c684 4573
4574 return NULL;
4575}
4576
836074a5 4577static bool eh_error_found;
4578static int
4579verify_eh_throw_stmt_node (void **slot, void *data)
4580{
4581 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4582 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4583
4584 if (!pointer_set_contains (visited, node->stmt))
4585 {
bf776685 4586 error ("dead STMT in EH table");
75a70cf9 4587 debug_gimple_stmt (node->stmt);
836074a5 4588 eh_error_found = true;
4589 }
7ec44cf1 4590 return 1;
836074a5 4591}
4592
fd7ad53b 4593/* Verify the GIMPLE statements in the CFG of FN. */
4ee9c684 4594
4b987fac 4595DEBUG_FUNCTION void
fd7ad53b 4596verify_gimple_in_cfg (struct function *fn)
4ee9c684 4597{
4598 basic_block bb;
4ee9c684 4599 bool err = false;
836074a5 4600 struct pointer_set_t *visited, *visited_stmts;
4ee9c684 4601
4602 timevar_push (TV_TREE_STMT_VERIFY);
836074a5 4603 visited = pointer_set_create ();
4604 visited_stmts = pointer_set_create ();
4ee9c684 4605
fd7ad53b 4606 FOR_EACH_BB_FN (bb, fn)
4ee9c684 4607 {
fd7ad53b 4608 gimple_stmt_iterator gsi;
4ee9c684 4609
75a70cf9 4610 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 4611 {
fd7ad53b 4612 gimple phi = gsi_stmt (gsi);
4613 bool err2 = false;
4614 unsigned i;
4615
836074a5 4616 pointer_set_insert (visited_stmts, phi);
fd7ad53b 4617
75a70cf9 4618 if (gimple_bb (phi) != bb)
973bf48f 4619 {
75a70cf9 4620 error ("gimple_bb (phi) is set to a wrong basic block");
fd7ad53b 4621 err2 = true;
973bf48f 4622 }
4623
fd7ad53b 4624 err2 |= verify_gimple_phi (phi);
4625
75a70cf9 4626 for (i = 0; i < gimple_phi_num_args (phi); i++)
4ee9c684 4627 {
fd7ad53b 4628 tree arg = gimple_phi_arg_def (phi, i);
4629 tree addr = walk_tree (&arg, verify_node_sharing, visited, NULL);
4ee9c684 4630 if (addr)
4631 {
0a81f5a0 4632 error ("incorrect sharing of tree nodes");
75a70cf9 4633 debug_generic_expr (addr);
fd7ad53b 4634 err2 |= true;
4ee9c684 4635 }
4636 }
ae2a9b00 4637
fd7ad53b 4638 if (err2)
4639 debug_gimple_stmt (phi);
4640 err |= err2;
4ee9c684 4641 }
4642
fd7ad53b 4643 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 4644 {
75a70cf9 4645 gimple stmt = gsi_stmt (gsi);
fd7ad53b 4646 bool err2 = false;
4647 struct walk_stmt_info wi;
4648 tree addr;
4649 int lp_nr;
973bf48f 4650
836074a5 4651 pointer_set_insert (visited_stmts, stmt);
35cc02b5 4652
75a70cf9 4653 if (gimple_bb (stmt) != bb)
973bf48f 4654 {
75a70cf9 4655 error ("gimple_bb (stmt) is set to a wrong basic block");
fd7ad53b 4656 err2 = true;
973bf48f 4657 }
4658
fd7ad53b 4659 err2 |= verify_gimple_stmt (stmt);
4660
4661 memset (&wi, 0, sizeof (wi));
4662 wi.info = (void *) visited;
4663 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4664 if (addr)
75a70cf9 4665 {
fd7ad53b 4666 error ("incorrect sharing of tree nodes");
4667 debug_generic_expr (addr);
4668 err2 |= true;
4669 }
75a70cf9 4670
fd7ad53b 4671 /* ??? Instead of not checking these stmts at all the walker
4672 should know its context via wi. */
4673 if (!is_gimple_debug (stmt)
4674 && !is_gimple_omp (stmt))
4675 {
4676 memset (&wi, 0, sizeof (wi));
4677 addr = walk_gimple_op (stmt, verify_expr, &wi);
4678 if (addr)
75a70cf9 4679 {
fd7ad53b 4680 debug_generic_expr (addr);
4681 inform (gimple_location (stmt), "in statement");
4682 err2 |= true;
75a70cf9 4683 }
fd7ad53b 4684 }
e38def9c 4685
fd7ad53b 4686 /* If the statement is marked as part of an EH region, then it is
 4687 expected that the statement could throw. Verify that when
 4688 optimizations simplify statements such that we can prove they
 4689 cannot throw, we also update the other data structures to
 4690 match. */
4691 lp_nr = lookup_stmt_eh_lp (stmt);
4692 if (lp_nr != 0)
4693 {
4694 if (!stmt_could_throw_p (stmt))
e38def9c 4695 {
fd7ad53b 4696 error ("statement marked for throw, but doesn%'t");
4697 err2 |= true;
4698 }
4699 else if (lp_nr > 0
4700 && !gsi_one_before_end_p (gsi)
4701 && stmt_can_throw_internal (stmt))
4702 {
4703 error ("statement marked for throw in middle of block");
4704 err2 |= true;
e38def9c 4705 }
75a70cf9 4706 }
4707
fd7ad53b 4708 if (err2)
4709 debug_gimple_stmt (stmt);
4710 err |= err2;
4ee9c684 4711 }
4712 }
75a70cf9 4713
836074a5 4714 eh_error_found = false;
4715 if (get_eh_throw_stmt_table (cfun))
4716 htab_traverse (get_eh_throw_stmt_table (cfun),
4717 verify_eh_throw_stmt_node,
4718 visited_stmts);
4ee9c684 4719
fd7ad53b 4720 if (err || eh_error_found)
4721 internal_error ("verify_gimple failed");
4ee9c684 4722
836074a5 4723 pointer_set_destroy (visited);
4724 pointer_set_destroy (visited_stmts);
4992f399 4725 verify_histograms ();
4ee9c684 4726 timevar_pop (TV_TREE_STMT_VERIFY);
4727}
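/* Editorial note: a standalone sketch (arrays instead of the hash table
   and pointer set used above) of the stale-entry check performed by
   verify_eh_throw_stmt_node: every statement recorded in the EH throw
   table must also have been visited while walking the CFG, otherwise it
   is a dead entry.  */

#include <stdbool.h>
#include <stddef.h>

bool
eh_table_entries_live (void *const *eh_stmts, size_t n_eh,
		       void *const *visited, size_t n_visited)
{
  for (size_t i = 0; i < n_eh; i++)
    {
      bool found = false;
      for (size_t j = 0; j < n_visited; j++)
	if (eh_stmts[i] == visited[j])
	  {
	    found = true;
	    break;
	  }
      if (!found)
	return false;		/* corresponds to "dead STMT in EH table"  */
    }
  return true;
}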
4728
4729
4730/* Verifies that the flow information is OK. */
4731
4732static int
75a70cf9 4733gimple_verify_flow_info (void)
4ee9c684 4734{
4735 int err = 0;
4736 basic_block bb;
75a70cf9 4737 gimple_stmt_iterator gsi;
4738 gimple stmt;
4ee9c684 4739 edge e;
cd665a06 4740 edge_iterator ei;
4ee9c684 4741
924c4c71 4742 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4ee9c684 4743 {
c23dad79 4744 error ("ENTRY_BLOCK has IL associated with it");
4ee9c684 4745 err = 1;
4746 }
4747
924c4c71 4748 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4ee9c684 4749 {
c23dad79 4750 error ("EXIT_BLOCK has IL associated with it");
4ee9c684 4751 err = 1;
4752 }
4753
cd665a06 4754 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4ee9c684 4755 if (e->flags & EDGE_FALLTHRU)
4756 {
0a81f5a0 4757 error ("fallthru to exit from bb %d", e->src->index);
4ee9c684 4758 err = 1;
4759 }
4760
4761 FOR_EACH_BB (bb)
4762 {
4763 bool found_ctrl_stmt = false;
4764
75a70cf9 4765 stmt = NULL;
818dddca 4766
4ee9c684 4767 /* Skip labels on the start of basic block. */
75a70cf9 4768 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 4769 {
75a70cf9 4770 tree label;
4771 gimple prev_stmt = stmt;
818dddca 4772
75a70cf9 4773 stmt = gsi_stmt (gsi);
818dddca 4774
75a70cf9 4775 if (gimple_code (stmt) != GIMPLE_LABEL)
4ee9c684 4776 break;
4777
75a70cf9 4778 label = gimple_label_label (stmt);
4779 if (prev_stmt && DECL_NONLOCAL (label))
818dddca 4780 {
1e8e9920 4781 error ("nonlocal label ");
75a70cf9 4782 print_generic_expr (stderr, label, 0);
1e8e9920 4783 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4784 bb->index);
818dddca 4785 err = 1;
4786 }
4787
7b960d37 4788 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4789 {
4790 error ("EH landing pad label ");
4791 print_generic_expr (stderr, label, 0);
4792 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4793 bb->index);
4794 err = 1;
4795 }
4796
75a70cf9 4797 if (label_to_block (label) != bb)
4ee9c684 4798 {
1e8e9920 4799 error ("label ");
75a70cf9 4800 print_generic_expr (stderr, label, 0);
1e8e9920 4801 fprintf (stderr, " to block does not match in bb %d",
4802 bb->index);
4ee9c684 4803 err = 1;
4804 }
4805
75a70cf9 4806 if (decl_function_context (label) != current_function_decl)
4ee9c684 4807 {
1e8e9920 4808 error ("label ");
75a70cf9 4809 print_generic_expr (stderr, label, 0);
1e8e9920 4810 fprintf (stderr, " has incorrect context in bb %d",
4811 bb->index);
4ee9c684 4812 err = 1;
4813 }
4814 }
4815
 4816 /* Verify that the body of basic block BB is free of control flow. */
75a70cf9 4817 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 4818 {
75a70cf9 4819 gimple stmt = gsi_stmt (gsi);
4ee9c684 4820
4821 if (found_ctrl_stmt)
4822 {
0a81f5a0 4823 error ("control flow in the middle of basic block %d",
4ee9c684 4824 bb->index);
4825 err = 1;
4826 }
4827
4828 if (stmt_ends_bb_p (stmt))
4829 found_ctrl_stmt = true;
4830
75a70cf9 4831 if (gimple_code (stmt) == GIMPLE_LABEL)
4ee9c684 4832 {
1e8e9920 4833 error ("label ");
75a70cf9 4834 print_generic_expr (stderr, gimple_label_label (stmt), 0);
1e8e9920 4835 fprintf (stderr, " in the middle of basic block %d", bb->index);
4ee9c684 4836 err = 1;
4837 }
4838 }
1e8e9920 4839
75a70cf9 4840 gsi = gsi_last_bb (bb);
4841 if (gsi_end_p (gsi))
4ee9c684 4842 continue;
4843
75a70cf9 4844 stmt = gsi_stmt (gsi);
4ee9c684 4845
e38def9c 4846 if (gimple_code (stmt) == GIMPLE_LABEL)
4847 continue;
4848
b4ba5e9d 4849 err |= verify_eh_edges (stmt);
4850
4ee9c684 4851 if (is_ctrl_stmt (stmt))
4852 {
cd665a06 4853 FOR_EACH_EDGE (e, ei, bb->succs)
4ee9c684 4854 if (e->flags & EDGE_FALLTHRU)
4855 {
0a81f5a0 4856 error ("fallthru edge after a control statement in bb %d",
4ee9c684 4857 bb->index);
4858 err = 1;
4859 }
4860 }
4861
75a70cf9 4862 if (gimple_code (stmt) != GIMPLE_COND)
71cfcaa2 4863 {
4864 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
 4865 after anything other than an if statement. */
4866 FOR_EACH_EDGE (e, ei, bb->succs)
4867 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4868 {
75a70cf9 4869 error ("true/false edge after a non-GIMPLE_COND in bb %d",
71cfcaa2 4870 bb->index);
4871 err = 1;
4872 }
4873 }
4874
75a70cf9 4875 switch (gimple_code (stmt))
4ee9c684 4876 {
75a70cf9 4877 case GIMPLE_COND:
4ee9c684 4878 {
4879 edge true_edge;
4880 edge false_edge;
48e1416a 4881
4ee9c684 4882 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4883
75a70cf9 4884 if (!true_edge
4885 || !false_edge
4ee9c684 4886 || !(true_edge->flags & EDGE_TRUE_VALUE)
4887 || !(false_edge->flags & EDGE_FALSE_VALUE)
4888 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4889 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
cd665a06 4890 || EDGE_COUNT (bb->succs) >= 3)
4ee9c684 4891 {
0a81f5a0 4892 error ("wrong outgoing edge flags at end of bb %d",
4ee9c684 4893 bb->index);
4894 err = 1;
4895 }
4ee9c684 4896 }
4897 break;
4898
75a70cf9 4899 case GIMPLE_GOTO:
4ee9c684 4900 if (simple_goto_p (stmt))
4901 {
0a81f5a0 4902 error ("explicit goto at end of bb %d", bb->index);
13b96211 4903 err = 1;
4ee9c684 4904 }
4905 else
4906 {
13b96211 4907 /* FIXME. We should double check that the labels in the
4ee9c684 4908 destination blocks have their address taken. */
cd665a06 4909 FOR_EACH_EDGE (e, ei, bb->succs)
4ee9c684 4910 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4911 | EDGE_FALSE_VALUE))
4912 || !(e->flags & EDGE_ABNORMAL))
4913 {
0a81f5a0 4914 error ("wrong outgoing edge flags at end of bb %d",
4ee9c684 4915 bb->index);
4916 err = 1;
4917 }
4918 }
4919 break;
4920
3ea38c1f 4921 case GIMPLE_CALL:
4922 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4923 break;
4924 /* ... fallthru ... */
75a70cf9 4925 case GIMPLE_RETURN:
ea091dfd 4926 if (!single_succ_p (bb)
4927 || (single_succ_edge (bb)->flags
4928 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4929 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4ee9c684 4930 {
0a81f5a0 4931 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4ee9c684 4932 err = 1;
4933 }
ea091dfd 4934 if (single_succ (bb) != EXIT_BLOCK_PTR)
4ee9c684 4935 {
0a81f5a0 4936 error ("return edge does not point to exit in bb %d",
4ee9c684 4937 bb->index);
4938 err = 1;
4939 }
4940 break;
4941
75a70cf9 4942 case GIMPLE_SWITCH:
4ee9c684 4943 {
eb3b7946 4944 tree prev;
4ee9c684 4945 edge e;
4946 size_t i, n;
4ee9c684 4947
75a70cf9 4948 n = gimple_switch_num_labels (stmt);
4ee9c684 4949
4950 /* Mark all the destination basic blocks. */
4951 for (i = 0; i < n; ++i)
4952 {
75a70cf9 4953 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4ee9c684 4954 basic_block label_bb = label_to_block (lab);
8c0963c4 4955 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4ee9c684 4956 label_bb->aux = (void *)1;
4957 }
4958
eb3b7946 4959 /* Verify that the case labels are sorted. */
75a70cf9 4960 prev = gimple_switch_label (stmt, 0);
72c30859 4961 for (i = 1; i < n; ++i)
eb3b7946 4962 {
75a70cf9 4963 tree c = gimple_switch_label (stmt, i);
4964 if (!CASE_LOW (c))
eb3b7946 4965 {
75a70cf9 4966 error ("found default case not at the start of "
4967 "case vector");
4968 err = 1;
eb3b7946 4969 continue;
4970 }
75a70cf9 4971 if (CASE_LOW (prev)
4972 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
eb3b7946 4973 {
1e8e9920 4974 error ("case labels not sorted: ");
eb3b7946 4975 print_generic_expr (stderr, prev, 0);
4976 fprintf (stderr," is greater than ");
4977 print_generic_expr (stderr, c, 0);
4978 fprintf (stderr," but comes before it.\n");
4979 err = 1;
4980 }
4981 prev = c;
4982 }
72c30859 4983 /* VRP will remove the default case if it can prove it will
4984 never be executed. So do not verify there always exists
4985 a default case here. */
eb3b7946 4986
cd665a06 4987 FOR_EACH_EDGE (e, ei, bb->succs)
4ee9c684 4988 {
4989 if (!e->dest->aux)
4990 {
0a81f5a0 4991 error ("extra outgoing edge %d->%d",
4ee9c684 4992 bb->index, e->dest->index);
4993 err = 1;
4994 }
75a70cf9 4995
4ee9c684 4996 e->dest->aux = (void *)2;
4997 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4998 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4999 {
0a81f5a0 5000 error ("wrong outgoing edge flags at end of bb %d",
4ee9c684 5001 bb->index);
5002 err = 1;
5003 }
5004 }
5005
5006 /* Check that we have all of them. */
5007 for (i = 0; i < n; ++i)
5008 {
75a70cf9 5009 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4ee9c684 5010 basic_block label_bb = label_to_block (lab);
5011
5012 if (label_bb->aux != (void *)2)
5013 {
75a70cf9 5014 error ("missing edge %i->%i", bb->index, label_bb->index);
4ee9c684 5015 err = 1;
5016 }
5017 }
5018
cd665a06 5019 FOR_EACH_EDGE (e, ei, bb->succs)
4ee9c684 5020 e->dest->aux = (void *)0;
5021 }
e38def9c 5022 break;
5023
5024 case GIMPLE_EH_DISPATCH:
5025 err |= verify_eh_dispatch_edge (stmt);
5026 break;
4ee9c684 5027
e38def9c 5028 default:
5029 break;
4ee9c684 5030 }
5031 }
5032
50b08d37 5033 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4ee9c684 5034 verify_dominators (CDI_DOMINATORS);
5035
5036 return err;
5037}
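/* Editorial note: a standalone sketch (hypothetical flag bits, not the
   real edge flags) of the GIMPLE_COND successor invariant checked above:
   a conditional block ends with exactly two outgoing edges, one carrying
   the true flag and one the false flag, and neither may be a fallthru or
   abnormal edge.  */

#include <stdbool.h>
#include <stddef.h>

#define SK_TRUE_VALUE	1u
#define SK_FALSE_VALUE	2u
#define SK_FALLTHRU	4u
#define SK_ABNORMAL	8u

bool
cond_succ_edges_ok (const unsigned *succ_flags, size_t n_succs)
{
  if (n_succs != 2)
    return false;

  unsigned a = succ_flags[0], b = succ_flags[1];
  if ((a | b) & (SK_FALLTHRU | SK_ABNORMAL))
    return false;

  return ((a & SK_TRUE_VALUE) && (b & SK_FALSE_VALUE))
	 || ((a & SK_FALSE_VALUE) && (b & SK_TRUE_VALUE));
}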
5038
5039
ebeaefa4 5040/* Updates phi nodes after creating a forwarder block joined
4ee9c684 5041 by edge FALLTHRU. */
5042
5043static void
75a70cf9 5044gimple_make_forwarder_block (edge fallthru)
4ee9c684 5045{
5046 edge e;
cd665a06 5047 edge_iterator ei;
4ee9c684 5048 basic_block dummy, bb;
75a70cf9 5049 tree var;
5050 gimple_stmt_iterator gsi;
4ee9c684 5051
5052 dummy = fallthru->src;
5053 bb = fallthru->dest;
5054
ea091dfd 5055 if (single_pred_p (bb))
4ee9c684 5056 return;
5057
de6ed584 5058 /* If we redirected a branch we must create new PHI nodes at the
4ee9c684 5059 start of BB. */
75a70cf9 5060 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 5061 {
75a70cf9 5062 gimple phi, new_phi;
48e1416a 5063
75a70cf9 5064 phi = gsi_stmt (gsi);
5065 var = gimple_phi_result (phi);
4ee9c684 5066 new_phi = create_phi_node (var, bb);
874117c8 5067 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
48e1416a 5068 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
60d535d2 5069 UNKNOWN_LOCATION);
4ee9c684 5070 }
5071
4ee9c684 5072 /* Add the arguments we have stored on edges. */
cd665a06 5073 FOR_EACH_EDGE (e, ei, bb->preds)
4ee9c684 5074 {
5075 if (e == fallthru)
5076 continue;
5077
44a46103 5078 flush_pending_stmts (e);
4ee9c684 5079 }
5080}
5081
5082
4ee9c684 5083/* Return a non-special label in the head of basic block BLOCK.
5084 Create one if it doesn't exist. */
5085
3aaaf63f 5086tree
75a70cf9 5087gimple_block_label (basic_block bb)
4ee9c684 5088{
75a70cf9 5089 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4ee9c684 5090 bool first = true;
75a70cf9 5091 tree label;
5092 gimple stmt;
4ee9c684 5093
75a70cf9 5094 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4ee9c684 5095 {
75a70cf9 5096 stmt = gsi_stmt (i);
5097 if (gimple_code (stmt) != GIMPLE_LABEL)
4ee9c684 5098 break;
75a70cf9 5099 label = gimple_label_label (stmt);
4ee9c684 5100 if (!DECL_NONLOCAL (label))
5101 {
5102 if (!first)
75a70cf9 5103 gsi_move_before (&i, &s);
4ee9c684 5104 return label;
5105 }
5106 }
5107
e60a6f7b 5108 label = create_artificial_label (UNKNOWN_LOCATION);
75a70cf9 5109 stmt = gimple_build_label (label);
5110 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4ee9c684 5111 return label;
5112}
5113
5114
5115/* Attempt to perform edge redirection by replacing a possibly complex
5116 jump instruction by a goto or by removing the jump completely.
5117 This can apply only if all edges now point to the same block. The
5118 parameters and return values are equivalent to
5119 redirect_edge_and_branch. */
5120
5121static edge
75a70cf9 5122gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4ee9c684 5123{
5124 basic_block src = e->src;
75a70cf9 5125 gimple_stmt_iterator i;
5126 gimple stmt;
4ee9c684 5127
7fca6166 5128 /* We can replace or remove a complex jump only when we have exactly
5129 two edges. */
5130 if (EDGE_COUNT (src->succs) != 2
5131 /* Verify that all targets will be TARGET. Specifically, the
5132 edge that is not E must also go to TARGET. */
5133 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4ee9c684 5134 return NULL;
5135
75a70cf9 5136 i = gsi_last_bb (src);
5137 if (gsi_end_p (i))
4ee9c684 5138 return NULL;
4ee9c684 5139
75a70cf9 5140 stmt = gsi_stmt (i);
5141
5142 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4ee9c684 5143 {
75a70cf9 5144 gsi_remove (&i, true);
4ee9c684 5145 e = ssa_redirect_edge (e, target);
5146 e->flags = EDGE_FALLTHRU;
5147 return e;
5148 }
5149
5150 return NULL;
5151}
5152
5153
5154/* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5155 edge representing the redirected branch. */
5156
5157static edge
75a70cf9 5158gimple_redirect_edge_and_branch (edge e, basic_block dest)
4ee9c684 5159{
5160 basic_block bb = e->src;
75a70cf9 5161 gimple_stmt_iterator gsi;
4ee9c684 5162 edge ret;
75a70cf9 5163 gimple stmt;
4ee9c684 5164
2c8a1497 5165 if (e->flags & EDGE_ABNORMAL)
4ee9c684 5166 return NULL;
5167
4ee9c684 5168 if (e->dest == dest)
5169 return NULL;
5170
927a6b6b 5171 if (e->flags & EDGE_EH)
5172 return redirect_eh_edge (e, dest);
5173
7055eea1 5174 if (e->src != ENTRY_BLOCK_PTR)
5175 {
5176 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5177 if (ret)
5178 return ret;
5179 }
5180
75a70cf9 5181 gsi = gsi_last_bb (bb);
5182 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4ee9c684 5183
6163a125 5184 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
4ee9c684 5185 {
75a70cf9 5186 case GIMPLE_COND:
63f88450 5187 /* For COND_EXPR, we only need to redirect the edge. */
4ee9c684 5188 break;
5189
75a70cf9 5190 case GIMPLE_GOTO:
4ee9c684 5191 /* No non-abnormal edges should lead from a non-simple goto, and
5192 simple ones should be represented implicitly. */
8c0963c4 5193 gcc_unreachable ();
4ee9c684 5194
75a70cf9 5195 case GIMPLE_SWITCH:
4ee9c684 5196 {
75a70cf9 5197 tree label = gimple_block_label (dest);
6ff867cc 5198 tree cases = get_cases_for_edge (e, stmt);
4ee9c684 5199
6ff867cc 5200 /* If we have a list of cases associated with E, then use it
5201 as it's a lot faster than walking the entire case vector. */
5202 if (cases)
4ee9c684 5203 {
d5ec53c7 5204 edge e2 = find_edge (e->src, dest);
6ff867cc 5205 tree last, first;
5206
5207 first = cases;
5208 while (cases)
5209 {
5210 last = cases;
5211 CASE_LABEL (cases) = label;
edb0f60d 5212 cases = CASE_CHAIN (cases);
6ff867cc 5213 }
5214
5215 /* If there was already an edge in the CFG, then we need
5216 to move all the cases associated with E to E2. */
5217 if (e2)
5218 {
5219 tree cases2 = get_cases_for_edge (e2, stmt);
5220
edb0f60d 5221 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5222 CASE_CHAIN (cases2) = first;
6ff867cc 5223 }
836a3d81 5224 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
4ee9c684 5225 }
194ae074 5226 else
5227 {
75a70cf9 5228 size_t i, n = gimple_switch_num_labels (stmt);
6ff867cc 5229
5230 for (i = 0; i < n; i++)
5231 {
75a70cf9 5232 tree elt = gimple_switch_label (stmt, i);
6ff867cc 5233 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5234 CASE_LABEL (elt) = label;
5235 }
194ae074 5236 }
78f55ca8 5237 }
5238 break;
6ff867cc 5239
78f55ca8 5240 case GIMPLE_ASM:
5241 {
5242 int i, n = gimple_asm_nlabels (stmt);
1a9393e0 5243 tree label = NULL;
78f55ca8 5244
5245 for (i = 0; i < n; ++i)
5246 {
5247 tree cons = gimple_asm_label_op (stmt, i);
5248 if (label_to_block (TREE_VALUE (cons)) == e->dest)
1a9393e0 5249 {
5250 if (!label)
5251 label = gimple_block_label (dest);
5252 TREE_VALUE (cons) = label;
5253 }
78f55ca8 5254 }
1a9393e0 5255
5256 /* If we didn't find any label matching the former edge in the
5257 asm labels, we must be redirecting the fallthrough
5258 edge. */
5259 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
4ee9c684 5260 }
78f55ca8 5261 break;
4ee9c684 5262
75a70cf9 5263 case GIMPLE_RETURN:
5264 gsi_remove (&gsi, true);
4ee9c684 5265 e->flags |= EDGE_FALLTHRU;
5266 break;
5267
75a70cf9 5268 case GIMPLE_OMP_RETURN:
5269 case GIMPLE_OMP_CONTINUE:
5270 case GIMPLE_OMP_SECTIONS_SWITCH:
5271 case GIMPLE_OMP_FOR:
ac6e3339 5272 /* The edges from OMP constructs can be simply redirected. */
5273 break;
5274
e38def9c 5275 case GIMPLE_EH_DISPATCH:
5276 if (!(e->flags & EDGE_FALLTHRU))
5277 redirect_eh_dispatch_edge (stmt, e, dest);
5278 break;
5279
4c0315d0 5280 case GIMPLE_TRANSACTION:
5281 /* The ABORT edge has a stored label associated with it, otherwise
5282 the edges are simply redirectable. */
5283 if (e->flags == 0)
5284 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5285 break;
5286
4ee9c684 5287 default:
5288 /* Otherwise it must be a fallthru edge, and we don't need to
5289 do anything besides redirecting it. */
8c0963c4 5290 gcc_assert (e->flags & EDGE_FALLTHRU);
4ee9c684 5291 break;
5292 }
5293
5294 /* Update/insert PHI nodes as necessary. */
5295
5296 /* Now update the edges in the CFG. */
5297 e = ssa_redirect_edge (e, dest);
5298
5299 return e;
5300}
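/* Editorial note: a standalone sketch (hypothetical node type, not
   CASE_LABEL_EXPR trees) of the list splice performed in the GIMPLE_SWITCH
   case above when an edge E2 to the destination already exists: the whole
   chain FIRST..LAST of retargeted cases is linked into E2's existing
   chain just after its head.  */

struct case_node
{
  struct case_node *chain;
};

void
splice_case_chain (struct case_node *first, struct case_node *last,
		   struct case_node *cases2)
{
  last->chain = cases2->chain;
  cases2->chain = first;
}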
5301
611d2ac1 5302/* Returns true if it is possible to remove edge E by redirecting
5303 it to the destination of the other edge from E->src. */
5304
5305static bool
75a70cf9 5306gimple_can_remove_branch_p (const_edge e)
611d2ac1 5307{
3d1eacdb 5308 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
611d2ac1 5309 return false;
5310
5311 return true;
5312}
4ee9c684 5313
5314/* Simple wrapper, as we can always redirect fallthru edges. */
5315
5316static basic_block
75a70cf9 5317gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4ee9c684 5318{
75a70cf9 5319 e = gimple_redirect_edge_and_branch (e, dest);
8c0963c4 5320 gcc_assert (e);
4ee9c684 5321
5322 return NULL;
5323}
5324
5325
5326/* Splits basic block BB after statement STMT (but at least after the
5327 labels). If STMT is NULL, BB is split just after the labels. */
5328
5329static basic_block
75a70cf9 5330gimple_split_block (basic_block bb, void *stmt)
4ee9c684 5331{
75a70cf9 5332 gimple_stmt_iterator gsi;
5333 gimple_stmt_iterator gsi_tgt;
5334 gimple act;
5335 gimple_seq list;
4ee9c684 5336 basic_block new_bb;
5337 edge e;
cd665a06 5338 edge_iterator ei;
4ee9c684 5339
5340 new_bb = create_empty_bb (bb);
5341
5342 /* Redirect the outgoing edges. */
cd665a06 5343 new_bb->succs = bb->succs;
5344 bb->succs = NULL;
5345 FOR_EACH_EDGE (e, ei, new_bb->succs)
4ee9c684 5346 e->src = new_bb;
5347
75a70cf9 5348 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
4ee9c684 5349 stmt = NULL;
5350
75a70cf9 5351 /* Move everything from GSI to the new basic block. */
5352 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 5353 {
75a70cf9 5354 act = gsi_stmt (gsi);
5355 if (gimple_code (act) == GIMPLE_LABEL)
4ee9c684 5356 continue;
5357
5358 if (!stmt)
5359 break;
5360
5361 if (stmt == act)
5362 {
75a70cf9 5363 gsi_next (&gsi);
4ee9c684 5364 break;
5365 }
5366 }
5367
75a70cf9 5368 if (gsi_end_p (gsi))
2f30597a 5369 return new_bb;
5370
5371 /* Split the statement list - avoid re-creating new containers as this
48e1416a 5372 brings ugly quadratic memory consumption in the inliner.
2f30597a 5373 (We are still quadratic since we need to update stmt BB pointers,
5374 sadly.) */
e3a19533 5375 gsi_split_seq_before (&gsi, &list);
75a70cf9 5376 set_bb_seq (new_bb, list);
5377 for (gsi_tgt = gsi_start (list);
5378 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5379 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
4ee9c684 5380
5381 return new_bb;
5382}
5383
5384
5385/* Moves basic block BB after block AFTER. */
5386
5387static bool
75a70cf9 5388gimple_move_block_after (basic_block bb, basic_block after)
4ee9c684 5389{
5390 if (bb->prev_bb == after)
5391 return true;
5392
5393 unlink_block (bb);
5394 link_block (bb, after);
5395
5396 return true;
5397}
5398
5399
9631926a 5400/* Return TRUE if block BB has no executable statements, otherwise return
5401 FALSE. */
5402
5403bool
5404gimple_empty_block_p (basic_block bb)
5405{
5406 /* BB must have no executable statements. */
5407 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5408 if (phi_nodes (bb))
5409 return false;
5410 if (gsi_end_p (gsi))
5411 return true;
5412 if (is_gimple_debug (gsi_stmt (gsi)))
5413 gsi_next_nondebug (&gsi);
5414 return gsi_end_p (gsi);
5415}
5416
5417
5418/* Split a basic block if it ends with a conditional branch and if the
5419 other part of the block is not empty. */
5420
5421static basic_block
5422gimple_split_block_before_cond_jump (basic_block bb)
5423{
5424 gimple last, split_point;
5425 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5426 if (gsi_end_p (gsi))
5427 return NULL;
5428 last = gsi_stmt (gsi);
5429 if (gimple_code (last) != GIMPLE_COND
5430 && gimple_code (last) != GIMPLE_SWITCH)
5431 return NULL;
5432 gsi_prev_nondebug (&gsi);
5433 split_point = gsi_stmt (gsi);
5434 return split_block (bb, split_point)->dest;
5435}
5436
5437
4ee9c684 5438/* Return true if basic_block can be duplicated. */
5439
5440static bool
d8edefd7 5441gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
4ee9c684 5442{
5443 return true;
5444}
5445
4ee9c684 5446/* Create a duplicate of the basic block BB. NOTE: This does not
5447 preserve SSA form. */
5448
5449static basic_block
75a70cf9 5450gimple_duplicate_bb (basic_block bb)
4ee9c684 5451{
5452 basic_block new_bb;
75a70cf9 5453 gimple_stmt_iterator gsi, gsi_tgt;
5454 gimple_seq phis = phi_nodes (bb);
5455 gimple phi, stmt, copy;
4ee9c684 5456
5457 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
deb78f9e 5458
095dcfa3 5459 /* Copy the PHI nodes. We ignore PHI node arguments here because
5460 the incoming edges have not been setup yet. */
75a70cf9 5461 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
deb78f9e 5462 {
75a70cf9 5463 phi = gsi_stmt (gsi);
9c06f260 5464 copy = create_phi_node (NULL_TREE, new_bb);
5465 create_new_def_for (gimple_phi_result (phi), copy,
75a70cf9 5466 gimple_phi_result_ptr (copy));
deb78f9e 5467 }
095dcfa3 5468
75a70cf9 5469 gsi_tgt = gsi_start_bb (new_bb);
5470 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 5471 {
095dcfa3 5472 def_operand_p def_p;
5473 ssa_op_iter op_iter;
c7c68014 5474 tree lhs;
4ee9c684 5475
75a70cf9 5476 stmt = gsi_stmt (gsi);
5477 if (gimple_code (stmt) == GIMPLE_LABEL)
4ee9c684 5478 continue;
5479
63f5ad44 5480 /* Don't duplicate label debug stmts. */
5481 if (gimple_debug_bind_p (stmt)
5482 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5483 == LABEL_DECL)
5484 continue;
5485
095dcfa3 5486 /* Create a new copy of STMT and duplicate STMT's virtual
5487 operands. */
75a70cf9 5488 copy = gimple_copy (stmt);
5489 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
e38def9c 5490
5491 maybe_duplicate_eh_stmt (copy, stmt);
4992f399 5492 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
095dcfa3 5493
c7c68014 5494 /* When copying around a stmt writing into a local non-user
5495 aggregate, make sure it won't share stack slot with other
5496 vars. */
5497 lhs = gimple_get_lhs (stmt);
5498 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5499 {
5500 tree base = get_base_address (lhs);
5501 if (base
5502 && (TREE_CODE (base) == VAR_DECL
5503 || TREE_CODE (base) == RESULT_DECL)
5504 && DECL_IGNORED_P (base)
5505 && !TREE_STATIC (base)
5506 && !DECL_EXTERNAL (base)
5507 && (TREE_CODE (base) != VAR_DECL
5508 || !DECL_HAS_VALUE_EXPR_P (base)))
5509 DECL_NONSHAREABLE (base) = 1;
5510 }
5511
095dcfa3 5512 /* Create new names for all the definitions created by COPY and
5513 add replacement mappings for each new name. */
5514 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5515 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4ee9c684 5516 }
5517
5518 return new_bb;
5519}
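
/* Illustrative note: gimple_duplicate_bb is the low-level copying
   routine.  The copy's PHI nodes receive fresh result names (via
   create_new_def_for) but no arguments, because the incoming edges
   of the copy do not exist yet.  A caller is expected to create or
   redirect those edges and only then fill in the PHI arguments,
   roughly (sketch, error handling omitted):

     ...create/redirect the edges into the copied blocks...
     add_phi_args_after_copy (region_copy, n_region, NULL);

   which is what gimple_duplicate_sese_region below does; a caller
   that needs a consistent SSA web afterwards must also run
   update_ssa, as gimple_duplicate_sese_tail does.  */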
5520
28c92cbb 5521/* Adds phi node arguments for edge E_COPY after basic block duplication. */
5522
5523static void
5524add_phi_args_after_copy_edge (edge e_copy)
5525{
5526 basic_block bb, bb_copy = e_copy->src, dest;
5527 edge e;
5528 edge_iterator ei;
75a70cf9 5529 gimple phi, phi_copy;
5530 tree def;
5531 gimple_stmt_iterator psi, psi_copy;
28c92cbb 5532
75a70cf9 5533 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
28c92cbb 5534 return;
5535
5536 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5537
5538 if (e_copy->dest->flags & BB_DUPLICATED)
5539 dest = get_bb_original (e_copy->dest);
5540 else
5541 dest = e_copy->dest;
5542
5543 e = find_edge (bb, dest);
5544 if (!e)
5545 {
5546 /* During loop unrolling the target of the latch edge is copied.
5547	 In this case we are not looking for the edge to dest, but for
5548	 the edge to the duplicated block whose original was dest.  */
5549 FOR_EACH_EDGE (e, ei, bb->succs)
5550 {
5551 if ((e->dest->flags & BB_DUPLICATED)
5552 && get_bb_original (e->dest) == dest)
5553 break;
5554 }
5555
5556 gcc_assert (e != NULL);
5557 }
5558
75a70cf9 5559 for (psi = gsi_start_phis (e->dest),
5560 psi_copy = gsi_start_phis (e_copy->dest);
5561 !gsi_end_p (psi);
5562 gsi_next (&psi), gsi_next (&psi_copy))
28c92cbb 5563 {
75a70cf9 5564 phi = gsi_stmt (psi);
5565 phi_copy = gsi_stmt (psi_copy);
28c92cbb 5566 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
48e1416a 5567 add_phi_arg (phi_copy, def, e_copy,
60d535d2 5568 gimple_phi_arg_location_from_edge (phi, e));
28c92cbb 5569 }
5570}
5571
095dcfa3 5572
d8b5b4fe 5573/* Basic block BB_COPY was created by code duplication. Add phi node
5574 arguments for edges going out of BB_COPY. The blocks that were
01020a5f 5575 duplicated have BB_DUPLICATED set. */
d8b5b4fe 5576
5577void
5578add_phi_args_after_copy_bb (basic_block bb_copy)
5579{
28c92cbb 5580 edge e_copy;
75a70cf9 5581 edge_iterator ei;
d8b5b4fe 5582
cd665a06 5583 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
d8b5b4fe 5584 {
28c92cbb 5585 add_phi_args_after_copy_edge (e_copy);
d8b5b4fe 5586 }
5587}
5588
5589/* Blocks in REGION_COPY array of length N_REGION were created by
5590 duplication of basic blocks. Add phi node arguments for edges
28c92cbb 5591 going from these blocks. If E_COPY is not NULL, also add
5592   phi node arguments for its destination.  */
d8b5b4fe 5593
5594void
28c92cbb 5595add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5596 edge e_copy)
d8b5b4fe 5597{
5598 unsigned i;
5599
5600 for (i = 0; i < n_region; i++)
01020a5f 5601 region_copy[i]->flags |= BB_DUPLICATED;
d8b5b4fe 5602
5603 for (i = 0; i < n_region; i++)
5604 add_phi_args_after_copy_bb (region_copy[i]);
28c92cbb 5605 if (e_copy)
5606 add_phi_args_after_copy_edge (e_copy);
d8b5b4fe 5607
5608 for (i = 0; i < n_region; i++)
01020a5f 5609 region_copy[i]->flags &= ~BB_DUPLICATED;
d8b5b4fe 5610}
5611
d8b5b4fe 5612/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5613 important exit edge EXIT. By important we mean that no SSA name defined
5614   inside the region is live over the other exit edges of the region.  All entry
5615 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
8cb0acff 5616 to the duplicate of the region. Dominance and loop information is
5617 updated, but not the SSA web. The new basic blocks are stored to
5618   REGION_COPY in the same order they had in REGION, provided that
5619 REGION_COPY is not NULL.
d8b5b4fe 5620 The function returns false if it is unable to copy the region,
5621 true otherwise. */
5622
5623bool
75a70cf9 5624gimple_duplicate_sese_region (edge entry, edge exit,
d8b5b4fe 5625 basic_block *region, unsigned n_region,
5626 basic_block *region_copy)
5627{
3f9439d7 5628 unsigned i;
d8b5b4fe 5629 bool free_region_copy = false, copying_header = false;
5630 struct loop *loop = entry->dest->loop_father;
5631 edge exit_copy;
f1f41a6c 5632 vec<basic_block> doms;
d8b5b4fe 5633 edge redirected;
f57c928a 5634 int total_freq = 0, entry_freq = 0;
5635 gcov_type total_count = 0, entry_count = 0;
d8b5b4fe 5636
5637 if (!can_copy_bbs_p (region, n_region))
5638 return false;
5639
5640 /* Some sanity checking. Note that we do not check for all possible
5641     misuses of the functions.  I.e. if you ask to copy something weird,
5642     it will work, but the state of the structures probably will not be
5643 correct. */
d8b5b4fe 5644 for (i = 0; i < n_region; i++)
5645 {
5646 /* We do not handle subloops, i.e. all the blocks must belong to the
5647 same loop. */
5648 if (region[i]->loop_father != loop)
5649 return false;
5650
5651 if (region[i] != entry->dest
5652 && region[i] == loop->header)
5653 return false;
5654 }
5655
96c90e5e 5656 set_loop_copy (loop, loop);
d8b5b4fe 5657
5658 /* In case the function is used for loop header copying (which is the primary
5659 use), ensure that EXIT and its copy will be new latch and entry edges. */
5660 if (loop->header == entry->dest)
5661 {
5662 copying_header = true;
96c90e5e 5663 set_loop_copy (loop, loop_outer (loop));
d8b5b4fe 5664
5665 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5666 return false;
5667
5668 for (i = 0; i < n_region; i++)
5669 if (region[i] != exit->src
5670 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5671 return false;
5672 }
5673
5674 if (!region_copy)
5675 {
680a19b9 5676 region_copy = XNEWVEC (basic_block, n_region);
d8b5b4fe 5677 free_region_copy = true;
5678 }
5679
2625ff05 5680 /* Record blocks outside the region that are dominated by something
d8b5b4fe 5681 inside. */
f1f41a6c 5682 doms.create (0);
01020a5f 5683 initialize_original_copy_tables ();
5684
3f9439d7 5685 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
d8b5b4fe 5686
f57c928a 5687 if (entry->dest->count)
5688 {
5689 total_count = entry->dest->count;
5690 entry_count = entry->count;
5691 /* Fix up corner cases, to avoid division by zero or creation of negative
5692 frequencies. */
5693 if (entry_count > total_count)
5694 entry_count = total_count;
5695 }
5696 else
5697 {
5698 total_freq = entry->dest->frequency;
5699 entry_freq = EDGE_FREQUENCY (entry);
5700 /* Fix up corner cases, to avoid division by zero or creation of negative
5701 frequencies. */
5702 if (total_freq == 0)
5703 total_freq = 1;
5704 else if (entry_freq > total_freq)
5705 entry_freq = total_freq;
5706 }
2625ff05 5707
c4d867e0 5708 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5709 split_edge_bb_loc (entry));
f57c928a 5710 if (total_count)
5711 {
5712 scale_bbs_frequencies_gcov_type (region, n_region,
5713 total_count - entry_count,
5714 total_count);
5715 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
13b96211 5716 total_count);
f57c928a 5717 }
5718 else
5719 {
5720 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5721 total_freq);
5722 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5723 }
d8b5b4fe 5724
5725 if (copying_header)
5726 {
5727 loop->header = exit->dest;
5728 loop->latch = exit->src;
5729 }
5730
5731 /* Redirect the entry and add the phi node arguments. */
01020a5f 5732 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
d8b5b4fe 5733 gcc_assert (redirected != NULL);
44a46103 5734 flush_pending_stmts (entry);
d8b5b4fe 5735
5736 /* Concerning updating of dominators: We must recount dominators
095dcfa3 5737 for entry block and its copy. Anything that is outside of the
5738 region, but was dominated by something inside needs recounting as
5739 well. */
d8b5b4fe 5740 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
f1f41a6c 5741 doms.safe_push (get_bb_original (entry->dest));
3f9439d7 5742 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
f1f41a6c 5743 doms.release ();
d8b5b4fe 5744
095dcfa3 5745 /* Add the other PHI node arguments. */
28c92cbb 5746 add_phi_args_after_copy (region_copy, n_region, NULL);
5747
28c92cbb 5748 if (free_region_copy)
5749 free (region_copy);
5750
5751 free_original_copy_tables ();
5752 return true;
5753}
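
/* A sketch of the primary use, loop header copying (illustrative
   only; the feasibility checks a real caller performs are omitted,
   and EXIT is assumed to be the edge leaving the header into the
   loop body):

     basic_block header = loop->header;
     edge entry = loop_preheader_edge (loop);
     edge exit = ...edge from HEADER into the loop body...;

     if (gimple_duplicate_sese_region (entry, exit, &header, 1, NULL))
       ...a copy of HEADER now guards the loop; EXIT and its copy
          became the new latch and entry edges...

   Note that only the PHI arguments are fixed up here; the SSA web
   itself is not updated.  */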
5754
89675e8c 5755/* Checks if BB is part of the region defined by N_REGION BBS. */
5756static bool
5757bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5758{
5759 unsigned int n;
5760
5761 for (n = 0; n < n_region; n++)
5762 {
5763 if (bb == bbs[n])
5764 return true;
5765 }
5766 return false;
5767}
5768
28c92cbb 5769/* Duplicates REGION consisting of N_REGION blocks. The new blocks
5770   are stored to REGION_COPY in the same order as they appear
5771 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5772 the region, EXIT an exit from it. The condition guarding EXIT
5773 is moved to ENTRY. Returns true if duplication succeeds, false
5774 otherwise.
5775
48e1416a 5776 For example,
5777
28c92cbb 5778 some_code;
5779 if (cond)
5780 A;
5781 else
5782 B;
5783
5784 is transformed to
5785
5786 if (cond)
5787 {
5788 some_code;
5789 A;
5790 }
5791 else
5792 {
5793 some_code;
5794 B;
5795 }
5796*/
5797
5798bool
75a70cf9 5799gimple_duplicate_sese_tail (edge entry, edge exit,
5800			    basic_block *region, unsigned n_region,
5801			    basic_block *region_copy)
28c92cbb 5802{
5803 unsigned i;
5804 bool free_region_copy = false;
5805 struct loop *loop = exit->dest->loop_father;
5806 struct loop *orig_loop = entry->dest->loop_father;
5807 basic_block switch_bb, entry_bb, nentry_bb;
f1f41a6c 5808 vec<basic_block> doms;
28c92cbb 5809 int total_freq = 0, exit_freq = 0;
5810 gcov_type total_count = 0, exit_count = 0;
5811 edge exits[2], nexits[2], e;
2a556654 5812 gimple_stmt_iterator gsi;
75a70cf9 5813 gimple cond_stmt;
fbbe5b51 5814 edge sorig, snew;
b0fb253a 5815 basic_block exit_bb;
fbbe5b51 5816 gimple_stmt_iterator psi;
5817 gimple phi;
5818 tree def;
89675e8c 5819 struct loop *target, *aloop, *cloop;
28c92cbb 5820
5821 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5822 exits[0] = exit;
5823 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5824
5825 if (!can_copy_bbs_p (region, n_region))
5826 return false;
5827
28c92cbb 5828 initialize_original_copy_tables ();
5829 set_loop_copy (orig_loop, loop);
89675e8c 5830
5831  target = loop;
5832 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5833 {
5834 if (bb_part_of_region_p (aloop->header, region, n_region))
5835 {
5836 cloop = duplicate_loop (aloop, target);
5837 duplicate_subloops (aloop, cloop);
5838 }
5839 }
28c92cbb 5840
5841 if (!region_copy)
5842 {
5843 region_copy = XNEWVEC (basic_block, n_region);
5844 free_region_copy = true;
5845 }
5846
dd277d48 5847 gcc_assert (!need_ssa_update_p (cfun));
28c92cbb 5848
5849 /* Record blocks outside the region that are dominated by something
5850 inside. */
5851 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5852
5853 if (exit->src->count)
5854 {
5855 total_count = exit->src->count;
5856 exit_count = exit->count;
5857 /* Fix up corner cases, to avoid division by zero or creation of negative
5858 frequencies. */
5859 if (exit_count > total_count)
5860 exit_count = total_count;
5861 }
5862 else
5863 {
5864 total_freq = exit->src->frequency;
5865 exit_freq = EDGE_FREQUENCY (exit);
5866 /* Fix up corner cases, to avoid division by zero or creation of negative
5867 frequencies. */
5868 if (total_freq == 0)
5869 total_freq = 1;
5870 if (exit_freq > total_freq)
5871 exit_freq = total_freq;
5872 }
5873
5874 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5875 split_edge_bb_loc (exit));
5876 if (total_count)
5877 {
5878 scale_bbs_frequencies_gcov_type (region, n_region,
5879 total_count - exit_count,
5880 total_count);
5881 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5882 total_count);
5883 }
5884 else
5885 {
5886 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5887 total_freq);
5888 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5889 }
5890
5891 /* Create the switch block, and put the exit condition to it. */
5892 entry_bb = entry->dest;
5893 nentry_bb = get_bb_copy (entry_bb);
5894 if (!last_stmt (entry->src)
5895 || !stmt_ends_bb_p (last_stmt (entry->src)))
5896 switch_bb = entry->src;
5897 else
5898 switch_bb = split_edge (entry);
5899 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5900
75a70cf9 5901 gsi = gsi_last_bb (switch_bb);
5902 cond_stmt = last_stmt (exit->src);
5903 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5904 cond_stmt = gimple_copy (cond_stmt);
48e1416a 5905
75a70cf9 5906 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
28c92cbb 5907
5908 sorig = single_succ_edge (switch_bb);
5909 sorig->flags = exits[1]->flags;
5910 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5911
5912 /* Register the new edge from SWITCH_BB in loop exit lists. */
5913 rescan_loop_exit (snew, true, false);
5914
5915 /* Add the PHI node arguments. */
5916 add_phi_args_after_copy (region_copy, n_region, snew);
48e1416a 5917
28c92cbb 5918 /* Get rid of now superfluous conditions and associated edges (and phi node
5919 arguments). */
b0fb253a 5920 exit_bb = exit->dest;
48e1416a 5921
28c92cbb 5922 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
75a70cf9 5923 PENDING_STMT (e) = NULL;
48e1416a 5924
fbbe5b51 5925 /* The latch of ORIG_LOOP was copied, and so was the backedge
5926 to the original header. We redirect this backedge to EXIT_BB. */
b0fb253a 5927 for (i = 0; i < n_region; i++)
fbbe5b51 5928 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5929 {
5930 gcc_assert (single_succ_edge (region_copy[i]));
5931 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5932 PENDING_STMT (e) = NULL;
5933 for (psi = gsi_start_phis (exit_bb);
5934 !gsi_end_p (psi);
5935 gsi_next (&psi))
5936 {
5937 phi = gsi_stmt (psi);
5938 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
60d535d2 5939 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
fbbe5b51 5940 }
5941 }
89675e8c 5942 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
fbbe5b51 5943 PENDING_STMT (e) = NULL;
5944
28c92cbb 5945 /* Anything that is outside of the region, but was dominated by something
5946 inside needs to update dominance info. */
5947 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
f1f41a6c 5948 doms.release ();
095dcfa3 5949 /* Update the SSA web. */
5950 update_ssa (TODO_update_ssa);
cca979c6 5951
d8b5b4fe 5952 if (free_region_copy)
5953 free (region_copy);
cca979c6 5954
01020a5f 5955 free_original_copy_tables ();
d8b5b4fe 5956 return true;
5957}
4ee9c684 5958
773c5ba7 5959/* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5960 adding blocks when the dominator traversal reaches EXIT. This
5961 function silently assumes that ENTRY strictly dominates EXIT. */
5962
e06f9c34 5963void
773c5ba7 5964gather_blocks_in_sese_region (basic_block entry, basic_block exit,
f1f41a6c 5965 vec<basic_block> *bbs_p)
773c5ba7 5966{
5967 basic_block son;
5968
5969 for (son = first_dom_son (CDI_DOMINATORS, entry);
5970 son;
5971 son = next_dom_son (CDI_DOMINATORS, son))
5972 {
f1f41a6c 5973 bbs_p->safe_push (son);
773c5ba7 5974 if (son != exit)
5975 gather_blocks_in_sese_region (son, exit, bbs_p);
5976 }
5977}
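
/* For example (sketch; assumes ENTRY_BB strictly dominates EXIT_BB
   as required), the blocks of an SESE region can be collected like
   this:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ...use BBS...
     bbs.release ();

   ENTRY_BB itself is not added by the traversal and has to be pushed
   by the caller, as move_sese_region_to_fn below does.  */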
5978
79acaae1 5979/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5980 The duplicates are recorded in VARS_MAP. */
5981
5982static void
5983replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5984 tree to_context)
5985{
5986 tree t = *tp, new_t;
5987 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5988 void **loc;
5989
5990 if (DECL_CONTEXT (t) == to_context)
5991 return;
5992
5993 loc = pointer_map_contains (vars_map, t);
5994
5995 if (!loc)
5996 {
5997 loc = pointer_map_insert (vars_map, t);
5998
5999 if (SSA_VAR_P (t))
6000 {
6001 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
2ab2ce89 6002 add_local_decl (f, new_t);
79acaae1 6003 }
6004 else
6005 {
6006 gcc_assert (TREE_CODE (t) == CONST_DECL);
6007 new_t = copy_node (t);
6008 }
6009 DECL_CONTEXT (new_t) = to_context;
6010
6011 *loc = new_t;
6012 }
6013 else
45ba1503 6014 new_t = (tree) *loc;
79acaae1 6015
6016 *tp = new_t;
6017}
6018
75a70cf9 6019
79acaae1 6020/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6021 VARS_MAP maps old ssa names and var_decls to the new ones. */
6022
6023static tree
6024replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6025 tree to_context)
6026{
6027 void **loc;
ec11736b 6028 tree new_name;
79acaae1 6029
7c782c9b 6030 gcc_assert (!virtual_operand_p (name));
79acaae1 6031
6032 loc = pointer_map_contains (vars_map, name);
6033
6034 if (!loc)
6035 {
ec11736b 6036 tree decl = SSA_NAME_VAR (name);
6037 if (decl)
6038 {
6039 replace_by_duplicate_decl (&decl, vars_map, to_context);
6040 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6041 decl, SSA_NAME_DEF_STMT (name));
6042 if (SSA_NAME_IS_DEFAULT_DEF (name))
6043 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6044 decl, new_name);
6045 }
6046 else
6047 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6048 name, SSA_NAME_DEF_STMT (name));
79acaae1 6049
6050 loc = pointer_map_insert (vars_map, name);
6051 *loc = new_name;
6052 }
6053 else
45ba1503 6054 new_name = (tree) *loc;
79acaae1 6055
6056 return new_name;
6057}
773c5ba7 6058
6059struct move_stmt_d
6060{
1d22f541 6061 tree orig_block;
6062 tree new_block;
773c5ba7 6063 tree from_context;
6064 tree to_context;
79acaae1 6065 struct pointer_map_t *vars_map;
30b822ea 6066 htab_t new_label_map;
e38def9c 6067 struct pointer_map_t *eh_map;
773c5ba7 6068 bool remap_decls_p;
6069};
6070
6071/* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
1d22f541 6072   contained in *TP if it was previously ORIG_BLOCK, and change the
6073 DECL_CONTEXT of every local variable referenced in *TP. */
773c5ba7 6074
6075static tree
75a70cf9 6076move_stmt_op (tree *tp, int *walk_subtrees, void *data)
773c5ba7 6077{
75a70cf9 6078 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6079 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
30b822ea 6080 tree t = *tp;
773c5ba7 6081
75a70cf9 6082 if (EXPR_P (t))
5169661d 6083 {
524f019b 6084 if (TREE_BLOCK (t) == p->orig_block
6085 || (p->orig_block == NULL_TREE
6086 && TREE_BLOCK (t) == NULL_TREE))
5169661d 6087 TREE_SET_BLOCK (t, p->new_block);
6088 }
79acaae1 6089 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
773c5ba7 6090 {
79acaae1 6091 if (TREE_CODE (t) == SSA_NAME)
6092 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6093 else if (TREE_CODE (t) == LABEL_DECL)
30b822ea 6094 {
6095 if (p->new_label_map)
6096 {
6097 struct tree_map in, *out;
9af7fd5b 6098 in.base.from = t;
45ba1503 6099 out = (struct tree_map *)
6100 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
30b822ea 6101 if (out)
6102 *tp = t = out->to;
6103 }
773c5ba7 6104
30b822ea 6105 DECL_CONTEXT (t) = p->to_context;
6106 }
6107 else if (p->remap_decls_p)
773c5ba7 6108 {
79acaae1 6109 /* Replace T with its duplicate. T should no longer appear in the
6110 parent function, so this looks wasteful; however, it may appear
6111 in referenced_vars, and more importantly, as virtual operands of
6112 statements, and in alias lists of other variables. It would be
6113 quite difficult to expunge it from all those places. ??? It might
6114 suffice to do this for addressable variables. */
6115 if ((TREE_CODE (t) == VAR_DECL
6116 && !is_global_var (t))
6117 || TREE_CODE (t) == CONST_DECL)
b03e5397 6118 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
773c5ba7 6119 }
79acaae1 6120 *walk_subtrees = 0;
773c5ba7 6121 }
30b822ea 6122 else if (TYPE_P (t))
6123 *walk_subtrees = 0;
773c5ba7 6124
6125 return NULL_TREE;
6126}
6127
e38def9c 6128/* Helper for move_stmt_r. Given an EH region number for the source
6129 function, map that to the duplicate EH regio number in the dest. */
6130
6131static int
6132move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6133{
6134 eh_region old_r, new_r;
6135 void **slot;
6136
6137 old_r = get_eh_region_from_number (old_nr);
6138 slot = pointer_map_contains (p->eh_map, old_r);
6139 new_r = (eh_region) *slot;
6140
6141 return new_r->index;
6142}
6143
6144/* Similar, but operate on INTEGER_CSTs. */
6145
6146static tree
6147move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6148{
6149 int old_nr, new_nr;
6150
6151 old_nr = tree_low_cst (old_t_nr, 0);
6152 new_nr = move_stmt_eh_region_nr (old_nr, p);
6153
7002a1c8 6154 return build_int_cst (integer_type_node, new_nr);
e38def9c 6155}
6156
75a70cf9 6157/* Like move_stmt_op, but for gimple statements.
6158
6159 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6160 contained in the current statement in *GSI_P and change the
6161 DECL_CONTEXT of every local variable referenced in the current
6162 statement. */
6163
6164static tree
6165move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6166 struct walk_stmt_info *wi)
6167{
6168 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6169 gimple stmt = gsi_stmt (*gsi_p);
6170 tree block = gimple_block (stmt);
6171
6172 if (p->orig_block == NULL_TREE
6173 || block == p->orig_block
6174 || block == NULL_TREE)
6175 gimple_set_block (stmt, p->new_block);
6176#ifdef ENABLE_CHECKING
6177 else if (block != p->new_block)
6178 {
6179 while (block && block != p->orig_block)
6180 block = BLOCK_SUPERCONTEXT (block);
6181 gcc_assert (block);
6182 }
6183#endif
6184
e38def9c 6185 switch (gimple_code (stmt))
75a70cf9 6186 {
e38def9c 6187 case GIMPLE_CALL:
6188 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6189 {
6190 tree r, fndecl = gimple_call_fndecl (stmt);
6191 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6192 switch (DECL_FUNCTION_CODE (fndecl))
6193 {
6194 case BUILT_IN_EH_COPY_VALUES:
6195 r = gimple_call_arg (stmt, 1);
6196 r = move_stmt_eh_region_tree_nr (r, p);
6197 gimple_call_set_arg (stmt, 1, r);
6198 /* FALLTHRU */
6199
6200 case BUILT_IN_EH_POINTER:
6201 case BUILT_IN_EH_FILTER:
6202 r = gimple_call_arg (stmt, 0);
6203 r = move_stmt_eh_region_tree_nr (r, p);
6204 gimple_call_set_arg (stmt, 0, r);
6205 break;
75a70cf9 6206
e38def9c 6207 default:
6208 break;
6209 }
6210 }
6211 break;
6212
6213 case GIMPLE_RESX:
6214 {
6215 int r = gimple_resx_region (stmt);
6216 r = move_stmt_eh_region_nr (r, p);
6217 gimple_resx_set_region (stmt, r);
6218 }
6219 break;
75a70cf9 6220
e38def9c 6221 case GIMPLE_EH_DISPATCH:
6222 {
6223 int r = gimple_eh_dispatch_region (stmt);
6224 r = move_stmt_eh_region_nr (r, p);
6225 gimple_eh_dispatch_set_region (stmt, r);
6226 }
6227 break;
6228
6229 case GIMPLE_OMP_RETURN:
6230 case GIMPLE_OMP_CONTINUE:
6231 break;
6232 default:
6233 if (is_gimple_omp (stmt))
6234 {
6235 /* Do not remap variables inside OMP directives. Variables
6236 referenced in clauses and directive header belong to the
6237 parent function and should not be moved into the child
6238 function. */
6239 bool save_remap_decls_p = p->remap_decls_p;
6240 p->remap_decls_p = false;
6241 *handled_ops_p = true;
6242
e3a19533 6243 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6244 move_stmt_op, wi);
e38def9c 6245
6246 p->remap_decls_p = save_remap_decls_p;
6247 }
6248 break;
75a70cf9 6249 }
6250
6251 return NULL_TREE;
6252}
6253
773c5ba7 6254/* Move basic block BB from function CFUN to function DEST_FN. The
6255 block is moved out of the original linked list and placed after
6256 block AFTER in the new list. Also, the block is removed from the
6257 original array of blocks and placed in DEST_FN's array of blocks.
6258   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6259 updated to reflect the moved edges.
13b96211 6260
79acaae1 6261 The local variables are remapped to new instances, VARS_MAP is used
6262 to record the mapping. */
773c5ba7 6263
6264static void
6265move_block_to_fn (struct function *dest_cfun, basic_block bb,
6266 basic_block after, bool update_edge_count_p,
e38def9c 6267 struct move_stmt_d *d)
773c5ba7 6268{
6269 struct control_flow_graph *cfg;
6270 edge_iterator ei;
6271 edge e;
75a70cf9 6272 gimple_stmt_iterator si;
633c1ea8 6273 unsigned old_len, new_len;
773c5ba7 6274
e996e8f1 6275 /* Remove BB from dominance structures. */
6276 delete_from_dominance_info (CDI_DOMINATORS, bb);
28c92cbb 6277 if (current_loops)
6278 remove_bb_from_loops (bb);
e996e8f1 6279
773c5ba7 6280 /* Link BB to the new linked list. */
6281 move_block_after (bb, after);
6282
6283 /* Update the edge count in the corresponding flowgraphs. */
6284 if (update_edge_count_p)
6285 FOR_EACH_EDGE (e, ei, bb->succs)
6286 {
6287 cfun->cfg->x_n_edges--;
6288 dest_cfun->cfg->x_n_edges++;
6289 }
6290
6291 /* Remove BB from the original basic block array. */
f1f41a6c 6292 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
773c5ba7 6293 cfun->cfg->x_n_basic_blocks--;
6294
6295 /* Grow DEST_CFUN's basic block array if needed. */
6296 cfg = dest_cfun->cfg;
6297 cfg->x_n_basic_blocks++;
e996e8f1 6298 if (bb->index >= cfg->x_last_basic_block)
6299 cfg->x_last_basic_block = bb->index + 1;
773c5ba7 6300
f1f41a6c 6301 old_len = vec_safe_length (cfg->x_basic_block_info);
633c1ea8 6302 if ((unsigned) cfg->x_last_basic_block >= old_len)
773c5ba7 6303 {
633c1ea8 6304 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
f1f41a6c 6305 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
773c5ba7 6306 }
6307
f1f41a6c 6308 (*cfg->x_basic_block_info)[bb->index] = bb;
773c5ba7 6309
79acaae1 6310 /* Remap the variables in phi nodes. */
75a70cf9 6311 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
79acaae1 6312 {
75a70cf9 6313 gimple phi = gsi_stmt (si);
79acaae1 6314 use_operand_p use;
6315 tree op = PHI_RESULT (phi);
6316 ssa_op_iter oi;
f98fddd0 6317 unsigned i;
79acaae1 6318
7c782c9b 6319 if (virtual_operand_p (op))
28c92cbb 6320 {
6321 /* Remove the phi nodes for virtual operands (alias analysis will be
6322 run for the new function, anyway). */
75a70cf9 6323 remove_phi_node (&si, true);
28c92cbb 6324 continue;
6325 }
79acaae1 6326
1d22f541 6327 SET_PHI_RESULT (phi,
6328 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
79acaae1 6329 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6330 {
6331 op = USE_FROM_PTR (use);
6332 if (TREE_CODE (op) == SSA_NAME)
1d22f541 6333 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
79acaae1 6334 }
75a70cf9 6335
f98fddd0 6336 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6337 {
6338 location_t locus = gimple_phi_arg_location (phi, i);
6339 tree block = LOCATION_BLOCK (locus);
6340
6341 if (locus == UNKNOWN_LOCATION)
6342 continue;
6343 if (d->orig_block == NULL_TREE || block == d->orig_block)
6344 {
6345 if (d->new_block == NULL_TREE)
6346 locus = LOCATION_LOCUS (locus);
6347 else
6348 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6349 gimple_phi_arg_set_location (phi, i, locus);
6350 }
6351 }
6352
75a70cf9 6353 gsi_next (&si);
79acaae1 6354 }
6355
75a70cf9 6356 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
773c5ba7 6357 {
75a70cf9 6358 gimple stmt = gsi_stmt (si);
75a70cf9 6359 struct walk_stmt_info wi;
773c5ba7 6360
75a70cf9 6361 memset (&wi, 0, sizeof (wi));
6362 wi.info = d;
6363 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
773c5ba7 6364
75a70cf9 6365 if (gimple_code (stmt) == GIMPLE_LABEL)
773c5ba7 6366 {
75a70cf9 6367 tree label = gimple_label_label (stmt);
773c5ba7 6368 int uid = LABEL_DECL_UID (label);
6369
6370 gcc_assert (uid > -1);
6371
f1f41a6c 6372 old_len = vec_safe_length (cfg->x_label_to_block_map);
773c5ba7 6373 if (old_len <= (unsigned) uid)
6374 {
dd277d48 6375 new_len = 3 * uid / 2 + 1;
f1f41a6c 6376 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
773c5ba7 6377 }
6378
f1f41a6c 6379 (*cfg->x_label_to_block_map)[uid] = bb;
6380 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
773c5ba7 6381
6382 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6383
edb7afe8 6384 if (uid >= dest_cfun->cfg->last_label_uid)
6385 dest_cfun->cfg->last_label_uid = uid + 1;
773c5ba7 6386 }
30b822ea 6387
e38def9c 6388 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6389 remove_stmt_from_eh_lp_fn (cfun, stmt);
6390
6391 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6392 gimple_remove_stmt_histograms (cfun, stmt);
79acaae1 6393
28c92cbb 6394 /* We cannot leave any operands allocated from the operand caches of
6395 the current function. */
6396 free_stmt_operands (stmt);
6397 push_cfun (dest_cfun);
79acaae1 6398 update_stmt (stmt);
28c92cbb 6399 pop_cfun ();
30b822ea 6400 }
9c388755 6401
6402 FOR_EACH_EDGE (e, ei, bb->succs)
524f019b 6403 if (e->goto_locus != UNKNOWN_LOCATION)
9c388755 6404 {
5169661d 6405 tree block = LOCATION_BLOCK (e->goto_locus);
9c388755 6406 if (d->orig_block == NULL_TREE
6407 || block == d->orig_block)
5169661d 6408 e->goto_locus = d->new_block ?
6409 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6410 LOCATION_LOCUS (e->goto_locus);
9c388755 6411#ifdef ENABLE_CHECKING
6412 else if (block != d->new_block)
6413 {
6414 while (block && block != d->orig_block)
6415 block = BLOCK_SUPERCONTEXT (block);
6416 gcc_assert (block);
6417 }
6418#endif
6419 }
30b822ea 6420}
6421
6422/* Examine the statements in BB (which is in SRC_CFUN); find and return
6423 the outermost EH region. Use REGION as the incoming base EH region. */
6424
e38def9c 6425static eh_region
30b822ea 6426find_outermost_region_in_block (struct function *src_cfun,
e38def9c 6427 basic_block bb, eh_region region)
30b822ea 6428{
75a70cf9 6429 gimple_stmt_iterator si;
13b96211 6430
75a70cf9 6431 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
30b822ea 6432 {
75a70cf9 6433 gimple stmt = gsi_stmt (si);
e38def9c 6434 eh_region stmt_region;
6435 int lp_nr;
8487df40 6436
e38def9c 6437 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6438 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6439 if (stmt_region)
6c105785 6440 {
e38def9c 6441 if (region == NULL)
6c105785 6442 region = stmt_region;
6443 else if (stmt_region != region)
6444 {
6445 region = eh_region_outermost (src_cfun, stmt_region, region);
e38def9c 6446 gcc_assert (region != NULL);
6c105785 6447 }
6448 }
773c5ba7 6449 }
30b822ea 6450
6451 return region;
773c5ba7 6452}
6453
30b822ea 6454static tree
6455new_label_mapper (tree decl, void *data)
6456{
6457 htab_t hash = (htab_t) data;
6458 struct tree_map *m;
6459 void **slot;
6460
6461 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6462
45ba1503 6463 m = XNEW (struct tree_map);
30b822ea 6464 m->hash = DECL_UID (decl);
9af7fd5b 6465 m->base.from = decl;
e60a6f7b 6466 m->to = create_artificial_label (UNKNOWN_LOCATION);
30b822ea 6467 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
edb7afe8 6468 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6469 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
30b822ea 6470
6471 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6472 gcc_assert (*slot == NULL);
6473
6474 *slot = m;
6475
6476 return m->to;
6477}
773c5ba7 6478
1d22f541 6479/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6480 subblocks. */
6481
6482static void
6483replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6484 tree to_context)
6485{
6486 tree *tp, t;
6487
1767a056 6488 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
1d22f541 6489 {
6490 t = *tp;
ba7226a7 6491 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6492 continue;
1d22f541 6493 replace_by_duplicate_decl (&t, vars_map, to_context);
6494 if (t != *tp)
6495 {
6496 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6497 {
6498 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6499 DECL_HAS_VALUE_EXPR_P (t) = 1;
6500 }
1767a056 6501 DECL_CHAIN (t) = DECL_CHAIN (*tp);
1d22f541 6502 *tp = t;
6503 }
6504 }
6505
6506 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6507 replace_block_vars_by_duplicates (block, vars_map, to_context);
6508}
6509
773c5ba7 6510/* Move a single-entry, single-exit region delimited by ENTRY_BB and
6511 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6512 single basic block in the original CFG and the new basic block is
6513 returned. DEST_CFUN must not have a CFG yet.
6514
6515 Note that the region need not be a pure SESE region. Blocks inside
6516 the region may contain calls to abort/exit. The only restriction
6517 is that ENTRY_BB should be the only entry point and it must
6518 dominate EXIT_BB.
6519
1d22f541 6520 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6521   function's outermost BLOCK, move all subblocks of ORIG_BLOCK
6522 to the new function.
6523
773c5ba7 6524 All local variables referenced in the region are assumed to be in
6525 the corresponding BLOCK_VARS and unexpanded variable lists
6526 associated with DEST_CFUN. */
6527
6528basic_block
6529move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
1d22f541 6530 basic_block exit_bb, tree orig_block)
773c5ba7 6531{
f1f41a6c 6532 vec<basic_block> bbs, dom_bbs;
79acaae1 6533 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6534 basic_block after, bb, *entry_pred, *exit_succ, abb;
6535 struct function *saved_cfun = cfun;
e38def9c 6536 int *entry_flag, *exit_flag;
79acaae1 6537 unsigned *entry_prob, *exit_prob;
773c5ba7 6538 unsigned i, num_entry_edges, num_exit_edges;
6539 edge e;
6540 edge_iterator ei;
30b822ea 6541 htab_t new_label_map;
e38def9c 6542 struct pointer_map_t *vars_map, *eh_map;
28c92cbb 6543 struct loop *loop = entry_bb->loop_father;
1d22f541 6544 struct move_stmt_d d;
773c5ba7 6545
6546 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6547 region. */
6548 gcc_assert (entry_bb != exit_bb
5056ba1a 6549 && (!exit_bb
6550 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
773c5ba7 6551
79acaae1 6552 /* Collect all the blocks in the region. Manually add ENTRY_BB
6553     because gather_blocks_in_sese_region will not add it.  */
f1f41a6c 6554 bbs.create (0);
6555 bbs.safe_push (entry_bb);
773c5ba7 6556 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6557
79acaae1 6558 /* The blocks that used to be dominated by something in BBS will now be
6559 dominated by the new block. */
6560 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
f1f41a6c 6561 bbs.address (),
6562 bbs.length ());
79acaae1 6563
773c5ba7 6564 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6565 the predecessor edges to ENTRY_BB and the successor edges to
6566 EXIT_BB so that we can re-attach them to the new basic block that
6567 will replace the region. */
6568 num_entry_edges = EDGE_COUNT (entry_bb->preds);
ed7e2206 6569 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6570 entry_flag = XNEWVEC (int, num_entry_edges);
79acaae1 6571 entry_prob = XNEWVEC (unsigned, num_entry_edges);
773c5ba7 6572 i = 0;
6573 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6574 {
79acaae1 6575 entry_prob[i] = e->probability;
773c5ba7 6576 entry_flag[i] = e->flags;
6577 entry_pred[i++] = e->src;
6578 remove_edge (e);
6579 }
6580
5056ba1a 6581 if (exit_bb)
773c5ba7 6582 {
5056ba1a 6583 num_exit_edges = EDGE_COUNT (exit_bb->succs);
ed7e2206 6584 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6585 exit_flag = XNEWVEC (int, num_exit_edges);
79acaae1 6586 exit_prob = XNEWVEC (unsigned, num_exit_edges);
5056ba1a 6587 i = 0;
6588 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6589 {
79acaae1 6590 exit_prob[i] = e->probability;
5056ba1a 6591 exit_flag[i] = e->flags;
6592 exit_succ[i++] = e->dest;
6593 remove_edge (e);
6594 }
6595 }
6596 else
6597 {
6598 num_exit_edges = 0;
6599 exit_succ = NULL;
6600 exit_flag = NULL;
79acaae1 6601 exit_prob = NULL;
773c5ba7 6602 }
6603
6604 /* Switch context to the child function to initialize DEST_FN's CFG. */
6605 gcc_assert (dest_cfun->cfg == NULL);
79acaae1 6606 push_cfun (dest_cfun);
30b822ea 6607
773c5ba7 6608 init_empty_tree_cfg ();
30b822ea 6609
6610 /* Initialize EH information for the new function. */
e38def9c 6611 eh_map = NULL;
30b822ea 6612 new_label_map = NULL;
6613 if (saved_cfun->eh)
6614 {
e38def9c 6615 eh_region region = NULL;
30b822ea 6616
f1f41a6c 6617 FOR_EACH_VEC_ELT (bbs, i, bb)
30b822ea 6618 region = find_outermost_region_in_block (saved_cfun, bb, region);
6619
6620 init_eh_for_function ();
e38def9c 6621 if (region != NULL)
30b822ea 6622 {
6623 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
e38def9c 6624 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6625 new_label_mapper, new_label_map);
30b822ea 6626 }
6627 }
6628
79acaae1 6629 pop_cfun ();
6630
773c5ba7 6631 /* Move blocks from BBS into DEST_CFUN. */
f1f41a6c 6632 gcc_assert (bbs.length () >= 2);
773c5ba7 6633 after = dest_cfun->cfg->x_entry_block_ptr;
79acaae1 6634 vars_map = pointer_map_create ();
1d22f541 6635
6636 memset (&d, 0, sizeof (d));
e38def9c 6637 d.orig_block = orig_block;
6638 d.new_block = DECL_INITIAL (dest_cfun->decl);
1d22f541 6639 d.from_context = cfun->decl;
6640 d.to_context = dest_cfun->decl;
e38def9c 6641 d.vars_map = vars_map;
1d22f541 6642 d.new_label_map = new_label_map;
e38def9c 6643 d.eh_map = eh_map;
1d22f541 6644 d.remap_decls_p = true;
1d22f541 6645
f1f41a6c 6646 FOR_EACH_VEC_ELT (bbs, i, bb)
773c5ba7 6647 {
6648 /* No need to update edge counts on the last block. It has
6649 already been updated earlier when we detached the region from
6650 the original CFG. */
e38def9c 6651 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
773c5ba7 6652 after = bb;
6653 }
6654
1d22f541 6655 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6656 if (orig_block)
6657 {
6658 tree block;
6659 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6660 == NULL_TREE);
6661 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6662 = BLOCK_SUBBLOCKS (orig_block);
6663 for (block = BLOCK_SUBBLOCKS (orig_block);
6664 block; block = BLOCK_CHAIN (block))
6665 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6666 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6667 }
6668
6669 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6670 vars_map, dest_cfun->decl);
6671
30b822ea 6672 if (new_label_map)
6673 htab_delete (new_label_map);
e38def9c 6674 if (eh_map)
6675 pointer_map_destroy (eh_map);
79acaae1 6676 pointer_map_destroy (vars_map);
773c5ba7 6677
6678 /* Rewire the entry and exit blocks. The successor to the entry
6679 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6680 the child function. Similarly, the predecessor of DEST_FN's
6681 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6682 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6683 various CFG manipulation function get to the right CFG.
6684
6685 FIXME, this is silly. The CFG ought to become a parameter to
6686 these helpers. */
79acaae1 6687 push_cfun (dest_cfun);
773c5ba7 6688 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
5056ba1a 6689 if (exit_bb)
6690 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
79acaae1 6691 pop_cfun ();
773c5ba7 6692
6693 /* Back in the original function, the SESE region has disappeared,
6694 create a new basic block in its place. */
6695 bb = create_empty_bb (entry_pred[0]);
28c92cbb 6696 if (current_loops)
6697 add_bb_to_loop (bb, loop);
773c5ba7 6698 for (i = 0; i < num_entry_edges; i++)
79acaae1 6699 {
6700 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6701 e->probability = entry_prob[i];
6702 }
773c5ba7 6703
6704 for (i = 0; i < num_exit_edges; i++)
79acaae1 6705 {
6706 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6707 e->probability = exit_prob[i];
6708 }
6709
6710 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
f1f41a6c 6711 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
79acaae1 6712 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
f1f41a6c 6713 dom_bbs.release ();
773c5ba7 6714
5056ba1a 6715 if (exit_bb)
6716 {
79acaae1 6717 free (exit_prob);
5056ba1a 6718 free (exit_flag);
6719 free (exit_succ);
6720 }
79acaae1 6721 free (entry_prob);
773c5ba7 6722 free (entry_flag);
6723 free (entry_pred);
f1f41a6c 6724 bbs.release ();
773c5ba7 6725
6726 return bb;
6727}
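
/* Illustrative sketch of outlining a region with this function
   (assumes CHILD_FN is a FUNCTION_DECL whose struct function has no
   CFG yet and ENTRY_BB/EXIT_BB delimit the region as described
   above):

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block stub
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   On return the region lives in CHILD_CFUN's CFG and STUB is the
   single block that replaces it in the original function; the caller
   still has to emit the call to CHILD_FN into STUB.  This is, for
   example, how the OMP expansion code outlines parallel regions.  */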
6728
095dcfa3 6729
0cd02a19 6730/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
75a70cf9 6731 */
4ee9c684 6732
6733void
8d672d12 6734dump_function_to_file (tree fndecl, FILE *file, int flags)
4ee9c684 6735{
8d672d12 6736 tree arg, var, old_current_fndecl = current_function_decl;
2debe3a3 6737 struct function *dsf;
4ee9c684 6738 bool ignore_topmost_bind = false, any_var = false;
6739 basic_block bb;
6740 tree chain;
8d672d12 6741 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6742 && decl_is_tm_clone (fndecl));
6743 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
13b96211 6744
8d672d12 6745 current_function_decl = fndecl;
6746 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
4ee9c684 6747
8d672d12 6748 arg = DECL_ARGUMENTS (fndecl);
4ee9c684 6749 while (arg)
6750 {
04ba083d 6751 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6752 fprintf (file, " ");
4ee9c684 6753 print_generic_expr (file, arg, dump_flags);
46412a37 6754 if (flags & TDF_VERBOSE)
6755 print_node (file, "", arg, 4);
1767a056 6756 if (DECL_CHAIN (arg))
4ee9c684 6757 fprintf (file, ", ");
1767a056 6758 arg = DECL_CHAIN (arg);
4ee9c684 6759 }
6760 fprintf (file, ")\n");
6761
46412a37 6762 if (flags & TDF_VERBOSE)
8d672d12 6763 print_node (file, "", fndecl, 2);
46412a37 6764
8d672d12 6765 dsf = DECL_STRUCT_FUNCTION (fndecl);
f89f1788 6766 if (dsf && (flags & TDF_EH))
2debe3a3 6767 dump_eh_tree (file, dsf);
6768
8d672d12 6769 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
4ee9c684 6770 {
8d672d12 6771 dump_node (fndecl, TDF_SLIM | flags, file);
6772 current_function_decl = old_current_fndecl;
4ee9c684 6773 return;
6774 }
6775
6776 /* When GIMPLE is lowered, the variables are no longer available in
6777 BIND_EXPRs, so display them separately. */
8d672d12 6778 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
4ee9c684 6779 {
2ab2ce89 6780 unsigned ix;
4ee9c684 6781 ignore_topmost_bind = true;
6782
6783 fprintf (file, "{\n");
f1f41a6c 6784 if (!vec_safe_is_empty (fun->local_decls))
8d672d12 6785 FOR_EACH_LOCAL_DECL (fun, ix, var)
3e74e534 6786 {
6787 print_generic_decl (file, var, flags);
6788 if (flags & TDF_VERBOSE)
6789 print_node (file, "", var, 4);
6790 fprintf (file, "\n");
4ee9c684 6791
3e74e534 6792 any_var = true;
6793 }
ec11736b 6794 if (gimple_in_ssa_p (cfun))
6795 for (ix = 1; ix < num_ssa_names; ++ix)
6796 {
6797 tree name = ssa_name (ix);
6798 if (name && !SSA_NAME_VAR (name))
6799 {
6800 fprintf (file, " ");
6801 print_generic_expr (file, TREE_TYPE (name), flags);
6802 fprintf (file, " ");
6803 print_generic_expr (file, name, flags);
6804 fprintf (file, ";\n");
3e74e534 6805
6806 any_var = true;
ec11736b 6807 }
6808 }
4ee9c684 6809 }
6810
f1f41a6c 6811 if (fun && fun->decl == fndecl
6812 && fun->cfg
8d672d12 6813 && basic_block_info_for_function (fun))
4ee9c684 6814 {
75a70cf9 6815 /* If the CFG has been built, emit a CFG-based dump. */
4ee9c684 6816 if (!ignore_topmost_bind)
6817 fprintf (file, "{\n");
6818
8d672d12 6819 if (any_var && n_basic_blocks_for_function (fun))
4ee9c684 6820 fprintf (file, "\n");
6821
8d672d12 6822 FOR_EACH_BB_FN (bb, fun)
bec2cf98 6823 dump_bb (file, bb, 2, flags | TDF_COMMENT);
13b96211 6824
4ee9c684 6825 fprintf (file, "}\n");
6826 }
8d672d12 6827 else if (DECL_SAVED_TREE (fndecl) == NULL)
75a70cf9 6828 {
6829 /* The function is now in GIMPLE form but the CFG has not been
6830 built yet. Emit the single sequence of GIMPLE statements
6831 that make up its body. */
8d672d12 6832 gimple_seq body = gimple_body (fndecl);
75a70cf9 6833
6834 if (gimple_seq_first_stmt (body)
6835 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6836 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6837 print_gimple_seq (file, body, 0, flags);
6838 else
6839 {
6840 if (!ignore_topmost_bind)
6841 fprintf (file, "{\n");
6842
6843 if (any_var)
6844 fprintf (file, "\n");
6845
6846 print_gimple_seq (file, body, 2, flags);
6847 fprintf (file, "}\n");
6848 }
6849 }
4ee9c684 6850 else
6851 {
6852 int indent;
6853
6854 /* Make a tree based dump. */
8d672d12 6855 chain = DECL_SAVED_TREE (fndecl);
1e8e9920 6856 if (chain && TREE_CODE (chain) == BIND_EXPR)
4ee9c684 6857 {
6858 if (ignore_topmost_bind)
6859 {
6860 chain = BIND_EXPR_BODY (chain);
6861 indent = 2;
6862 }
6863 else
6864 indent = 0;
6865 }
6866 else
6867 {
6868 if (!ignore_topmost_bind)
6869 fprintf (file, "{\n");
6870 indent = 2;
6871 }
6872
6873 if (any_var)
6874 fprintf (file, "\n");
6875
6876 print_generic_stmt_indented (file, chain, flags, indent);
6877 if (ignore_topmost_bind)
6878 fprintf (file, "}\n");
6879 }
6880
12e18540 6881 if (flags & TDF_ENUMERATE_LOCALS)
6882 dump_enumerated_decls (file, flags);
4ee9c684 6883 fprintf (file, "\n\n");
1e8e9920 6884
8d672d12 6885 current_function_decl = old_current_fndecl;
1e8e9920 6886}
6887
1e8e9920 6888/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
6889
4b987fac 6890DEBUG_FUNCTION void
1e8e9920 6891debug_function (tree fn, int flags)
6892{
6893 dump_function_to_file (fn, stderr, flags);
4ee9c684 6894}
6895
6896
903dae48 6897/* Print on FILE the indexes for the predecessors of basic_block BB. */
4ee9c684 6898
6899static void
cd665a06 6900print_pred_bbs (FILE *file, basic_block bb)
4ee9c684 6901{
cd665a06 6902 edge e;
6903 edge_iterator ei;
6904
6905 FOR_EACH_EDGE (e, ei, bb->preds)
903dae48 6906 fprintf (file, "bb_%d ", e->src->index);
4ee9c684 6907}
6908
6909
903dae48 6910/* Print on FILE the indexes for the successors of basic_block BB. */
4ee9c684 6911
6912static void
cd665a06 6913print_succ_bbs (FILE *file, basic_block bb)
4ee9c684 6914{
cd665a06 6915 edge e;
6916 edge_iterator ei;
6917
6918 FOR_EACH_EDGE (e, ei, bb->succs)
903dae48 6919 fprintf (file, "bb_%d ", e->dest->index);
4ee9c684 6920}
6921
2f62d06a 6922/* Print to FILE the basic block BB following the VERBOSITY level. */
6923
48e1416a 6924void
2f62d06a 6925print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6926{
6927 char *s_indent = (char *) alloca ((size_t) indent + 1);
6928 memset ((void *) s_indent, ' ', (size_t) indent);
6929 s_indent[indent] = '\0';
6930
6931 /* Print basic_block's header. */
6932 if (verbosity >= 2)
6933 {
6934 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6935 print_pred_bbs (file, bb);
6936 fprintf (file, "}, succs = {");
6937 print_succ_bbs (file, bb);
6938 fprintf (file, "})\n");
6939 }
6940
6941 /* Print basic_block's body. */
6942 if (verbosity >= 3)
6943 {
6944 fprintf (file, "%s {\n", s_indent);
bec2cf98 6945 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
2f62d06a 6946 fprintf (file, "%s }\n", s_indent);
6947 }
6948}
6949
6950static void print_loop_and_siblings (FILE *, struct loop *, int, int);
4ee9c684 6951
2f62d06a 6952/* Pretty print LOOP on FILE, indented INDENT spaces. Following
6953 VERBOSITY level this outputs the contents of the loop, or just its
6954 structure. */
4ee9c684 6955
6956static void
2f62d06a 6957print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
4ee9c684 6958{
6959 char *s_indent;
6960 basic_block bb;
13b96211 6961
4ee9c684 6962 if (loop == NULL)
6963 return;
6964
6965 s_indent = (char *) alloca ((size_t) indent + 1);
6966 memset ((void *) s_indent, ' ', (size_t) indent);
6967 s_indent[indent] = '\0';
6968
2f62d06a 6969 /* Print loop's header. */
6d52d7f8 6970 fprintf (file, "%sloop_%d (", s_indent, loop->num);
6971 if (loop->header)
6972 fprintf (file, "header = %d", loop->header->index);
6973 else
6974 {
6975 fprintf (file, "deleted)\n");
6976 return;
6977 }
6978 if (loop->latch)
6979 fprintf (file, ", latch = %d", loop->latch->index);
6980 else
6981 fprintf (file, ", multiple latches");
2f62d06a 6982 fprintf (file, ", niter = ");
6983 print_generic_expr (file, loop->nb_iterations, 0);
13b96211 6984
2f62d06a 6985 if (loop->any_upper_bound)
6986 {
6987 fprintf (file, ", upper_bound = ");
6988 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6989 }
13b96211 6990
2f62d06a 6991 if (loop->any_estimate)
6992 {
6993 fprintf (file, ", estimate = ");
6994 dump_double_int (file, loop->nb_iterations_estimate, true);
6995 }
6996 fprintf (file, ")\n");
6997
6998 /* Print loop's body. */
6999 if (verbosity >= 1)
7000 {
7001 fprintf (file, "%s{\n", s_indent);
7002 FOR_EACH_BB (bb)
7003 if (bb->loop_father == loop)
7004 print_loops_bb (file, bb, indent, verbosity);
7005
7006 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7007 fprintf (file, "%s}\n", s_indent);
7008 }
4ee9c684 7009}
7010
2f62d06a 7011/* Print the LOOP and its sibling loops on FILE, indented INDENT
7012 spaces. Following VERBOSITY level this outputs the contents of the
7013 loop, or just its structure. */
7014
7015static void
7016print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
7017{
7018 if (loop == NULL)
7019 return;
7020
7021 print_loop (file, loop, indent, verbosity);
7022 print_loop_and_siblings (file, loop->next, indent, verbosity);
7023}
4ee9c684 7024
7025/* Follow a CFG edge from the entry point of the program, and on entry
7026 of a loop, pretty print the loop structure on FILE. */
7027
13b96211 7028void
2f62d06a 7029print_loops (FILE *file, int verbosity)
4ee9c684 7030{
7031 basic_block bb;
13b96211 7032
255b6be7 7033 bb = ENTRY_BLOCK_PTR;
4ee9c684 7034 if (bb && bb->loop_father)
2f62d06a 7035 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
4ee9c684 7036}
7037
7038
2f62d06a 7039/* Debugging loops structure at tree level, at some VERBOSITY level. */
7040
4b987fac 7041DEBUG_FUNCTION void
2f62d06a 7042debug_loops (int verbosity)
7043{
7044 print_loops (stderr, verbosity);
7045}
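
/* For example, debug_loops (1) prints the loop tree with the niter
   and bound annotations for every loop, debug_loops (2) additionally
   lists each block's predecessors and successors, and debug_loops (3)
   also dumps the body of every basic block.  */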
7046
7047/* Print on stderr the code of LOOP, at some VERBOSITY level. */
4ee9c684 7048
4b987fac 7049DEBUG_FUNCTION void
2f62d06a 7050debug_loop (struct loop *loop, int verbosity)
4ee9c684 7051{
2f62d06a 7052 print_loop (stderr, loop, 0, verbosity);
4ee9c684 7053}
7054
2f62d06a 7055/* Print on stderr the code of loop number NUM, at some VERBOSITY
7056 level. */
7057
4b987fac 7058DEBUG_FUNCTION void
2f62d06a 7059debug_loop_num (unsigned num, int verbosity)
7060{
7061 debug_loop (get_loop (num), verbosity);
7062}
4ee9c684 7063
7064/* Return true if BB ends with a call, possibly followed by some
7065   instructions that must stay with the call.  Return false
7066   otherwise.  */
7067
7068static bool
75a70cf9 7069gimple_block_ends_with_call_p (basic_block bb)
4ee9c684 7070{
9845d120 7071 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
beb5ec58 7072 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
4ee9c684 7073}
7074
7075
7076/* Return true if BB ends with a conditional branch.  Return false
7077   otherwise.  */
7078
7079static bool
75a70cf9 7080gimple_block_ends_with_condjump_p (const_basic_block bb)
4ee9c684 7081{
75a70cf9 7082 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7083 return (stmt && gimple_code (stmt) == GIMPLE_COND);
4ee9c684 7084}
7085
7086
7087/* Return true if we need to add fake edge to exit at statement T.
75a70cf9 7088 Helper function for gimple_flow_call_edges_add. */
4ee9c684 7089
7090static bool
75a70cf9 7091need_fake_edge_p (gimple t)
4ee9c684 7092{
75a70cf9 7093 tree fndecl = NULL_TREE;
7094 int call_flags = 0;
4ee9c684 7095
7096 /* NORETURN and LONGJMP calls already have an edge to exit.
13422b8a 7097 CONST and PURE calls do not need one.
4ee9c684 7098 We don't currently check for CONST and PURE here, although
7099 it would be a good idea, because those attributes are
7100 figured out from the RTL in mark_constant_function, and
7101 the counter incrementation code from -fprofile-arcs
7102 leads to different results from -fbranch-probabilities. */
75a70cf9 7103 if (is_gimple_call (t))
eb5bb63e 7104 {
75a70cf9 7105 fndecl = gimple_call_fndecl (t);
7106 call_flags = gimple_call_flags (t);
eb5bb63e 7107 }
7108
75a70cf9 7109 if (is_gimple_call (t)
7110 && fndecl
7111 && DECL_BUILT_IN (fndecl)
eb5bb63e 7112 && (call_flags & ECF_NOTHROW)
dac59c04 7113 && !(call_flags & ECF_RETURNS_TWICE)
7114 /* fork() doesn't really return twice, but the effect of
7115 wrapping it in __gcov_fork() which calls __gcov_flush()
7116 and clears the counters before forking has the same
7117 effect as returning twice. Force a fake edge. */
7118 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7119 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7120 return false;
eb5bb63e 7121
44e41ce0 7122 if (is_gimple_call (t))
7123 {
7124 edge_iterator ei;
7125 edge e;
7126 basic_block bb;
7127
7128 if (!(call_flags & ECF_NORETURN))
7129 return true;
7130
7131 bb = gimple_bb (t);
7132 FOR_EACH_EDGE (e, ei, bb->succs)
7133 if ((e->flags & EDGE_FAKE) == 0)
7134 return true;
7135 }
4ee9c684 7136
590c3166 7137 if (gimple_code (t) == GIMPLE_ASM
75a70cf9 7138 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
4ee9c684 7139 return true;
7140
7141 return false;
7142}
7143
7144
7145/* Add fake edges to the function exit for any non-constant and
44e41ce0 7146   non-noreturn calls (or noreturn calls with EH/abnormal edges),
7147 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7148 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7149 that were split.
4ee9c684 7150
7151 The goal is to expose cases in which entering a basic block does
7152 not imply that all subsequent instructions must be executed. */
7153
7154static int
75a70cf9 7155gimple_flow_call_edges_add (sbitmap blocks)
4ee9c684 7156{
7157 int i;
7158 int blocks_split = 0;
7159 int last_bb = last_basic_block;
7160 bool check_last_block = false;
7161
4d2e5d52 7162 if (n_basic_blocks == NUM_FIXED_BLOCKS)
4ee9c684 7163 return 0;
7164
7165 if (! blocks)
7166 check_last_block = true;
7167 else
08b7917c 7168 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4ee9c684 7169
7170 /* In the last basic block, before epilogue generation, there will be
7171 a fallthru edge to EXIT. Special care is required if the last insn
7172 of the last basic block is a call because make_edge folds duplicate
7173 edges, which would result in the fallthru edge also being marked
7174 fake, which would result in the fallthru edge being removed by
7175 remove_fake_edges, which would result in an invalid CFG.
7176
7177 Moreover, we can't elide the outgoing fake edge, since the block
7178 profiler needs to take this into account in order to solve the minimal
7179 spanning tree in the case that the call doesn't return.
7180
7181 Handle this by adding a dummy instruction in a new last basic block. */
7182 if (check_last_block)
7183 {
7184 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
1a4a3132 7185 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
75a70cf9 7186 gimple t = NULL;
7187
7188 if (!gsi_end_p (gsi))
7189 t = gsi_stmt (gsi);
4ee9c684 7190
20498e00 7191 if (t && need_fake_edge_p (t))
4ee9c684 7192 {
7193 edge e;
7194
c6356c17 7195 e = find_edge (bb, EXIT_BLOCK_PTR);
7196 if (e)
7197 {
75a70cf9 7198 gsi_insert_on_edge (e, gimple_build_nop ());
7199 gsi_commit_edge_inserts ();
c6356c17 7200 }
4ee9c684 7201 }
7202 }
7203
 7204  /* Now add fake edges to the function exit for any non-constant
 7205     calls, since there is no way to determine whether they will
 7206     return or not.  */
7207 for (i = 0; i < last_bb; i++)
7208 {
7209 basic_block bb = BASIC_BLOCK (i);
75a70cf9 7210 gimple_stmt_iterator gsi;
7211 gimple stmt, last_stmt;
4ee9c684 7212
7213 if (!bb)
7214 continue;
7215
08b7917c 7216 if (blocks && !bitmap_bit_p (blocks, i))
4ee9c684 7217 continue;
7218
1a4a3132 7219 gsi = gsi_last_nondebug_bb (bb);
75a70cf9 7220 if (!gsi_end_p (gsi))
4ee9c684 7221 {
75a70cf9 7222 last_stmt = gsi_stmt (gsi);
4ee9c684 7223 do
7224 {
75a70cf9 7225 stmt = gsi_stmt (gsi);
4ee9c684 7226 if (need_fake_edge_p (stmt))
7227 {
7228 edge e;
75a70cf9 7229
4ee9c684 7230 /* The handling above of the final block before the
7231 epilogue should be enough to verify that there is
7232 no edge to the exit block in CFG already.
7233 Calling make_edge in such case would cause us to
7234 mark that edge as fake and remove it later. */
7235#ifdef ENABLE_CHECKING
7236 if (stmt == last_stmt)
cd665a06 7237 {
c6356c17 7238 e = find_edge (bb, EXIT_BLOCK_PTR);
7239 gcc_assert (e == NULL);
cd665a06 7240 }
4ee9c684 7241#endif
7242
7243 /* Note that the following may create a new basic block
7244 and renumber the existing basic blocks. */
7245 if (stmt != last_stmt)
7246 {
7247 e = split_block (bb, stmt);
7248 if (e)
7249 blocks_split++;
7250 }
7251 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7252 }
75a70cf9 7253 gsi_prev (&gsi);
4ee9c684 7254 }
75a70cf9 7255 while (!gsi_end_p (gsi));
4ee9c684 7256 }
7257 }
7258
7259 if (blocks_split)
7260 verify_flow_info ();
7261
7262 return blocks_split;
7263}
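
/* A hedged usage sketch (not part of the original sources): passes reach
   the routine above through the generic CFG hook wrapper, e.g.

     flow_call_edges_add (NULL);

   where a NULL bitmap means "process the whole CFG".  The block profiler
   relies on the fake edges so that a block ending in a call that might not
   return is not assumed to fall through to EXIT.  */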
7264
31a8456e 7265/* Removes edge E and all the blocks dominated by it, and updates dominance
7266 information. The IL in E->src needs to be updated separately.
 7267   If dominance info is not available, only the edge E is removed.  */
7268
7269void
7270remove_edge_and_dominated_blocks (edge e)
7271{
f1f41a6c 7272 vec<basic_block> bbs_to_remove = vec<basic_block>();
7273 vec<basic_block> bbs_to_fix_dom = vec<basic_block>();
31a8456e 7274 bitmap df, df_idom;
7275 edge f;
7276 edge_iterator ei;
7277 bool none_removed = false;
7278 unsigned i;
7279 basic_block bb, dbb;
7280 bitmap_iterator bi;
7281
50b08d37 7282 if (!dom_info_available_p (CDI_DOMINATORS))
31a8456e 7283 {
7284 remove_edge (e);
7285 return;
7286 }
7287
7288 /* No updating is needed for edges to exit. */
7289 if (e->dest == EXIT_BLOCK_PTR)
7290 {
7291 if (cfgcleanup_altered_bbs)
7292 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7293 remove_edge (e);
7294 return;
7295 }
7296
7297 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7298 that is not dominated by E->dest, then this set is empty. Otherwise,
7299 all the basic blocks dominated by E->dest are removed.
7300
7301 Also, to DF_IDOM we store the immediate dominators of the blocks in
7302 the dominance frontier of E (i.e., of the successors of the
7303 removed blocks, if there are any, and of E->dest otherwise). */
7304 FOR_EACH_EDGE (f, ei, e->dest->preds)
7305 {
7306 if (f == e)
7307 continue;
7308
7309 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7310 {
7311 none_removed = true;
7312 break;
7313 }
7314 }
7315
7316 df = BITMAP_ALLOC (NULL);
7317 df_idom = BITMAP_ALLOC (NULL);
7318
7319 if (none_removed)
7320 bitmap_set_bit (df_idom,
7321 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7322 else
7323 {
59f3ea59 7324 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
f1f41a6c 7325 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
31a8456e 7326 {
7327 FOR_EACH_EDGE (f, ei, bb->succs)
7328 {
7329 if (f->dest != EXIT_BLOCK_PTR)
7330 bitmap_set_bit (df, f->dest->index);
7331 }
7332 }
f1f41a6c 7333 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
31a8456e 7334 bitmap_clear_bit (df, bb->index);
7335
7336 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7337 {
7338 bb = BASIC_BLOCK (i);
7339 bitmap_set_bit (df_idom,
7340 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7341 }
7342 }
7343
7344 if (cfgcleanup_altered_bbs)
7345 {
7346 /* Record the set of the altered basic blocks. */
7347 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7348 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7349 }
7350
7351 /* Remove E and the cancelled blocks. */
7352 if (none_removed)
7353 remove_edge (e);
7354 else
7355 {
9845d120 7356 /* Walk backwards so as to get a chance to substitute all
7357 released DEFs into debug stmts. See
7358 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7359 details. */
f1f41a6c 7360 for (i = bbs_to_remove.length (); i-- > 0; )
7361 delete_basic_block (bbs_to_remove[i]);
31a8456e 7362 }
7363
7364 /* Update the dominance information. The immediate dominator may change only
7365 for blocks whose immediate dominator belongs to DF_IDOM:
48e1416a 7366
31a8456e 7367 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
 7368     removal.  Let Z be an arbitrary block such that idom(Z) = Y and
7369 Z dominates X after the removal. Before removal, there exists a path P
7370 from Y to X that avoids Z. Let F be the last edge on P that is
7371 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7372 dominates W, and because of P, Z does not dominate W), and W belongs to
48e1416a 7373 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
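  /* A concrete (hedged) example: with blocks ENTRY->A, A->B, A->C, B->D and
     C->D, removing E = A->C leaves C unreachable, so BBS_TO_REMOVE = {C},
     DF = {D} and DF_IDOM = {idom(D)} = {A}.  The loop below then queues A's
     remaining dominator children (B and D), and iterate_fix_dominators
     updates idom(D) from A to B.  */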
31a8456e 7374 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7375 {
7376 bb = BASIC_BLOCK (i);
7377 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7378 dbb;
7379 dbb = next_dom_son (CDI_DOMINATORS, dbb))
f1f41a6c 7380 bbs_to_fix_dom.safe_push (dbb);
31a8456e 7381 }
7382
3f9439d7 7383 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
31a8456e 7384
7385 BITMAP_FREE (df);
7386 BITMAP_FREE (df_idom);
f1f41a6c 7387 bbs_to_remove.release ();
7388 bbs_to_fix_dom.release ();
31a8456e 7389}
7390
2c8a1497 7391/* Purge dead EH edges from basic block BB. */
7392
35c15734 7393bool
75a70cf9 7394gimple_purge_dead_eh_edges (basic_block bb)
35c15734 7395{
7396 bool changed = false;
cd665a06 7397 edge e;
7398 edge_iterator ei;
75a70cf9 7399 gimple stmt = last_stmt (bb);
35c15734 7400
75a70cf9 7401 if (stmt && stmt_can_throw_internal (stmt))
35c15734 7402 return false;
7403
cd665a06 7404 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
35c15734 7405 {
35c15734 7406 if (e->flags & EDGE_EH)
7407 {
31a8456e 7408 remove_edge_and_dominated_blocks (e);
35c15734 7409 changed = true;
7410 }
cd665a06 7411 else
7412 ei_next (&ei);
35c15734 7413 }
7414
7415 return changed;
7416}
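
/* A hedged usage sketch (not from the original sources): after a pass has
   simplified the last statement of BB so that it can no longer throw, the
   typical follow-up is roughly

     if (maybe_clean_eh_stmt (stmt))
       gimple_purge_dead_eh_edges (bb);

   so that the now-dead EH successor and everything it dominated are
   removed; the helper named here lives in tree-eh.c and is mentioned only
   as an illustration.  */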
7417
10f52eb8 7418/* Purge dead EH edges from the basic blocks listed in BLOCKS.  */
7419
35c15734 7420bool
75a70cf9 7421gimple_purge_all_dead_eh_edges (const_bitmap blocks)
35c15734 7422{
7423 bool changed = false;
4f917ffe 7424 unsigned i;
0cc4271a 7425 bitmap_iterator bi;
35c15734 7426
0cc4271a 7427 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7428 {
96d407e2 7429 basic_block bb = BASIC_BLOCK (i);
7430
7431 /* Earlier gimple_purge_dead_eh_edges could have removed
7432 this basic block already. */
7433 gcc_assert (bb || changed);
7434 if (bb != NULL)
7435 changed |= gimple_purge_dead_eh_edges (bb);
0cc4271a 7436 }
35c15734 7437
7438 return changed;
7439}
4ee9c684 7440
10f52eb8 7441/* Purge dead abnormal call edges from basic block BB. */
7442
7443bool
7444gimple_purge_dead_abnormal_call_edges (basic_block bb)
7445{
7446 bool changed = false;
7447 edge e;
7448 edge_iterator ei;
7449 gimple stmt = last_stmt (bb);
7450
7451 if (!cfun->has_nonlocal_label)
7452 return false;
7453
7454 if (stmt && stmt_can_make_abnormal_goto (stmt))
7455 return false;
7456
7457 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7458 {
7459 if (e->flags & EDGE_ABNORMAL)
7460 {
7461 remove_edge_and_dominated_blocks (e);
7462 changed = true;
7463 }
7464 else
7465 ei_next (&ei);
7466 }
7467
7468 return changed;
7469}
7470
 7471/* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */
7472
7473bool
7474gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7475{
7476 bool changed = false;
7477 unsigned i;
7478 bitmap_iterator bi;
7479
7480 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7481 {
7482 basic_block bb = BASIC_BLOCK (i);
7483
7484 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7485 this basic block already. */
7486 gcc_assert (bb || changed);
7487 if (bb != NULL)
7488 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7489 }
7490
7491 return changed;
7492}
7493
a77b4cde 7494/* This function is called whenever a new edge is created or
7495 redirected. */
7496
7497static void
75a70cf9 7498gimple_execute_on_growing_pred (edge e)
a77b4cde 7499{
7500 basic_block bb = e->dest;
7501
be2517f5 7502 if (!gimple_seq_empty_p (phi_nodes (bb)))
a77b4cde 7503 reserve_phi_args_for_new_edge (bb);
7504}
7505
e4ff8a73 7506/* This function is called immediately before edge E is removed from
7507 the edge vector E->dest->preds. */
7508
7509static void
75a70cf9 7510gimple_execute_on_shrinking_pred (edge e)
e4ff8a73 7511{
be2517f5 7512 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
e4ff8a73 7513 remove_phi_args (e);
7514}
7515
c50ae675 7516/*---------------------------------------------------------------------------
7517 Helper functions for Loop versioning
7518 ---------------------------------------------------------------------------*/
7519
 7520/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
 7521   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
 7522   'new_head' was created by splitting 'second's incoming edge, it received
 7523   phi arguments on that edge from split_edge().  Later, an additional edge
13b96211 7524   'e' was created to connect 'new_head' and 'first'.  This routine now adds
 7525   to edge 'e' the phi args that the new_head->second edge received as part
75a70cf9 7526   of the edge splitting. */
c50ae675 7527
7528static void
75a70cf9 7529gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7530 basic_block new_head, edge e)
c50ae675 7531{
75a70cf9 7532 gimple phi1, phi2;
7533 gimple_stmt_iterator psi1, psi2;
7534 tree def;
b0551d9c 7535 edge e2 = find_edge (new_head, second);
7536
7537 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7538 edge, we should always have an edge from NEW_HEAD to SECOND. */
7539 gcc_assert (e2 != NULL);
c50ae675 7540
7541 /* Browse all 'second' basic block phi nodes and add phi args to
7542 edge 'e' for 'first' head. PHI args are always in correct order. */
7543
75a70cf9 7544 for (psi2 = gsi_start_phis (second),
7545 psi1 = gsi_start_phis (first);
7546 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7547 gsi_next (&psi2), gsi_next (&psi1))
c50ae675 7548 {
75a70cf9 7549 phi1 = gsi_stmt (psi1);
7550 phi2 = gsi_stmt (psi2);
7551 def = PHI_ARG_DEF (phi2, e2->dest_idx);
60d535d2 7552 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
c50ae675 7553 }
7554}
7555
75a70cf9 7556
13b96211 7557/* Adds an if-else statement to COND_BB with condition COND_EXPR.
 7558   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
c50ae675 7559   the destination of the ELSE part. */
75a70cf9 7560
c50ae675 7561static void
75a70cf9 7562gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7563 basic_block second_head ATTRIBUTE_UNUSED,
7564 basic_block cond_bb, void *cond_e)
c50ae675 7565{
75a70cf9 7566 gimple_stmt_iterator gsi;
7567 gimple new_cond_expr;
c50ae675 7568 tree cond_expr = (tree) cond_e;
7569 edge e0;
7570
 7571  /* Build the new conditional expression.  */
75a70cf9 7572 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7573 NULL_TREE, NULL_TREE);
c50ae675 7574
13b96211 7575 /* Add new cond in cond_bb. */
75a70cf9 7576 gsi = gsi_last_bb (cond_bb);
7577 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7578
c50ae675 7579 /* Adjust edges appropriately to connect new head with first head
7580 as well as second head. */
7581 e0 = single_succ_edge (cond_bb);
7582 e0->flags &= ~EDGE_FALLTHRU;
7583 e0->flags |= EDGE_FALSE_VALUE;
7584}
7585
98193482 7586
7587/* Do book-keeping of basic block BB for the profile consistency checker.
 7588   If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
 7589   do post-pass accounting.  Store the counts in RECORD. */
7590static void
7591gimple_account_profile_record (basic_block bb, int after_pass,
7592 struct profile_record *record)
7593{
7594 gimple_stmt_iterator i;
7595 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7596 {
7597 record->size[after_pass]
7598 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7599 if (profile_status == PROFILE_READ)
7600 record->time[after_pass]
7601 += estimate_num_insns (gsi_stmt (i),
7602 &eni_time_weights) * bb->count;
7603 else if (profile_status == PROFILE_GUESSED)
7604 record->time[after_pass]
7605 += estimate_num_insns (gsi_stmt (i),
7606 &eni_time_weights) * bb->frequency;
7607 }
7608}
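
/* For instance (a hedged illustration): with real profile data
   (PROFILE_READ), a block with count 100 whose statements are estimated at
   3 insns in total contributes 3 to size[after_pass] and 300 to
   time[after_pass]; with a guessed profile the block frequency is used as
   the weight instead.  */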
7609
75a70cf9 7610struct cfg_hooks gimple_cfg_hooks = {
7611 "gimple",
7612 gimple_verify_flow_info,
7613 gimple_dump_bb, /* dump_bb */
4ee9c684 7614 create_bb, /* create_basic_block */
75a70cf9 7615 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7616 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7617 gimple_can_remove_branch_p, /* can_remove_branch_p */
4ee9c684 7618 remove_bb, /* delete_basic_block */
75a70cf9 7619 gimple_split_block, /* split_block */
7620 gimple_move_block_after, /* move_block_after */
7621 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7622 gimple_merge_blocks, /* merge_blocks */
7623 gimple_predict_edge, /* predict_edge */
d54f29bc 7624 gimple_predicted_by_p, /* predicted_by_p */
75a70cf9 7625 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7626 gimple_duplicate_bb, /* duplicate_block */
7627 gimple_split_edge, /* split_edge */
7628 gimple_make_forwarder_block, /* make_forward_block */
4ee9c684 7629 NULL, /* tidy_fallthru_edge */
202bbc06 7630 NULL, /* force_nonfallthru */
75a70cf9 7631 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7632 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
d54f29bc 7633 gimple_flow_call_edges_add, /* flow_call_edges_add */
75a70cf9 7634 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7635 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7636 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7637 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7638 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
c50ae675 7639 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9631926a 7640 flush_pending_stmts, /* flush_pending_stmts */
7641 gimple_empty_block_p, /* block_empty_p */
7642 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
98193482 7643 gimple_account_profile_record,
4ee9c684 7644};
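
/* A hedged note (not part of the original sources): generic CFG routines
   dispatch through the table above, so with GIMPLE as the current IR a call
   such as

     edge e2 = split_block (bb, stmt);

   from generic code ends up in gimple_split_block, and likewise for
   split_edge, merge_blocks and the other entries.  */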
7645
7646
7647/* Split all critical edges. */
7648
2a1990e9 7649static unsigned int
4ee9c684 7650split_critical_edges (void)
7651{
7652 basic_block bb;
7653 edge e;
cd665a06 7654 edge_iterator ei;
4ee9c684 7655
6ff867cc 7656 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7657 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7658 mappings around the calls to split_edge. */
7659 start_recording_case_labels ();
4ee9c684 7660 FOR_ALL_BB (bb)
7661 {
cd665a06 7662 FOR_EACH_EDGE (e, ei, bb->succs)
3d1eacdb 7663 {
7664 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
4ee9c684 7665 split_edge (e);
48e1416a 7666	  /* PRE inserts statements on edges and expects that,
3d1eacdb 7667	     since split_critical_edges was run beforehand, committing edge
 7668	     insertions will not split more edges.  In addition to critical
 7669	     edges we must also split edges whose source block ends in a
48e1416a 7670	     control flow statement, such as a RESX, when the destination
3d1eacdb 7671	     cannot take the insertion directly.  This matches the logic in
 7672	     gimple_find_edge_insert_loc. */
7673 else if ((!single_pred_p (e->dest)
7c41b799 7674 || !gimple_seq_empty_p (phi_nodes (e->dest))
3d1eacdb 7675 || e->dest == EXIT_BLOCK_PTR)
7676 && e->src != ENTRY_BLOCK_PTR
7677 && !(e->flags & EDGE_ABNORMAL))
7678 {
7679 gimple_stmt_iterator gsi;
7680
7681 gsi = gsi_last_bb (e->src);
7682 if (!gsi_end_p (gsi)
7683 && stmt_ends_bb_p (gsi_stmt (gsi))
3ea38c1f 7684 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7685 && !gimple_call_builtin_p (gsi_stmt (gsi),
7686 BUILT_IN_RETURN)))
3d1eacdb 7687 split_edge (e);
7688 }
7689 }
4ee9c684 7690 }
6ff867cc 7691 end_recording_case_labels ();
2a1990e9 7692 return 0;
4ee9c684 7693}
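
/* A brief reminder (hedged, not from the original sources): an edge is
   critical when its source has more than one successor and its destination
   has more than one predecessor, as A->D is in

       A   B
      / \ /
     C   D

   Statements cannot be inserted on such an edge without affecting another
   path, which is why split_critical_edges interposes a new block on it.  */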
7694
20099e35 7695struct gimple_opt_pass pass_split_crit_edges =
4ee9c684 7696{
20099e35 7697 {
7698 GIMPLE_PASS,
b84a3da6 7699 "crited", /* name */
c7875731 7700 OPTGROUP_NONE, /* optinfo_flags */
4ee9c684 7701 NULL, /* gate */
7702 split_critical_edges, /* execute */
7703 NULL, /* sub */
7704 NULL, /* next */
7705 0, /* static_pass_number */
7706 TV_TREE_SPLIT_EDGES, /* tv_id */
7707 PROP_cfg, /* properties required */
7708 PROP_no_crit_edges, /* properties_provided */
7709 0, /* properties_destroyed */
7710 0, /* todo_flags_start */
771e2890 7711 TODO_verify_flow /* todo_flags_finish */
20099e35 7712 }
4ee9c684 7713};
83e2a11b 7714
83e2a11b 7715
75a70cf9 7716/* Build a ternary operation and gimplify it. Emit code before GSI.
83e2a11b 7717 Return the gimple_val holding the result. */
7718
7719tree
75a70cf9 7720gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
83e2a11b 7721 tree type, tree a, tree b, tree c)
7722{
7723 tree ret;
389dd41b 7724 location_t loc = gimple_location (gsi_stmt (*gsi));
83e2a11b 7725
389dd41b 7726 ret = fold_build3_loc (loc, code, type, a, b, c);
83e2a11b 7727 STRIP_NOPS (ret);
7728
75a70cf9 7729 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7730 GSI_SAME_STMT);
83e2a11b 7731}
7732
75a70cf9 7733/* Build a binary operation and gimplify it. Emit code before GSI.
83e2a11b 7734 Return the gimple_val holding the result. */
7735
7736tree
75a70cf9 7737gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
83e2a11b 7738 tree type, tree a, tree b)
7739{
7740 tree ret;
7741
389dd41b 7742 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
83e2a11b 7743 STRIP_NOPS (ret);
7744
75a70cf9 7745 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7746 GSI_SAME_STMT);
83e2a11b 7747}
7748
75a70cf9 7749/* Build a unary operation and gimplify it. Emit code before GSI.
83e2a11b 7750 Return the gimple_val holding the result. */
7751
7752tree
75a70cf9 7753gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
83e2a11b 7754 tree a)
7755{
7756 tree ret;
7757
389dd41b 7758 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
83e2a11b 7759 STRIP_NOPS (ret);
7760
75a70cf9 7761 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7762 GSI_SAME_STMT);
83e2a11b 7763}
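
/* A hedged usage sketch (local names are illustrative, not from the
   sources):

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);

   folds A + B, forces the result into a gimple value, and emits any needed
   statements before the statement at GSI, so SUM can be used directly as
   an operand when building further statements.  */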
7764
7765
4ee9c684 7766\f
7767/* Emit return warnings. */
7768
2a1990e9 7769static unsigned int
4ee9c684 7770execute_warn_function_return (void)
7771{
512e0fec 7772 source_location location;
75a70cf9 7773 gimple last;
4ee9c684 7774 edge e;
cd665a06 7775 edge_iterator ei;
4ee9c684 7776
08c6cbd2 7777 if (!targetm.warn_func_return (cfun->decl))
7778 return 0;
7779
4ee9c684 7780 /* If we have a path to EXIT, then we do return. */
7781 if (TREE_THIS_VOLATILE (cfun->decl)
cd665a06 7782 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
4ee9c684 7783 {
512e0fec 7784 location = UNKNOWN_LOCATION;
cd665a06 7785 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4ee9c684 7786 {
7787 last = last_stmt (e->src);
3ea38c1f 7788 if ((gimple_code (last) == GIMPLE_RETURN
7789 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
75a70cf9 7790 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
4ee9c684 7791 break;
7792 }
512e0fec 7793 if (location == UNKNOWN_LOCATION)
7794 location = cfun->function_end_locus;
5fb6a912 7795 warning_at (location, 0, "%<noreturn%> function does return");
4ee9c684 7796 }
7797
7798 /* If we see "return;" in some basic block, then we do reach the end
7799 without returning a value. */
7800 else if (warn_return_type
7f0f308d 7801 && !TREE_NO_WARNING (cfun->decl)
cd665a06 7802 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
4ee9c684 7803 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7804 {
cd665a06 7805 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4ee9c684 7806 {
75a70cf9 7807 gimple last = last_stmt (e->src);
7808 if (gimple_code (last) == GIMPLE_RETURN
7809 && gimple_return_retval (last) == NULL
7810 && !gimple_no_warning_p (last))
4ee9c684 7811 {
75a70cf9 7812 location = gimple_location (last);
512e0fec 7813 if (location == UNKNOWN_LOCATION)
7814 location = cfun->function_end_locus;
bdbc474b 7815 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7f0f308d 7816 TREE_NO_WARNING (cfun->decl) = 1;
4ee9c684 7817 break;
7818 }
7819 }
7820 }
2a1990e9 7821 return 0;
4ee9c684 7822}
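
/* For example (hedged, not part of the sources), compiling

     int f (int x) { if (x) return 1; }

   with -Wreturn-type reaches the second branch above: the edge into EXIT
   comes from a "return;" with no value, so control reaches the end of a
   non-void function and the warning is issued.  */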
7823
7824
7825/* Given a basic block B which ends with a conditional and has
7826 precisely two successors, determine which of the edges is taken if
7827 the conditional is true and which is taken if the conditional is
7828 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7829
7830void
7831extract_true_false_edges_from_block (basic_block b,
7832 edge *true_edge,
7833 edge *false_edge)
7834{
cd665a06 7835 edge e = EDGE_SUCC (b, 0);
4ee9c684 7836
7837 if (e->flags & EDGE_TRUE_VALUE)
7838 {
7839 *true_edge = e;
cd665a06 7840 *false_edge = EDGE_SUCC (b, 1);
4ee9c684 7841 }
7842 else
7843 {
7844 *false_edge = e;
cd665a06 7845 *true_edge = EDGE_SUCC (b, 1);
4ee9c684 7846 }
7847}
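
/* Typical use (a hedged sketch, not from the original sources):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);

   where COND_BB ends in a GIMPLE_COND; the loop-versioning code reaches
   this through the extract_cond_bb_edges entry of gimple_cfg_hooks.  */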
7848
20099e35 7849struct gimple_opt_pass pass_warn_function_return =
4ee9c684 7850{
20099e35 7851 {
7852 GIMPLE_PASS,
0c297edc 7853 "*warn_function_return", /* name */
c7875731 7854 OPTGROUP_NONE, /* optinfo_flags */
4ee9c684 7855 NULL, /* gate */
7856 execute_warn_function_return, /* execute */
7857 NULL, /* sub */
7858 NULL, /* next */
7859 0, /* static_pass_number */
0b1615c1 7860 TV_NONE, /* tv_id */
c4110044 7861 PROP_cfg, /* properties_required */
4ee9c684 7862 0, /* properties_provided */
7863 0, /* properties_destroyed */
7864 0, /* todo_flags_start */
20099e35 7865 0 /* todo_flags_finish */
7866 }
4ee9c684 7867};
73d5fb10 7868
7869/* Emit noreturn warnings. */
7870
2a1990e9 7871static unsigned int
73d5fb10 7872execute_warn_function_noreturn (void)
7873{
43d60d64 7874 if (!TREE_THIS_VOLATILE (current_function_decl)
7875 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7876 warn_function_noreturn (current_function_decl);
2a1990e9 7877 return 0;
73d5fb10 7878}
7879
43d60d64 7880static bool
7881gate_warn_function_noreturn (void)
7882{
7883 return warn_suggest_attribute_noreturn;
7884}
7885
20099e35 7886struct gimple_opt_pass pass_warn_function_noreturn =
73d5fb10 7887{
20099e35 7888 {
7889 GIMPLE_PASS,
0c297edc 7890 "*warn_function_noreturn", /* name */
c7875731 7891 OPTGROUP_NONE, /* optinfo_flags */
43d60d64 7892 gate_warn_function_noreturn, /* gate */
73d5fb10 7893 execute_warn_function_noreturn, /* execute */
7894 NULL, /* sub */
7895 NULL, /* next */
7896 0, /* static_pass_number */
0b1615c1 7897 TV_NONE, /* tv_id */
73d5fb10 7898 PROP_cfg, /* properties_required */
7899 0, /* properties_provided */
7900 0, /* properties_destroyed */
7901 0, /* todo_flags_start */
20099e35 7902 0 /* todo_flags_finish */
7903 }
73d5fb10 7904};
bfec3452 7905
7906
7907/* Walk a gimplified function and warn for functions whose return value is
7908 ignored and attribute((warn_unused_result)) is set. This is done before
7909 inlining, so we don't have to worry about that. */
7910
7911static void
7912do_warn_unused_result (gimple_seq seq)
7913{
7914 tree fdecl, ftype;
7915 gimple_stmt_iterator i;
7916
7917 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7918 {
7919 gimple g = gsi_stmt (i);
7920
7921 switch (gimple_code (g))
7922 {
7923 case GIMPLE_BIND:
7924 do_warn_unused_result (gimple_bind_body (g));
7925 break;
7926 case GIMPLE_TRY:
7927 do_warn_unused_result (gimple_try_eval (g));
7928 do_warn_unused_result (gimple_try_cleanup (g));
7929 break;
7930 case GIMPLE_CATCH:
7931 do_warn_unused_result (gimple_catch_handler (g));
7932 break;
7933 case GIMPLE_EH_FILTER:
7934 do_warn_unused_result (gimple_eh_filter_failure (g));
7935 break;
7936
7937 case GIMPLE_CALL:
7938 if (gimple_call_lhs (g))
7939 break;
fb049fba 7940 if (gimple_call_internal_p (g))
7941 break;
bfec3452 7942
7943 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7944 LHS. All calls whose value is ignored should be
7945 represented like this. Look for the attribute. */
7946 fdecl = gimple_call_fndecl (g);
2de00a2d 7947 ftype = gimple_call_fntype (g);
bfec3452 7948
7949 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7950 {
7951 location_t loc = gimple_location (g);
7952
7953 if (fdecl)
7954 warning_at (loc, OPT_Wunused_result,
7955 "ignoring return value of %qD, "
7956 "declared with attribute warn_unused_result",
7957 fdecl);
7958 else
7959 warning_at (loc, OPT_Wunused_result,
7960 "ignoring return value of function "
7961 "declared with attribute warn_unused_result");
7962 }
7963 break;
7964
7965 default:
7966 /* Not a container, not a call, or a call whose value is used. */
7967 break;
7968 }
7969 }
7970}
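
/* As an illustration (hedged, not part of the sources), for

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   the call to f is a GIMPLE_CALL with no LHS, the attribute is found on the
   function type, and the -Wunused-result warning above is emitted.  */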
7971
7972static unsigned int
7973run_warn_unused_result (void)
7974{
7975 do_warn_unused_result (gimple_body (current_function_decl));
7976 return 0;
7977}
7978
7979static bool
7980gate_warn_unused_result (void)
7981{
7982 return flag_warn_unused_result;
7983}
7984
7985struct gimple_opt_pass pass_warn_unused_result =
7986{
7987 {
7988 GIMPLE_PASS,
53b5ae07 7989 "*warn_unused_result", /* name */
c7875731 7990 OPTGROUP_NONE, /* optinfo_flags */
bfec3452 7991 gate_warn_unused_result, /* gate */
7992 run_warn_unused_result, /* execute */
7993 NULL, /* sub */
7994 NULL, /* next */
7995 0, /* static_pass_number */
7996 TV_NONE, /* tv_id */
7997 PROP_gimple_any, /* properties_required */
7998 0, /* properties_provided */
7999 0, /* properties_destroyed */
8000 0, /* todo_flags_start */
8001 0, /* todo_flags_finish */
8002 }
8003};
2b15d2ba 8004
8005
8006/* Garbage collection support for edge_def. */
8007
8008extern void gt_ggc_mx (tree&);
8009extern void gt_ggc_mx (gimple&);
8010extern void gt_ggc_mx (rtx&);
8011extern void gt_ggc_mx (basic_block&);
8012
8013void
8014gt_ggc_mx (edge_def *e)
8015{
5169661d 8016 tree block = LOCATION_BLOCK (e->goto_locus);
2b15d2ba 8017 gt_ggc_mx (e->src);
8018 gt_ggc_mx (e->dest);
8019 if (current_ir_type () == IR_GIMPLE)
8020 gt_ggc_mx (e->insns.g);
8021 else
8022 gt_ggc_mx (e->insns.r);
5169661d 8023 gt_ggc_mx (block);
2b15d2ba 8024}
8025
8026/* PCH support for edge_def. */
8027
8028extern void gt_pch_nx (tree&);
8029extern void gt_pch_nx (gimple&);
8030extern void gt_pch_nx (rtx&);
8031extern void gt_pch_nx (basic_block&);
8032
8033void
8034gt_pch_nx (edge_def *e)
8035{
5169661d 8036 tree block = LOCATION_BLOCK (e->goto_locus);
2b15d2ba 8037 gt_pch_nx (e->src);
8038 gt_pch_nx (e->dest);
8039 if (current_ir_type () == IR_GIMPLE)
8040 gt_pch_nx (e->insns.g);
8041 else
8042 gt_pch_nx (e->insns.r);
5169661d 8043 gt_pch_nx (block);
2b15d2ba 8044}
8045
8046void
8047gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8048{
5169661d 8049 tree block = LOCATION_BLOCK (e->goto_locus);
2b15d2ba 8050 op (&(e->src), cookie);
8051 op (&(e->dest), cookie);
8052 if (current_ir_type () == IR_GIMPLE)
8053 op (&(e->insns.g), cookie);
8054 else
8055 op (&(e->insns.r), cookie);
5169661d 8056 op (&(block), cookie);
2b15d2ba 8057}