gcc/tree-cfg.c
4ee9c684 1/* Control flow functions for trees.
cfaf579d 2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
2a09b1df 3 Free Software Foundation, Inc.
4ee9c684 4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
8c4c00c1 10the Free Software Foundation; either version 3, or (at your option)
4ee9c684 11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
4ee9c684 21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "output.h"
4ee9c684 32#include "flags.h"
33#include "function.h"
34#include "expr.h"
35#include "ggc.h"
36#include "langhooks.h"
37#include "diagnostic.h"
38#include "tree-flow.h"
39#include "timevar.h"
40#include "tree-dump.h"
41#include "tree-pass.h"
42#include "toplev.h"
43#include "except.h"
44#include "cfgloop.h"
d8b5b4fe 45#include "cfglayout.h"
fcd2418b 46#include "tree-ssa-propagate.h"
4992f399 47#include "value-prof.h"
836074a5 48#include "pointer-set.h"
79acaae1 49#include "tree-inline.h"
4ee9c684 50
51/* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
53
54/* Local declarations. */
55
56/* Initial capacity for the basic block array. */
57static const int initial_cfg_capacity = 20;
58
6ff867cc 59/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
75a70cf9 62 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
194ae074 63
6ff867cc 64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
194ae074 66
6ff867cc 67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
71
b30a8715 72static struct pointer_map_t *edge_to_cases;
194ae074 73
4ee9c684 74/* CFG statistics. */
75struct cfg_stats_d
76{
77 long num_merged_labels;
78};
79
80static struct cfg_stats_d cfg_stats;
81
82/* Nonzero if we found a computed goto while building basic blocks. */
83static bool found_computed_goto;
84
d01c707b 85/* Hash table to store last discriminator assigned for each locus. */
86struct locus_discrim_map
87{
88 location_t locus;
89 int discriminator;
90};
91static htab_t discriminator_per_locus;
92
4ee9c684 93/* Basic blocks and flowgraphs. */
75a70cf9 94static void make_blocks (gimple_seq);
4ee9c684 95static void factor_computed_gotos (void);
4ee9c684 96
97/* Edges. */
98static void make_edges (void);
4ee9c684 99static void make_cond_expr_edges (basic_block);
75a70cf9 100static void make_gimple_switch_edges (basic_block);
4ee9c684 101static void make_goto_expr_edges (basic_block);
d01c707b 102static unsigned int locus_map_hash (const void *);
103static int locus_map_eq (const void *, const void *);
104static void assign_discriminator (location_t, basic_block);
75a70cf9 105static edge gimple_redirect_edge_and_branch (edge, basic_block);
106static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
2a1990e9 107static unsigned int split_critical_edges (void);
4ee9c684 108
109/* Various helpers. */
75a70cf9 110static inline bool stmt_starts_bb_p (gimple, gimple);
111static int gimple_verify_flow_info (void);
112static void gimple_make_forwarder_block (edge);
113static void gimple_cfg2vcg (FILE *);
d01c707b 114static gimple first_non_label_stmt (basic_block);
4ee9c684 115
116/* Flowgraph optimization and cleanup. */
75a70cf9 117static void gimple_merge_blocks (basic_block, basic_block);
118static bool gimple_can_merge_blocks_p (basic_block, basic_block);
4ee9c684 119static void remove_bb (basic_block);
6d7413d8 120static edge find_taken_edge_computed_goto (basic_block, tree);
4ee9c684 121static edge find_taken_edge_cond_expr (basic_block, tree);
122static edge find_taken_edge_switch_expr (basic_block, tree);
75a70cf9 123static tree find_case_label_for_value (gimple, tree);
4ee9c684 124
4f217f69 125void
c27baad4 126init_empty_tree_cfg_for_function (struct function *fn)
4f217f69 127{
128 /* Initialize the basic block array. */
c27baad4 129 init_flow (fn);
130 profile_status_for_function (fn) = PROFILE_ABSENT;
131 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
132 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
133 basic_block_info_for_function (fn)
134 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
135 VEC_safe_grow_cleared (basic_block, gc,
136 basic_block_info_for_function (fn),
e85c2c2d 137 initial_cfg_capacity);
4f217f69 138
139 /* Build a mapping of labels to their associated blocks. */
c27baad4 140 label_to_block_map_for_function (fn)
141 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
142 VEC_safe_grow_cleared (basic_block, gc,
143 label_to_block_map_for_function (fn),
e85c2c2d 144 initial_cfg_capacity);
4f217f69 145
c27baad4 146 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
147 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
148 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
149 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
150
151 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
152 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
153 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
154 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
155}
156
157void
158init_empty_tree_cfg (void)
159{
160 init_empty_tree_cfg_for_function (cfun);
4f217f69 161}
4ee9c684 162
163/*---------------------------------------------------------------------------
164 Create basic blocks
165---------------------------------------------------------------------------*/
166
75a70cf9 167/* Entry point to the CFG builder for trees. SEQ is the sequence of
4ee9c684 168 statements to be added to the flowgraph. */
169
170static void
75a70cf9 171build_gimple_cfg (gimple_seq seq)
4ee9c684 172{
75a70cf9 173 /* Register specific gimple functions. */
174 gimple_register_cfg_hooks ();
4ee9c684 175
4ee9c684 176 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
177
4f217f69 178 init_empty_tree_cfg ();
4ee9c684 179
180 found_computed_goto = 0;
75a70cf9 181 make_blocks (seq);
4ee9c684 182
183 /* Computed gotos are hell to deal with, especially if there are
184 lots of them with a large number of destinations. So we factor
185 them to a common computed goto location before we build the
186 edge list. After we convert back to normal form, we will un-factor
187 the computed gotos since factoring introduces an unwanted jump. */
188 if (found_computed_goto)
189 factor_computed_gotos ();
190
ebeaefa4 191 /* Make sure there is always at least one block, even if it's empty. */
4d2e5d52 192 if (n_basic_blocks == NUM_FIXED_BLOCKS)
4ee9c684 193 create_empty_bb (ENTRY_BLOCK_PTR);
194
4ee9c684 195 /* Adjust the size of the array. */
85b938d0 196 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
e85c2c2d 197 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
4ee9c684 198
b624a250 199 /* To speed up statement iterator walks, we first purge dead labels. */
200 cleanup_dead_labels ();
201
202 /* Group case nodes to reduce the number of edges.
203 We do this after cleaning up dead labels because otherwise we miss
204 a lot of obvious case merging opportunities. */
205 group_case_labels ();
206
4ee9c684 207 /* Create the edges of the flowgraph. */
d01c707b 208 discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
209 free);
4ee9c684 210 make_edges ();
6313ae8b 211 cleanup_dead_labels ();
d01c707b 212 htab_delete (discriminator_per_locus);
4ee9c684 213
214 /* Debugging dumps. */
215
216 /* Write the flowgraph to a VCG file. */
217 {
218 int local_dump_flags;
3f5be5f4 219 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
220 if (vcg_file)
4ee9c684 221 {
75a70cf9 222 gimple_cfg2vcg (vcg_file);
3f5be5f4 223 dump_end (TDI_vcg, vcg_file);
4ee9c684 224 }
225 }
226
eb0fefdb 227#ifdef ENABLE_CHECKING
228 verify_stmts ();
229#endif
4ee9c684 230}
231
2a1990e9 232static unsigned int
4ee9c684 233execute_build_cfg (void)
234{
1a1a827a 235 gimple_seq body = gimple_body (current_function_decl);
236
237 build_gimple_cfg (body);
238 gimple_set_body (current_function_decl, NULL);
cee43f7e 239 if (dump_file && (dump_flags & TDF_DETAILS))
240 {
241 fprintf (dump_file, "Scope blocks:\n");
242 dump_scope_blocks (dump_file, dump_flags);
243 }
2a1990e9 244 return 0;
4ee9c684 245}
246
20099e35 247struct gimple_opt_pass pass_build_cfg =
4ee9c684 248{
20099e35 249 {
250 GIMPLE_PASS,
4ee9c684 251 "cfg", /* name */
252 NULL, /* gate */
253 execute_build_cfg, /* execute */
254 NULL, /* sub */
255 NULL, /* next */
256 0, /* static_pass_number */
257 TV_TREE_CFG, /* tv_id */
75a70cf9 258 PROP_gimple_leh, /* properties_required */
4ee9c684 259 PROP_cfg, /* properties_provided */
260 0, /* properties_destroyed */
261 0, /* todo_flags_start */
43bba217 262 TODO_verify_stmts | TODO_cleanup_cfg
263 | TODO_dump_func /* todo_flags_finish */
20099e35 264 }
4ee9c684 265};
266
75a70cf9 267
268/* Return true if T is a computed goto. */
269
270static bool
271computed_goto_p (gimple t)
272{
273 return (gimple_code (t) == GIMPLE_GOTO
274 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
275}
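/* Editor's note (illustrative, not part of the GCC sources): a computed
   goto is the GNU C labels-as-values extension, where the jump target
   is a run-time value rather than a LABEL_DECL, e.g.

       void *targets[2] = { &&L1, &&L2 };
       goto *targets[i];
     L1: ...
     L2: ...

   computed_goto_p above is true for the GIMPLE_GOTO built from
   "goto *targets[i]" and false for a plain "goto L1".  */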
276
277
13b96211 278/* Search the CFG for any computed gotos. If found, factor them to a
4ee9c684 279 common computed goto site. Also record the location of that site so
13b96211 280 that we can un-factor the gotos after we have converted back to
4ee9c684 281 normal form. */
282
283static void
284factor_computed_gotos (void)
285{
286 basic_block bb;
287 tree factored_label_decl = NULL;
288 tree var = NULL;
75a70cf9 289 gimple factored_computed_goto_label = NULL;
290 gimple factored_computed_goto = NULL;
4ee9c684 291
292 /* We know there are one or more computed gotos in this function.
293 Examine the last statement in each basic block to see if the block
294 ends with a computed goto. */
13b96211 295
4ee9c684 296 FOR_EACH_BB (bb)
297 {
75a70cf9 298 gimple_stmt_iterator gsi = gsi_last_bb (bb);
299 gimple last;
4ee9c684 300
75a70cf9 301 if (gsi_end_p (gsi))
4ee9c684 302 continue;
75a70cf9 303
304 last = gsi_stmt (gsi);
4ee9c684 305
306 /* Ignore the computed goto we create when we factor the original
307 computed gotos. */
308 if (last == factored_computed_goto)
309 continue;
310
311 /* If the last statement is a computed goto, factor it. */
312 if (computed_goto_p (last))
313 {
75a70cf9 314 gimple assignment;
4ee9c684 315
316 /* The first time we find a computed goto we need to create
317 the factored goto block and the variable each original
318 computed goto will use for their goto destination. */
75a70cf9 319 if (!factored_computed_goto)
4ee9c684 320 {
321 basic_block new_bb = create_empty_bb (bb);
75a70cf9 322 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
4ee9c684 323
324 /* Create the destination of the factored goto. Each original
325 computed goto will put its desired destination into this
326 variable and jump to the label we create immediately
327 below. */
328 var = create_tmp_var (ptr_type_node, "gotovar");
329
330 /* Build a label for the new block which will contain the
331 factored computed goto. */
e60a6f7b 332 factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
4ee9c684 333 factored_computed_goto_label
75a70cf9 334 = gimple_build_label (factored_label_decl);
335 gsi_insert_after (&new_gsi, factored_computed_goto_label,
336 GSI_NEW_STMT);
4ee9c684 337
338 /* Build our new computed goto. */
75a70cf9 339 factored_computed_goto = gimple_build_goto (var);
340 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
4ee9c684 341 }
342
343 /* Copy the original computed goto's destination into VAR. */
75a70cf9 344 assignment = gimple_build_assign (var, gimple_goto_dest (last));
345 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
4ee9c684 346
347 /* And re-vector the computed goto to the new destination. */
75a70cf9 348 gimple_goto_set_dest (last, factored_label_decl);
4ee9c684 349 }
350 }
351}
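/* Editor's sketch (illustrative, not part of the GCC sources): the
   factoring above rewrites every block ending in "goto *EXPR;" as

       gotovar = EXPR;
       goto <factored_label>;

   and creates one extra block

     <factored_label>:
       goto *gotovar;

   so the function keeps a single computed goto, and therefore a single
   fan-out of abnormal edges, instead of one per original computed goto.  */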
352
353
75a70cf9 354/* Build a flowgraph for the sequence of stmts SEQ. */
4ee9c684 355
356static void
75a70cf9 357make_blocks (gimple_seq seq)
4ee9c684 358{
75a70cf9 359 gimple_stmt_iterator i = gsi_start (seq);
360 gimple stmt = NULL;
4ee9c684 361 bool start_new_block = true;
75a70cf9 362 bool first_stmt_of_seq = true;
4ee9c684 363 basic_block bb = ENTRY_BLOCK_PTR;
364
75a70cf9 365 while (!gsi_end_p (i))
4ee9c684 366 {
75a70cf9 367 gimple prev_stmt;
4ee9c684 368
369 prev_stmt = stmt;
75a70cf9 370 stmt = gsi_stmt (i);
4ee9c684 371
372 /* If the statement starts a new basic block or if we have determined
373 in a previous pass that we need to create a new block for STMT, do
374 so now. */
375 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
376 {
75a70cf9 377 if (!first_stmt_of_seq)
378 seq = gsi_split_seq_before (&i);
379 bb = create_basic_block (seq, NULL, bb);
4ee9c684 380 start_new_block = false;
381 }
382
383 /* Now add STMT to BB and create the subgraphs for special statement
384 codes. */
75a70cf9 385 gimple_set_bb (stmt, bb);
4ee9c684 386
387 if (computed_goto_p (stmt))
388 found_computed_goto = true;
389
390 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
391 next iteration. */
392 if (stmt_ends_bb_p (stmt))
3e40d86f 393 {
394 /* If the stmt can make abnormal goto use a new temporary
395 for the assignment to the LHS. This makes sure the old value
396 of the LHS is available on the abnormal edge. Otherwise
397 we will end up with overlapping life-ranges for abnormal
398 SSA names. */
399 if (gimple_has_lhs (stmt)
400 && stmt_can_make_abnormal_goto (stmt)
401 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
402 {
403 tree lhs = gimple_get_lhs (stmt);
404 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
405 gimple s = gimple_build_assign (lhs, tmp);
406 gimple_set_location (s, gimple_location (stmt));
407 gimple_set_block (s, gimple_block (stmt));
408 gimple_set_lhs (stmt, tmp);
409 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
410 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
411 DECL_GIMPLE_REG_P (tmp) = 1;
412 gsi_insert_after (&i, s, GSI_SAME_STMT);
413 }
414 start_new_block = true;
415 }
4ee9c684 416
75a70cf9 417 gsi_next (&i);
418 first_stmt_of_seq = false;
4ee9c684 419 }
420}
421
422
423/* Create and return a new empty basic block after bb AFTER. */
424
425static basic_block
426create_bb (void *h, void *e, basic_block after)
427{
428 basic_block bb;
429
8c0963c4 430 gcc_assert (!e);
4ee9c684 431
740f575d 432 /* Create and initialize a new basic block. Since alloc_block uses
433 ggc_alloc_cleared to allocate a basic block, we do not have to
434 clear the newly allocated basic block here. */
4ee9c684 435 bb = alloc_block ();
4ee9c684 436
437 bb->index = last_basic_block;
438 bb->flags = BB_NEW;
75a70cf9 439 bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
440 set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());
4ee9c684 441
442 /* Add the new block to the linked list of blocks. */
443 link_block (bb, after);
444
445 /* Grow the basic block array if needed. */
85b938d0 446 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
4ee9c684 447 {
448 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
e85c2c2d 449 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
4ee9c684 450 }
451
452 /* Add the newly created block to the array. */
85b938d0 453 SET_BASIC_BLOCK (last_basic_block, bb);
4ee9c684 454
4ee9c684 455 n_basic_blocks++;
456 last_basic_block++;
457
4ee9c684 458 return bb;
459}
460
461
462/*---------------------------------------------------------------------------
463 Edge creation
464---------------------------------------------------------------------------*/
465
9677695f 466/* Fold COND_EXPR_COND of each COND_EXPR. */
467
e27482aa 468void
9677695f 469fold_cond_expr_cond (void)
470{
471 basic_block bb;
472
473 FOR_EACH_BB (bb)
474 {
75a70cf9 475 gimple stmt = last_stmt (bb);
9677695f 476
75a70cf9 477 if (stmt && gimple_code (stmt) == GIMPLE_COND)
9677695f 478 {
add6ee5e 479 tree cond;
480 bool zerop, onep;
481
482 fold_defer_overflow_warnings ();
75a70cf9 483 cond = fold_binary (gimple_cond_code (stmt), boolean_type_node,
484 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
485 if (cond)
486 {
487 zerop = integer_zerop (cond);
488 onep = integer_onep (cond);
489 }
490 else
491 zerop = onep = false;
492
72c59a18 493 fold_undefer_overflow_warnings (zerop || onep,
c7addd8c 494 stmt,
add6ee5e 495 WARN_STRICT_OVERFLOW_CONDITIONAL);
496 if (zerop)
75a70cf9 497 gimple_cond_make_false (stmt);
add6ee5e 498 else if (onep)
75a70cf9 499 gimple_cond_make_true (stmt);
9677695f 500 }
501 }
502}
503
4ee9c684 504/* Join all the blocks in the flowgraph. */
505
506static void
507make_edges (void)
508{
509 basic_block bb;
caa1b5c9 510 struct omp_region *cur_region = NULL;
4ee9c684 511
512 /* Create an edge from entry to the first block with executable
513 statements in it. */
4d2e5d52 514 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
4ee9c684 515
a33d8949 516 /* Traverse the basic block array placing edges. */
4ee9c684 517 FOR_EACH_BB (bb)
518 {
75a70cf9 519 gimple last = last_stmt (bb);
2a09b1df 520 bool fallthru;
4ee9c684 521
2a09b1df 522 if (last)
4ee9c684 523 {
75a70cf9 524 enum gimple_code code = gimple_code (last);
caa1b5c9 525 switch (code)
2a09b1df 526 {
75a70cf9 527 case GIMPLE_GOTO:
2a09b1df 528 make_goto_expr_edges (bb);
529 fallthru = false;
530 break;
75a70cf9 531 case GIMPLE_RETURN:
2a09b1df 532 make_edge (bb, EXIT_BLOCK_PTR, 0);
533 fallthru = false;
534 break;
75a70cf9 535 case GIMPLE_COND:
2a09b1df 536 make_cond_expr_edges (bb);
537 fallthru = false;
538 break;
75a70cf9 539 case GIMPLE_SWITCH:
540 make_gimple_switch_edges (bb);
2a09b1df 541 fallthru = false;
542 break;
75a70cf9 543 case GIMPLE_RESX:
2a09b1df 544 make_eh_edges (last);
545 fallthru = false;
546 break;
547
75a70cf9 548 case GIMPLE_CALL:
2a09b1df 549 /* If this function receives a nonlocal goto, then we need to
550 make edges from this call site to all the nonlocal goto
551 handlers. */
75a70cf9 552 if (stmt_can_make_abnormal_goto (last))
2c8a1497 553 make_abnormal_goto_edges (bb, true);
4ee9c684 554
2a09b1df 555 /* If this statement has reachable exception handlers, then
556 create abnormal edges to them. */
557 make_eh_edges (last);
558
559 /* Some calls are known not to return. */
75a70cf9 560 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
2a09b1df 561 break;
562
75a70cf9 563 case GIMPLE_ASSIGN:
564 /* A GIMPLE_ASSIGN may throw internally and thus be considered
565 control-altering. */
2a09b1df 566 if (is_ctrl_altering_stmt (last))
567 {
2a09b1df 568 make_eh_edges (last);
569 }
570 fallthru = true;
571 break;
572
75a70cf9 573 case GIMPLE_OMP_PARALLEL:
574 case GIMPLE_OMP_TASK:
575 case GIMPLE_OMP_FOR:
576 case GIMPLE_OMP_SINGLE:
577 case GIMPLE_OMP_MASTER:
578 case GIMPLE_OMP_ORDERED:
579 case GIMPLE_OMP_CRITICAL:
580 case GIMPLE_OMP_SECTION:
caa1b5c9 581 cur_region = new_omp_region (bb, code, cur_region);
2a09b1df 582 fallthru = true;
583 break;
584
75a70cf9 585 case GIMPLE_OMP_SECTIONS:
caa1b5c9 586 cur_region = new_omp_region (bb, code, cur_region);
ac6e3339 587 fallthru = true;
588 break;
589
75a70cf9 590 case GIMPLE_OMP_SECTIONS_SWITCH:
6c105785 591 fallthru = false;
61e47ac8 592 break;
593
cb7f680b 594
75a70cf9 595 case GIMPLE_OMP_ATOMIC_LOAD:
596 case GIMPLE_OMP_ATOMIC_STORE:
cb7f680b 597 fallthru = true;
598 break;
599
600
75a70cf9 601 case GIMPLE_OMP_RETURN:
602 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
603 somewhere other than the next block. This will be
604 created later. */
caa1b5c9 605 cur_region->exit = bb;
75a70cf9 606 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
caa1b5c9 607 cur_region = cur_region->outer;
608 break;
609
75a70cf9 610 case GIMPLE_OMP_CONTINUE:
caa1b5c9 611 cur_region->cont = bb;
612 switch (cur_region->type)
613 {
75a70cf9 614 case GIMPLE_OMP_FOR:
615 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
616 succs edges as abnormal to prevent splitting
617 them. */
b3a3ddec 618 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
ac6e3339 619 /* Make the loopback edge. */
b3a3ddec 620 make_edge (bb, single_succ (cur_region->entry),
621 EDGE_ABNORMAL);
622
75a70cf9 623 /* Create an edge from GIMPLE_OMP_FOR to exit, which
624 corresponds to the case that the body of the loop
625 is not executed at all. */
b3a3ddec 626 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
627 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
628 fallthru = false;
caa1b5c9 629 break;
630
75a70cf9 631 case GIMPLE_OMP_SECTIONS:
caa1b5c9 632 /* Wire up the edges into and out of the nested sections. */
caa1b5c9 633 {
ac6e3339 634 basic_block switch_bb = single_succ (cur_region->entry);
635
caa1b5c9 636 struct omp_region *i;
637 for (i = cur_region->inner; i ; i = i->next)
638 {
75a70cf9 639 gcc_assert (i->type == GIMPLE_OMP_SECTION);
ac6e3339 640 make_edge (switch_bb, i->entry, 0);
caa1b5c9 641 make_edge (i->exit, bb, EDGE_FALLTHRU);
642 }
ac6e3339 643
644 /* Make the loopback edge to the block with
75a70cf9 645 GIMPLE_OMP_SECTIONS_SWITCH. */
ac6e3339 646 make_edge (bb, switch_bb, 0);
647
648 /* Make the edge from the switch to exit. */
649 make_edge (switch_bb, bb->next_bb, 0);
650 fallthru = false;
caa1b5c9 651 }
652 break;
13b96211 653
caa1b5c9 654 default:
655 gcc_unreachable ();
656 }
caa1b5c9 657 break;
658
2a09b1df 659 default:
660 gcc_assert (!stmt_ends_bb_p (last));
661 fallthru = true;
662 }
4ee9c684 663 }
2a09b1df 664 else
665 fallthru = true;
4ee9c684 666
2a09b1df 667 if (fallthru)
d01c707b 668 {
669 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
670 if (last)
671 assign_discriminator (gimple_location (last), bb->next_bb);
672 }
4ee9c684 673 }
674
caa1b5c9 675 if (root_omp_region)
676 free_omp_regions ();
677
9677695f 678 /* Fold COND_EXPR_COND of each COND_EXPR. */
679 fold_cond_expr_cond ();
4ee9c684 680}
681
d01c707b 682/* Trivial hash function for a location_t. ITEM is a pointer to
683 a hash table entry that maps a location_t to a discriminator. */
684
685static unsigned int
686locus_map_hash (const void *item)
687{
688 return ((const struct locus_discrim_map *) item)->locus;
689}
690
691/* Equality function for the locus-to-discriminator map. VA and VB
692 point to the two hash table entries to compare. */
693
694static int
695locus_map_eq (const void *va, const void *vb)
696{
697 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
698 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
699 return a->locus == b->locus;
700}
701
702/* Find the next available discriminator value for LOCUS. The
703 discriminator distinguishes among several basic blocks that
704 share a common locus, allowing for more accurate sample-based
705 profiling. */
706
707static int
708next_discriminator_for_locus (location_t locus)
709{
710 struct locus_discrim_map item;
711 struct locus_discrim_map **slot;
712
713 item.locus = locus;
714 item.discriminator = 0;
715 slot = (struct locus_discrim_map **)
716 htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
717 (hashval_t) locus, INSERT);
718 gcc_assert (slot);
719 if (*slot == HTAB_EMPTY_ENTRY)
720 {
721 *slot = XNEW (struct locus_discrim_map);
722 gcc_assert (*slot);
723 (*slot)->locus = locus;
724 (*slot)->discriminator = 0;
725 }
726 (*slot)->discriminator++;
727 return (*slot)->discriminator;
728}
729
730/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
731
732static bool
733same_line_p (location_t locus1, location_t locus2)
734{
735 expanded_location from, to;
736
737 if (locus1 == locus2)
738 return true;
739
740 from = expand_location (locus1);
741 to = expand_location (locus2);
742
743 if (from.line != to.line)
744 return false;
745 if (from.file == to.file)
746 return true;
747 return (from.file != NULL
748 && to.file != NULL
749 && strcmp (from.file, to.file) == 0);
750}
751
752/* Assign a unique discriminator value to block BB if it begins at the same
753 LOCUS as its predecessor block. */
754
755static void
756assign_discriminator (location_t locus, basic_block bb)
757{
758 gimple to_stmt;
759
760 if (locus == 0 || bb->discriminator != 0)
761 return;
762
763 to_stmt = first_non_label_stmt (bb);
764 if (to_stmt && same_line_p (locus, gimple_location (to_stmt)))
765 bb->discriminator = next_discriminator_for_locus (locus);
766}
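/* Editor's example (illustrative, not part of the GCC sources): for a
   source line such as

       if (p) x = *p; else x = 0;

   several basic blocks map to the same locus.  The first block on the
   line keeps discriminator 0; assign_discriminator gives subsequent
   blocks that start on the same line discriminators 1, 2, ..., so a
   sample-based profiler can attribute counts to the then- and else-paths
   separately even though they share one source line.  */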
4ee9c684 767
75a70cf9 768/* Create the edges for a GIMPLE_COND starting at block BB. */
4ee9c684 769
770static void
771make_cond_expr_edges (basic_block bb)
772{
75a70cf9 773 gimple entry = last_stmt (bb);
774 gimple then_stmt, else_stmt;
4ee9c684 775 basic_block then_bb, else_bb;
776 tree then_label, else_label;
b5f162df 777 edge e;
d01c707b 778 location_t entry_locus;
4ee9c684 779
8c0963c4 780 gcc_assert (entry);
75a70cf9 781 gcc_assert (gimple_code (entry) == GIMPLE_COND);
4ee9c684 782
d01c707b 783 entry_locus = gimple_location (entry);
784
4ee9c684 785 /* Entry basic blocks for each component. */
75a70cf9 786 then_label = gimple_cond_true_label (entry);
787 else_label = gimple_cond_false_label (entry);
4ee9c684 788 then_bb = label_to_block (then_label);
789 else_bb = label_to_block (else_label);
75a70cf9 790 then_stmt = first_stmt (then_bb);
791 else_stmt = first_stmt (else_bb);
4ee9c684 792
b5f162df 793 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
d01c707b 794 assign_discriminator (entry_locus, then_bb);
75a70cf9 795 e->goto_locus = gimple_location (then_stmt);
32dedf8f 796 if (e->goto_locus)
797 e->goto_block = gimple_block (then_stmt);
b5f162df 798 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
799 if (e)
9c388755 800 {
d01c707b 801 assign_discriminator (entry_locus, else_bb);
9c388755 802 e->goto_locus = gimple_location (else_stmt);
32dedf8f 803 if (e->goto_locus)
804 e->goto_block = gimple_block (else_stmt);
9c388755 805 }
63f88450 806
75a70cf9 807 /* We do not need the labels anymore. */
808 gimple_cond_set_true_label (entry, NULL_TREE);
809 gimple_cond_set_false_label (entry, NULL_TREE);
4ee9c684 810}
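/* Editor's sketch (illustrative, not part of the GCC sources): for a
   lowered conditional of the form

       if (a_1 < b_2) goto <then_lab>; else goto <else_lab>;

   the code above adds an EDGE_TRUE_VALUE edge to the block holding
   <then_lab> and an EDGE_FALSE_VALUE edge to the block holding
   <else_lab>, copies the goto locus/block and a discriminator onto the
   edges, and then clears both labels from the GIMPLE_COND because the
   CFG edges now carry that information.  */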
811
194ae074 812
6ff867cc 813/* Called for each element in the edge-to-cases pointer map as we
814 tear that map down.
815
13b96211 816 Clear all the TREE_CHAINs to prevent problems with copying of
6ff867cc 817 SWITCH_EXPRs and structure sharing rules, then clear the map
818 entry. */
819
b30a8715 820static bool
f8fd23c0 821edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
b30a8715 822 void *data ATTRIBUTE_UNUSED)
6ff867cc 823{
6ff867cc 824 tree t, next;
825
b30a8715 826 for (t = (tree) *value; t; t = next)
6ff867cc 827 {
828 next = TREE_CHAIN (t);
829 TREE_CHAIN (t) = NULL;
830 }
b30a8715 831
832 *value = NULL;
833 return false;
6ff867cc 834}
835
836/* Start recording information mapping edges to case labels. */
837
bfd49939 838void
6ff867cc 839start_recording_case_labels (void)
840{
841 gcc_assert (edge_to_cases == NULL);
b30a8715 842 edge_to_cases = pointer_map_create ();
6ff867cc 843}
844
845/* Return nonzero if we are recording information for case labels. */
846
847static bool
848recording_case_labels_p (void)
849{
850 return (edge_to_cases != NULL);
851}
852
853/* Stop recording information mapping edges to case labels and
854 remove any information we have recorded. */
bfd49939 855void
6ff867cc 856end_recording_case_labels (void)
857{
b30a8715 858 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
859 pointer_map_destroy (edge_to_cases);
6ff867cc 860 edge_to_cases = NULL;
861}
862
6ff867cc 863/* If we are inside a {start,end}_recording_cases block, then return
864 a chain of CASE_LABEL_EXPRs from T which reference E.
865
866 Otherwise return NULL. */
194ae074 867
868static tree
75a70cf9 869get_cases_for_edge (edge e, gimple t)
194ae074 870{
194ae074 871 void **slot;
6ff867cc 872 size_t i, n;
194ae074 873
6ff867cc 874 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
875 chains available. Return NULL so the caller can detect this case. */
876 if (!recording_case_labels_p ())
877 return NULL;
13b96211 878
b30a8715 879 slot = pointer_map_contains (edge_to_cases, e);
194ae074 880 if (slot)
b30a8715 881 return (tree) *slot;
194ae074 882
6ff867cc 883 /* If we did not find E in the hash table, then this must be the first
884 time we have been queried for information about E & T. Add all the
885 elements from T to the hash table then perform the query again. */
194ae074 886
75a70cf9 887 n = gimple_switch_num_labels (t);
194ae074 888 for (i = 0; i < n; i++)
889 {
75a70cf9 890 tree elt = gimple_switch_label (t, i);
b30a8715 891 tree lab = CASE_LABEL (elt);
6ff867cc 892 basic_block label_bb = label_to_block (lab);
b30a8715 893 edge this_edge = find_edge (e->src, label_bb);
894
895 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
896 a new chain. */
897 slot = pointer_map_insert (edge_to_cases, this_edge);
898 TREE_CHAIN (elt) = (tree) *slot;
899 *slot = elt;
194ae074 900 }
b30a8715 901
902 return (tree) *pointer_map_contains (edge_to_cases, e);
194ae074 903}
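/* Editor's example (illustrative, not part of the GCC sources): for

       switch (x) { case 1: case 5: f (); break; default: g (); }

   the non-consecutive case values 1 and 5 remain separate
   CASE_LABEL_EXPRs that branch to the same block.  get_cases_for_edge,
   given that edge and the GIMPLE_SWITCH, returns those CASE_LABEL_EXPRs
   chained through TREE_CHAIN, so edge redirection only has to walk the
   chain and rewrite CASE_LABEL of each element instead of rescanning
   the whole label vector.  */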
4ee9c684 904
75a70cf9 905/* Create the edges for a GIMPLE_SWITCH starting at block BB. */
4ee9c684 906
907static void
75a70cf9 908make_gimple_switch_edges (basic_block bb)
4ee9c684 909{
75a70cf9 910 gimple entry = last_stmt (bb);
d01c707b 911 location_t entry_locus;
4ee9c684 912 size_t i, n;
4ee9c684 913
d01c707b 914 entry_locus = gimple_location (entry);
915
75a70cf9 916 n = gimple_switch_num_labels (entry);
4ee9c684 917
918 for (i = 0; i < n; ++i)
919 {
75a70cf9 920 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
4ee9c684 921 basic_block label_bb = label_to_block (lab);
6ff867cc 922 make_edge (bb, label_bb, 0);
d01c707b 923 assign_discriminator (entry_locus, label_bb);
4ee9c684 924 }
925}
926
927
928/* Return the basic block holding label DEST. */
929
930basic_block
7a22afab 931label_to_block_fn (struct function *ifun, tree dest)
4ee9c684 932{
0ec80471 933 int uid = LABEL_DECL_UID (dest);
934
ebeaefa4 935 /* We would die hard when faced with an undefined label. Emit a label to
936 the very first basic block. This will hopefully make even the dataflow
0ec80471 937 and undefined variable warnings quite right. */
938 if ((errorcount || sorrycount) && uid < 0)
939 {
75a70cf9 940 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
941 gimple stmt;
0ec80471 942
75a70cf9 943 stmt = gimple_build_label (dest);
944 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
0ec80471 945 uid = LABEL_DECL_UID (dest);
946 }
2a6236c0 947 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
948 <= (unsigned int) uid)
0de999f1 949 return NULL;
2a6236c0 950 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
4ee9c684 951}
952
2c8a1497 953/* Create edges for an abnormal goto statement at block BB. If FOR_CALL
954 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
955
956void
957make_abnormal_goto_edges (basic_block bb, bool for_call)
958{
959 basic_block target_bb;
75a70cf9 960 gimple_stmt_iterator gsi;
2c8a1497 961
962 FOR_EACH_BB (target_bb)
75a70cf9 963 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
2c8a1497 964 {
75a70cf9 965 gimple label_stmt = gsi_stmt (gsi);
966 tree target;
2c8a1497 967
75a70cf9 968 if (gimple_code (label_stmt) != GIMPLE_LABEL)
2c8a1497 969 break;
970
75a70cf9 971 target = gimple_label_label (label_stmt);
2c8a1497 972
973 /* Make an edge to every label block that has been marked as a
974 potential target for a computed goto or a non-local goto. */
975 if ((FORCED_LABEL (target) && !for_call)
976 || (DECL_NONLOCAL (target) && for_call))
977 {
978 make_edge (bb, target_bb, EDGE_ABNORMAL);
979 break;
980 }
981 }
982}
983
4ee9c684 984/* Create edges for a goto statement at block BB. */
985
986static void
987make_goto_expr_edges (basic_block bb)
988{
75a70cf9 989 gimple_stmt_iterator last = gsi_last_bb (bb);
990 gimple goto_t = gsi_stmt (last);
4ee9c684 991
2c8a1497 992 /* A simple GOTO creates normal edges. */
993 if (simple_goto_p (goto_t))
4ee9c684 994 {
75a70cf9 995 tree dest = gimple_goto_dest (goto_t);
d01c707b 996 basic_block label_bb = label_to_block (dest);
997 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
75a70cf9 998 e->goto_locus = gimple_location (goto_t);
d01c707b 999 assign_discriminator (e->goto_locus, label_bb);
32dedf8f 1000 if (e->goto_locus)
1001 e->goto_block = gimple_block (goto_t);
75a70cf9 1002 gsi_remove (&last, true);
2c8a1497 1003 return;
4ee9c684 1004 }
1005
2c8a1497 1006 /* A computed GOTO creates abnormal edges. */
1007 make_abnormal_goto_edges (bb, false);
4ee9c684 1008}
1009
1010
1011/*---------------------------------------------------------------------------
1012 Flowgraph analysis
1013---------------------------------------------------------------------------*/
1014
bf6102ae 1015/* Clean up useless labels in basic blocks. This is something we wish
1016 to do early because it allows us to group case labels before creating
1017 the edges for the CFG, and it speeds up block statement iterators in
1018 all passes later on.
6313ae8b 1019 We rerun this pass after the CFG is created, to get rid of the labels that
1020 are no longer referenced. After that we do not run it any more, since
1021 (almost) no new labels should be created. */
bf6102ae 1022
1023/* A map from basic block index to the leading label of that block. */
6313ae8b 1024static struct label_record
1025{
1026 /* The label. */
1027 tree label;
1028
1029 /* True if the label is referenced from somewhere. */
1030 bool used;
1031} *label_for_bb;
bf6102ae 1032
1033/* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
1034static void
26dbec0a 1035update_eh_label (struct eh_region_d *region)
bf6102ae 1036{
1037 tree old_label = get_eh_region_tree_label (region);
1038 if (old_label)
1039 {
873f1e89 1040 tree new_label;
1041 basic_block bb = label_to_block (old_label);
1042
1043 /* ??? After optimizing, there may be EH regions with labels
1044 that have already been removed from the function body, so
1045 there is no basic block for them. */
1046 if (! bb)
1047 return;
1048
6313ae8b 1049 new_label = label_for_bb[bb->index].label;
1050 label_for_bb[bb->index].used = true;
bf6102ae 1051 set_eh_region_tree_label (region, new_label);
1052 }
1053}
1054
75a70cf9 1055
0ec80471 1056/* Given LABEL return the first label in the same basic block. */
75a70cf9 1057
0ec80471 1058static tree
1059main_block_label (tree label)
1060{
1061 basic_block bb = label_to_block (label);
6313ae8b 1062 tree main_label = label_for_bb[bb->index].label;
0ec80471 1063
1064 /* label_to_block possibly inserted undefined label into the chain. */
6313ae8b 1065 if (!main_label)
1066 {
1067 label_for_bb[bb->index].label = label;
1068 main_label = label;
1069 }
1070
1071 label_for_bb[bb->index].used = true;
1072 return main_label;
0ec80471 1073}
1074
1b28312d 1075/* Clean up redundant labels. This is a three-step process:
bf6102ae 1076 1) Find the leading label for each block.
1077 2) Redirect all references to labels to the leading labels.
1078 3) Clean up all useless labels. */
4ee9c684 1079
873f1e89 1080void
4ee9c684 1081cleanup_dead_labels (void)
1082{
1083 basic_block bb;
6313ae8b 1084 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
4ee9c684 1085
1086 /* Find a suitable label for each block. We use the first user-defined
ebeaefa4 1087 label if there is one, or otherwise just the first label we see. */
4ee9c684 1088 FOR_EACH_BB (bb)
1089 {
75a70cf9 1090 gimple_stmt_iterator i;
4ee9c684 1091
75a70cf9 1092 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4ee9c684 1093 {
75a70cf9 1094 tree label;
1095 gimple stmt = gsi_stmt (i);
4ee9c684 1096
75a70cf9 1097 if (gimple_code (stmt) != GIMPLE_LABEL)
4ee9c684 1098 break;
1099
75a70cf9 1100 label = gimple_label_label (stmt);
4ee9c684 1101
1102 /* If we have not yet seen a label for the current block,
1103 remember this one and see if there are more labels. */
6313ae8b 1104 if (!label_for_bb[bb->index].label)
4ee9c684 1105 {
6313ae8b 1106 label_for_bb[bb->index].label = label;
4ee9c684 1107 continue;
1108 }
1109
1110 /* If we did see a label for the current block already, but it
1111 is an artificially created label, replace it if the current
1112 label is a user defined label. */
6313ae8b 1113 if (!DECL_ARTIFICIAL (label)
1114 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
4ee9c684 1115 {
6313ae8b 1116 label_for_bb[bb->index].label = label;
4ee9c684 1117 break;
1118 }
1119 }
1120 }
1121
bf6102ae 1122 /* Now redirect all jumps/branches to the selected label.
1123 First do so for each block ending in a control statement. */
4ee9c684 1124 FOR_EACH_BB (bb)
1125 {
75a70cf9 1126 gimple stmt = last_stmt (bb);
4ee9c684 1127 if (!stmt)
1128 continue;
1129
75a70cf9 1130 switch (gimple_code (stmt))
4ee9c684 1131 {
75a70cf9 1132 case GIMPLE_COND:
4ee9c684 1133 {
75a70cf9 1134 tree true_label = gimple_cond_true_label (stmt);
1135 tree false_label = gimple_cond_false_label (stmt);
4ee9c684 1136
75a70cf9 1137 if (true_label)
1138 gimple_cond_set_true_label (stmt, main_block_label (true_label));
1139 if (false_label)
1140 gimple_cond_set_false_label (stmt, main_block_label (false_label));
4ee9c684 1141 break;
1142 }
13b96211 1143
75a70cf9 1144 case GIMPLE_SWITCH:
4ee9c684 1145 {
75a70cf9 1146 size_t i, n = gimple_switch_num_labels (stmt);
13b96211 1147
4ee9c684 1148 /* Replace all destination labels. */
1149 for (i = 0; i < n; ++i)
194ae074 1150 {
75a70cf9 1151 tree case_label = gimple_switch_label (stmt, i);
1152 tree label = main_block_label (CASE_LABEL (case_label));
1153 CASE_LABEL (case_label) = label;
194ae074 1154 }
4ee9c684 1155 break;
1156 }
1157
75a70cf9 1158 /* We have to handle gotos until they're removed, and we don't
b624a250 1159 remove them until after we've created the CFG edges. */
75a70cf9 1160 case GIMPLE_GOTO:
1161 if (!computed_goto_p (stmt))
0ec80471 1162 {
75a70cf9 1163 tree new_dest = main_block_label (gimple_goto_dest (stmt));
1164 gimple_goto_set_dest (stmt, new_dest);
0ec80471 1165 break;
1166 }
b624a250 1167
4ee9c684 1168 default:
1169 break;
1170 }
1171 }
1172
bf6102ae 1173 for_each_eh_region (update_eh_label);
1174
4ee9c684 1175 /* Finally, purge dead labels. All user-defined labels and labels that
2cdb2cec 1176 can be the target of non-local gotos and labels which have their
1177 address taken are preserved. */
4ee9c684 1178 FOR_EACH_BB (bb)
1179 {
75a70cf9 1180 gimple_stmt_iterator i;
6313ae8b 1181 tree label_for_this_bb = label_for_bb[bb->index].label;
4ee9c684 1182
6313ae8b 1183 if (!label_for_this_bb)
4ee9c684 1184 continue;
1185
6313ae8b 1186 /* If the main label of the block is unused, we may still remove it. */
1187 if (!label_for_bb[bb->index].used)
1188 label_for_this_bb = NULL;
1189
75a70cf9 1190 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 1191 {
75a70cf9 1192 tree label;
1193 gimple stmt = gsi_stmt (i);
4ee9c684 1194
75a70cf9 1195 if (gimple_code (stmt) != GIMPLE_LABEL)
4ee9c684 1196 break;
1197
75a70cf9 1198 label = gimple_label_label (stmt);
4ee9c684 1199
1200 if (label == label_for_this_bb
75a70cf9 1201 || !DECL_ARTIFICIAL (label)
2cdb2cec 1202 || DECL_NONLOCAL (label)
1203 || FORCED_LABEL (label))
75a70cf9 1204 gsi_next (&i);
4ee9c684 1205 else
75a70cf9 1206 gsi_remove (&i, true);
4ee9c684 1207 }
1208 }
1209
1210 free (label_for_bb);
1211}
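/* Editor's example (illustrative, not part of the GCC sources): if a
   block begins with

       <D.123>:          artificial
       user_lab:         user-defined
       <D.124>:          artificial

   cleanup_dead_labels selects user_lab as the block's leading label,
   rewrites references in GIMPLE_COND, GIMPLE_SWITCH and GIMPLE_GOTO
   statements and in EH regions to point at user_lab, and finally
   removes <D.123> and <D.124>; user-defined, non-local and
   address-taken (FORCED_LABEL) labels are always preserved.  */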
1212
b624a250 1213/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1214 and scan the sorted vector of cases. Combine the ones jumping to the
1215 same label.
1216 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1217
873f1e89 1218void
b624a250 1219group_case_labels (void)
1220{
1221 basic_block bb;
1222
1223 FOR_EACH_BB (bb)
1224 {
75a70cf9 1225 gimple stmt = last_stmt (bb);
1226 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
b624a250 1227 {
75a70cf9 1228 int old_size = gimple_switch_num_labels (stmt);
b624a250 1229 int i, j, new_size = old_size;
72c30859 1230 tree default_case = NULL_TREE;
1231 tree default_label = NULL_TREE;
75a70cf9 1232 bool has_default;
0f1fb059 1233
75a70cf9 1234 /* The default label is always the first case in a switch
72c30859 1235 statement after gimplification if it was not optimized
75a70cf9 1236 away. */
1237 if (!CASE_LOW (gimple_switch_default_label (stmt))
1238 && !CASE_HIGH (gimple_switch_default_label (stmt)))
72c30859 1239 {
75a70cf9 1240 default_case = gimple_switch_default_label (stmt);
72c30859 1241 default_label = CASE_LABEL (default_case);
75a70cf9 1242 has_default = true;
72c30859 1243 }
75a70cf9 1244 else
1245 has_default = false;
b624a250 1246
72c30859 1247 /* Look for possible opportunities to merge cases. */
75a70cf9 1248 if (has_default)
1249 i = 1;
1250 else
1251 i = 0;
72c30859 1252 while (i < old_size)
b624a250 1253 {
b9e0e333 1254 tree base_case, base_label, base_high;
75a70cf9 1255 base_case = gimple_switch_label (stmt, i);
b624a250 1256
8c0963c4 1257 gcc_assert (base_case);
b624a250 1258 base_label = CASE_LABEL (base_case);
6722b485 1259
1260 /* Discard cases that have the same destination as the
1261 default case. */
1262 if (base_label == default_label)
1263 {
75a70cf9 1264 gimple_switch_set_label (stmt, i, NULL_TREE);
6722b485 1265 i++;
0f1fb059 1266 new_size--;
6722b485 1267 continue;
1268 }
1269
75a70cf9 1270 base_high = CASE_HIGH (base_case)
1271 ? CASE_HIGH (base_case)
1272 : CASE_LOW (base_case);
ba6d6490 1273 i++;
75a70cf9 1274
b624a250 1275 /* Try to merge case labels. Break out when we reach the end
1276 of the label vector or when we cannot merge the next case
1277 label with the current one. */
72c30859 1278 while (i < old_size)
b624a250 1279 {
75a70cf9 1280 tree merge_case = gimple_switch_label (stmt, i);
b624a250 1281 tree merge_label = CASE_LABEL (merge_case);
1282 tree t = int_const_binop (PLUS_EXPR, base_high,
1283 integer_one_node, 1);
1284
1285 /* Merge the cases if they jump to the same place,
1286 and their ranges are consecutive. */
1287 if (merge_label == base_label
1288 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1289 {
1290 base_high = CASE_HIGH (merge_case) ?
1291 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1292 CASE_HIGH (base_case) = base_high;
75a70cf9 1293 gimple_switch_set_label (stmt, i, NULL_TREE);
b624a250 1294 new_size--;
ba6d6490 1295 i++;
b624a250 1296 }
1297 else
1298 break;
1299 }
1300 }
1301
1302 /* Compress the case labels in the label vector, and adjust the
1303 length of the vector. */
1304 for (i = 0, j = 0; i < new_size; i++)
1305 {
75a70cf9 1306 while (! gimple_switch_label (stmt, j))
b624a250 1307 j++;
75a70cf9 1308 gimple_switch_set_label (stmt, i,
1309 gimple_switch_label (stmt, j++));
b624a250 1310 }
75a70cf9 1311
1312 gcc_assert (new_size <= old_size);
1313 gimple_switch_set_num_labels (stmt, new_size);
b624a250 1314 }
1315 }
1316}
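/* Editor's example (illustrative, not part of the GCC sources): for

       switch (x) { case 1: case 2: case 3: f (); break;
                    case 7: default: g (); }

   group_case_labels drops the "case 7" entry because it targets the
   same label as the default, and merges the consecutive values 1, 2, 3
   into a single range entry equivalent to "case 1 ... 3", shrinking the
   switch's label vector accordingly.  */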
4ee9c684 1317
1318/* Checks whether we can merge block B into block A. */
1319
1320static bool
75a70cf9 1321gimple_can_merge_blocks_p (basic_block a, basic_block b)
4ee9c684 1322{
75a70cf9 1323 gimple stmt;
1324 gimple_stmt_iterator gsi;
1325 gimple_seq phis;
4ee9c684 1326
ea091dfd 1327 if (!single_succ_p (a))
4ee9c684 1328 return false;
1329
3d1eacdb 1330 if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
4ee9c684 1331 return false;
1332
ea091dfd 1333 if (single_succ (a) != b)
4ee9c684 1334 return false;
1335
ea091dfd 1336 if (!single_pred_p (b))
4ee9c684 1337 return false;
1338
c4ac3cc6 1339 if (b == EXIT_BLOCK_PTR)
1340 return false;
13b96211 1341
4ee9c684 1342 /* If A ends by a statement causing exceptions or something similar, we
1343 cannot merge the blocks. */
75a70cf9 1344 stmt = last_stmt (a);
4ee9c684 1345 if (stmt && stmt_ends_bb_p (stmt))
1346 return false;
1347
1348 /* Do not allow a block with only a non-local label to be merged. */
75a70cf9 1349 if (stmt
1350 && gimple_code (stmt) == GIMPLE_LABEL
1351 && DECL_NONLOCAL (gimple_label_label (stmt)))
4ee9c684 1352 return false;
1353
8171a1dd 1354 /* It must be possible to eliminate all phi nodes in B. If ssa form
4c64356a 1355 is not up-to-date, we cannot eliminate any phis; however, if only
1357 some symbols as a whole are marked for renaming, this is not a problem,
1357 as phi nodes for those symbols are irrelevant in updating anyway. */
75a70cf9 1358 phis = phi_nodes (b);
1359 if (!gimple_seq_empty_p (phis))
8171a1dd 1360 {
75a70cf9 1361 gimple_stmt_iterator i;
1362
4c64356a 1363 if (name_mappings_registered_p ())
8171a1dd 1364 return false;
1365
75a70cf9 1366 for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
1367 {
1368 gimple phi = gsi_stmt (i);
1369
1370 if (!is_gimple_reg (gimple_phi_result (phi))
1371 && !may_propagate_copy (gimple_phi_result (phi),
1372 gimple_phi_arg_def (phi, 0)))
1373 return false;
1374 }
8171a1dd 1375 }
4ee9c684 1376
1377 /* Do not remove user labels. */
75a70cf9 1378 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 1379 {
75a70cf9 1380 stmt = gsi_stmt (gsi);
1381 if (gimple_code (stmt) != GIMPLE_LABEL)
4ee9c684 1382 break;
75a70cf9 1383 if (!DECL_ARTIFICIAL (gimple_label_label (stmt)))
4ee9c684 1384 return false;
1385 }
1386
053fdd99 1387 /* Protect the loop latches. */
1388 if (current_loops
1389 && b->loop_father->latch == b)
1390 return false;
1391
4ee9c684 1392 return true;
1393}
1394
8171a1dd 1395/* Replaces all uses of NAME by VAL. */
1396
10fec820 1397void
8171a1dd 1398replace_uses_by (tree name, tree val)
1399{
1400 imm_use_iterator imm_iter;
1401 use_operand_p use;
75a70cf9 1402 gimple stmt;
8171a1dd 1403 edge e;
8171a1dd 1404
09aca5bc 1405 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
8171a1dd 1406 {
09aca5bc 1407 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1408 {
1409 replace_exp (use, val);
8171a1dd 1410
75a70cf9 1411 if (gimple_code (stmt) == GIMPLE_PHI)
8171a1dd 1412 {
75a70cf9 1413 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
09aca5bc 1414 if (e->flags & EDGE_ABNORMAL)
1415 {
1416 /* This can only occur for virtual operands, since
1417 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1418 would prevent replacement. */
1419 gcc_assert (!is_gimple_reg (name));
1420 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1421 }
8171a1dd 1422 }
1423 }
de6ed584 1424
75a70cf9 1425 if (gimple_code (stmt) != GIMPLE_PHI)
09aca5bc 1426 {
75a70cf9 1427 size_t i;
fcd2418b 1428
09aca5bc 1429 fold_stmt_inplace (stmt);
31a8456e 1430 if (cfgcleanup_altered_bbs)
75a70cf9 1431 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
de6ed584 1432
4c5fd53c 1433 /* FIXME. This should go in update_stmt. */
75a70cf9 1434 for (i = 0; i < gimple_num_ops (stmt); i++)
1435 {
1436 tree op = gimple_op (stmt, i);
1437 /* Operands may be empty here. For example, the labels
1438 of a GIMPLE_COND are nulled out following the creation
1439 of the corresponding CFG edges. */
1440 if (op && TREE_CODE (op) == ADDR_EXPR)
1441 recompute_tree_invariant_for_addr_expr (op);
1442 }
fcd2418b 1443
09aca5bc 1444 maybe_clean_or_replace_eh_stmt (stmt, stmt);
4c5fd53c 1445 update_stmt (stmt);
09aca5bc 1446 }
8171a1dd 1447 }
13b96211 1448
d860e1f9 1449 gcc_assert (has_zero_uses (name));
f3830c7c 1450
1451 /* Also update the trees stored in loop structures. */
1452 if (current_loops)
1453 {
1454 struct loop *loop;
17519ba0 1455 loop_iterator li;
f3830c7c 1456
17519ba0 1457 FOR_EACH_LOOP (li, loop, 0)
f3830c7c 1458 {
17519ba0 1459 substitute_in_loop_info (loop, name, val);
f3830c7c 1460 }
1461 }
8171a1dd 1462}
4ee9c684 1463
1464/* Merge block B into block A. */
1465
1466static void
75a70cf9 1467gimple_merge_blocks (basic_block a, basic_block b)
4ee9c684 1468{
75a70cf9 1469 gimple_stmt_iterator last, gsi, psi;
1470 gimple_seq phis = phi_nodes (b);
4ee9c684 1471
1472 if (dump_file)
1473 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1474
ae79515f 1475 /* Remove all single-valued PHI nodes from block B of the form
1476 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
75a70cf9 1477 gsi = gsi_last_bb (a);
1478 for (psi = gsi_start (phis); !gsi_end_p (psi); )
8171a1dd 1479 {
75a70cf9 1480 gimple phi = gsi_stmt (psi);
1481 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1482 gimple copy;
1483 bool may_replace_uses = !is_gimple_reg (def)
1484 || may_propagate_copy (def, use);
4f62605b 1485
fd859561 1486 /* In case we maintain loop closed ssa form, do not propagate arguments
1487 of loop exit phi nodes. */
4f62605b 1488 if (current_loops
f24ec26f 1489 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
4f62605b 1490 && is_gimple_reg (def)
1491 && TREE_CODE (use) == SSA_NAME
1492 && a->loop_father != b->loop_father)
1493 may_replace_uses = false;
1494
1495 if (!may_replace_uses)
8171a1dd 1496 {
1497 gcc_assert (is_gimple_reg (def));
1498
c78cbec8 1499 /* Note that just emitting the copies is fine -- there is no problem
8171a1dd 1500 with ordering of phi nodes. This is because A is the single
1501 predecessor of B, therefore results of the phi nodes cannot
1502 appear as arguments of the phi nodes. */
75a70cf9 1503 copy = gimple_build_assign (def, use);
1504 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1505 remove_phi_node (&psi, false);
8171a1dd 1506 }
1507 else
18a524af 1508 {
5c6e4974 1509 /* If we deal with a PHI for virtual operands, we can simply
1510 propagate these without fussing with folding or updating
1511 the stmt. */
1512 if (!is_gimple_reg (def))
1513 {
1514 imm_use_iterator iter;
1515 use_operand_p use_p;
75a70cf9 1516 gimple stmt;
5c6e4974 1517
1518 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1519 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1520 SET_USE (use_p, use);
1521 }
1522 else
1523 replace_uses_by (def, use);
75a70cf9 1524
1525 remove_phi_node (&psi, true);
18a524af 1526 }
8171a1dd 1527 }
1528
4ee9c684 1529 /* Ensure that B follows A. */
1530 move_block_after (b, a);
1531
ea091dfd 1532 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
8c0963c4 1533 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
4ee9c684 1534
75a70cf9 1535 /* Remove labels from B and set gimple_bb to A for other statements. */
1536 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
4ee9c684 1537 {
75a70cf9 1538 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
6d7413d8 1539 {
75a70cf9 1540 gimple label = gsi_stmt (gsi);
1541
1542 gsi_remove (&gsi, false);
6d7413d8 1543
6d7413d8 1544 /* Now that we can thread computed gotos, we might have
1545 a situation where we have a forced label in block B.
1546 However, the label at the start of block B might still be
1547 used in other ways (think about the runtime checking for
1548 Fortran assigned gotos). So we cannot just delete the
1549 label. Instead we move the label to the start of block A. */
75a70cf9 1550 if (FORCED_LABEL (gimple_label_label (label)))
6d7413d8 1551 {
75a70cf9 1552 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1553 gsi_insert_before (&dest_gsi, label, GSI_NEW_STMT);
6d7413d8 1554 }
1555 }
4ee9c684 1556 else
1557 {
75a70cf9 1558 gimple_set_bb (gsi_stmt (gsi), a);
1559 gsi_next (&gsi);
4ee9c684 1560 }
1561 }
1562
75a70cf9 1563 /* Merge the sequences. */
1564 last = gsi_last_bb (a);
1565 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1566 set_bb_seq (b, NULL);
31a8456e 1567
1568 if (cfgcleanup_altered_bbs)
1569 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
4ee9c684 1570}
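/* Editor's sketch (illustrative, not part of the GCC sources): if B
   starts with the single-argument PHI

       x_3 = PHI <x_1(A)>

   the merge either replaces every use of x_3 by x_1, or, when copy
   propagation would be unsafe, appends "x_3 = x_1;" to the end of A;
   the PHI is then removed and B's remaining statements are appended
   to A.  */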
1571
1572
ac70caad 1573/* Return the one of two successors of BB that is not reachable by a
2c2eb7fa 1574 complex edge, if there is one. Else, return BB. We use
ac70caad 1575 this in optimizations that use post-dominators for their heuristics,
1576 to catch the cases in C++ where function calls are involved. */
13b96211 1577
ac70caad 1578basic_block
13b96211 1579single_noncomplex_succ (basic_block bb)
ac70caad 1580{
1581 edge e0, e1;
1582 if (EDGE_COUNT (bb->succs) != 2)
1583 return bb;
13b96211 1584
ac70caad 1585 e0 = EDGE_SUCC (bb, 0);
1586 e1 = EDGE_SUCC (bb, 1);
1587 if (e0->flags & EDGE_COMPLEX)
1588 return e1->dest;
1589 if (e1->flags & EDGE_COMPLEX)
1590 return e0->dest;
13b96211 1591
ac70caad 1592 return bb;
13b96211 1593}
ac70caad 1594
1595
4ee9c684 1596/* Walk the function tree removing unnecessary statements.
1597
1598 * Empty statement nodes are removed
1599
1600 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1601
1602 * Unnecessary COND_EXPRs are removed
1603
1604 * Some unnecessary BIND_EXPRs are removed
1605
75a70cf9 1606 * GOTO_EXPRs immediately preceding their destination are removed.
1607
4ee9c684 1608 Clearly more work could be done. The trick is doing the analysis
1609 and removal fast enough to be a net improvement in compile times.
1610
1611 Note that when we remove a control structure such as a COND_EXPR,
1612 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1613 to ensure we eliminate all the useless code. */
1614
1615struct rus_data
1616{
4ee9c684 1617 bool repeat;
1618 bool may_throw;
1619 bool may_branch;
1620 bool has_label;
75a70cf9 1621 bool last_was_goto;
1622 gimple_stmt_iterator last_goto_gsi;
4ee9c684 1623};
1624
75a70cf9 1625
1626static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);
1627
1628/* Given a statement sequence, find the first executable statement with
1629 location information, and warn that it is unreachable. When searching,
1630 descend into containers in execution order. */
4ee9c684 1631
1632static bool
75a70cf9 1633remove_useless_stmts_warn_notreached (gimple_seq stmts)
4ee9c684 1634{
75a70cf9 1635 gimple_stmt_iterator gsi;
4ee9c684 1636
75a70cf9 1637 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 1638 {
75a70cf9 1639 gimple stmt = gsi_stmt (gsi);
4ee9c684 1640
75a70cf9 1641 if (gimple_has_location (stmt))
1642 {
1643 location_t loc = gimple_location (stmt);
1644 if (LOCATION_LINE (loc) > 0)
1645 {
5fb6a912 1646 warning_at (loc, OPT_Wunreachable_code, "will never be executed");
75a70cf9 1647 return true;
1648 }
1649 }
4ee9c684 1650
75a70cf9 1651 switch (gimple_code (stmt))
1652 {
1653 /* Unfortunately, we need the CFG now to detect unreachable
1654 branches in a conditional, so conditionals are not handled here. */
4ee9c684 1655
75a70cf9 1656 case GIMPLE_TRY:
1657 if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
1658 return true;
1659 if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
1660 return true;
1661 break;
4ee9c684 1662
75a70cf9 1663 case GIMPLE_CATCH:
1664 return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));
1665
1666 case GIMPLE_EH_FILTER:
1667 return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));
1668
1669 case GIMPLE_BIND:
1670 return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));
1671
1672 default:
1673 break;
1674 }
4ee9c684 1675 }
1676
1677 return false;
1678}
1679
75a70cf9 1680/* Helper for remove_useless_stmts_1. Handle GIMPLE_COND statements. */
1681
4ee9c684 1682static void
75a70cf9 1683remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
4ee9c684 1684{
75a70cf9 1685 gimple stmt = gsi_stmt (*gsi);
4ee9c684 1686
75a70cf9 1687 /* The folded result must still be a conditional statement. */
1c03e359 1688 fold_stmt (gsi);
1689 gcc_assert (gsi_stmt (*gsi) == stmt);
4ee9c684 1690
75a70cf9 1691 data->may_branch = true;
4ee9c684 1692
75a70cf9 1693 /* Replace trivial conditionals with gotos. */
1694 if (gimple_cond_true_p (stmt))
4ee9c684 1695 {
75a70cf9 1696 /* Goto THEN label. */
1697 tree then_label = gimple_cond_true_label (stmt);
4ee9c684 1698
75a70cf9 1699 gsi_replace (gsi, gimple_build_goto (then_label), false);
1700 data->last_goto_gsi = *gsi;
1701 data->last_was_goto = true;
4ee9c684 1702 data->repeat = true;
1703 }
75a70cf9 1704 else if (gimple_cond_false_p (stmt))
4ee9c684 1705 {
75a70cf9 1706 /* Goto ELSE label. */
1707 tree else_label = gimple_cond_false_label (stmt);
1708
1709 gsi_replace (gsi, gimple_build_goto (else_label), false);
1710 data->last_goto_gsi = *gsi;
1711 data->last_was_goto = true;
4ee9c684 1712 data->repeat = true;
1713 }
4ee9c684 1714 else
1715 {
75a70cf9 1716 tree then_label = gimple_cond_true_label (stmt);
1717 tree else_label = gimple_cond_false_label (stmt);
4ee9c684 1718
75a70cf9 1719 if (then_label == else_label)
1720 {
1721 /* Goto common destination. */
1722 gsi_replace (gsi, gimple_build_goto (then_label), false);
1723 data->last_goto_gsi = *gsi;
1724 data->last_was_goto = true;
4ee9c684 1725 data->repeat = true;
1726 }
4ee9c684 1727 }
1728
75a70cf9 1729 gsi_next (gsi);
1730
1731 data->last_was_goto = false;
4ee9c684 1732}
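
/* As an illustration of the simplification above, a degenerate
   conditional such as

     if (1) goto <La>; else goto <Lb>;

   is replaced by the unconditional

     goto <La>;

   (the label names here are only placeholders).  DATA->repeat is set
   so that the caller reruns the pass and cleans up whatever code the
   replacement has made unreachable.  */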
1733
75a70cf9 1734/* Helper for remove_useless_stmts_1.
1735 Handle the try-finally case for GIMPLE_TRY statements. */
4ee9c684 1736
1737static void
75a70cf9 1738remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
4ee9c684 1739{
1740 bool save_may_branch, save_may_throw;
1741 bool this_may_branch, this_may_throw;
1742
75a70cf9 1743 gimple_seq eval_seq, cleanup_seq;
1744 gimple_stmt_iterator eval_gsi, cleanup_gsi;
1745
1746 gimple stmt = gsi_stmt (*gsi);
1747
4ee9c684 1748 /* Collect may_branch and may_throw information for the body only. */
1749 save_may_branch = data->may_branch;
1750 save_may_throw = data->may_throw;
1751 data->may_branch = false;
1752 data->may_throw = false;
75a70cf9 1753 data->last_was_goto = false;
4ee9c684 1754
75a70cf9 1755 eval_seq = gimple_try_eval (stmt);
1756 eval_gsi = gsi_start (eval_seq);
1757 remove_useless_stmts_1 (&eval_gsi, data);
4ee9c684 1758
1759 this_may_branch = data->may_branch;
1760 this_may_throw = data->may_throw;
1761 data->may_branch |= save_may_branch;
1762 data->may_throw |= save_may_throw;
75a70cf9 1763 data->last_was_goto = false;
4ee9c684 1764
75a70cf9 1765 cleanup_seq = gimple_try_cleanup (stmt);
1766 cleanup_gsi = gsi_start (cleanup_seq);
1767 remove_useless_stmts_1 (&cleanup_gsi, data);
4ee9c684 1768
1769 /* If the body is empty, then we can emit the FINALLY block without
1770 the enclosing TRY_FINALLY_EXPR. */
75a70cf9 1771 if (gimple_seq_empty_p (eval_seq))
4ee9c684 1772 {
75a70cf9 1773 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1774 gsi_remove (gsi, false);
4ee9c684 1775 data->repeat = true;
1776 }
1777
 1778   /* If the cleanup is empty, then we can emit the TRY block without
1779 the enclosing TRY_FINALLY_EXPR. */
75a70cf9 1780 else if (gimple_seq_empty_p (cleanup_seq))
4ee9c684 1781 {
75a70cf9 1782 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1783 gsi_remove (gsi, false);
4ee9c684 1784 data->repeat = true;
1785 }
1786
1787 /* If the body neither throws, nor branches, then we can safely
1788 string the TRY and FINALLY blocks together. */
1789 else if (!this_may_branch && !this_may_throw)
1790 {
75a70cf9 1791 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1792 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1793 gsi_remove (gsi, false);
4ee9c684 1794 data->repeat = true;
1795 }
75a70cf9 1796 else
1797 gsi_next (gsi);
4ee9c684 1798}
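
/* To illustrate the try/finally simplifications above: when the body
   can neither branch nor throw, a construct that in source form reads

     try { A; } finally { B; }

   is flattened into the straight-line sequence

     A; B;

   and the enclosing GIMPLE_TRY is dropped.  An empty body leaves just
   the cleanup B, and an empty cleanup leaves just the body A.  */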
1799
75a70cf9 1800/* Helper for remove_useless_stmts_1.
1801 Handle the try-catch case for GIMPLE_TRY statements. */
4ee9c684 1802
1803static void
75a70cf9 1804remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
4ee9c684 1805{
1806 bool save_may_throw, this_may_throw;
75a70cf9 1807
1808 gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
1809 gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;
1810
1811 gimple stmt = gsi_stmt (*gsi);
4ee9c684 1812
1813 /* Collect may_throw information for the body only. */
1814 save_may_throw = data->may_throw;
1815 data->may_throw = false;
75a70cf9 1816 data->last_was_goto = false;
4ee9c684 1817
75a70cf9 1818 eval_seq = gimple_try_eval (stmt);
1819 eval_gsi = gsi_start (eval_seq);
1820 remove_useless_stmts_1 (&eval_gsi, data);
4ee9c684 1821
1822 this_may_throw = data->may_throw;
1823 data->may_throw = save_may_throw;
1824
75a70cf9 1825 cleanup_seq = gimple_try_cleanup (stmt);
1826
4ee9c684 1827 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1828 if (!this_may_throw)
1829 {
1830 if (warn_notreached)
75a70cf9 1831 {
1832 remove_useless_stmts_warn_notreached (cleanup_seq);
1833 }
1834 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1835 gsi_remove (gsi, false);
4ee9c684 1836 data->repeat = true;
1837 return;
1838 }
1839
1840 /* Process the catch clause specially. We may be able to tell that
1841 no exceptions propagate past this point. */
1842
1843 this_may_throw = true;
75a70cf9 1844 cleanup_gsi = gsi_start (cleanup_seq);
1845 stmt = gsi_stmt (cleanup_gsi);
1846 data->last_was_goto = false;
4ee9c684 1847
75a70cf9 1848 switch (gimple_code (stmt))
4ee9c684 1849 {
75a70cf9 1850 case GIMPLE_CATCH:
1851 /* If the first element is a catch, they all must be. */
1852 while (!gsi_end_p (cleanup_gsi))
1853 {
1854 stmt = gsi_stmt (cleanup_gsi);
4ee9c684 1855 /* If we catch all exceptions, then the body does not
1856 propagate exceptions past this point. */
75a70cf9 1857 if (gimple_catch_types (stmt) == NULL)
4ee9c684 1858 this_may_throw = false;
75a70cf9 1859 data->last_was_goto = false;
1860 handler_seq = gimple_catch_handler (stmt);
1861 handler_gsi = gsi_start (handler_seq);
1862 remove_useless_stmts_1 (&handler_gsi, data);
1863 gsi_next (&cleanup_gsi);
4ee9c684 1864 }
75a70cf9 1865 gsi_next (gsi);
4ee9c684 1866 break;
1867
75a70cf9 1868 case GIMPLE_EH_FILTER:
1869 /* If the first element is an eh_filter, it should stand alone. */
1870 if (gimple_eh_filter_must_not_throw (stmt))
4ee9c684 1871 this_may_throw = false;
75a70cf9 1872 else if (gimple_eh_filter_types (stmt) == NULL)
4ee9c684 1873 this_may_throw = false;
75a70cf9 1874 failure_seq = gimple_eh_filter_failure (stmt);
1875 failure_gsi = gsi_start (failure_seq);
1876 remove_useless_stmts_1 (&failure_gsi, data);
1877 gsi_next (gsi);
4ee9c684 1878 break;
1879
1880 default:
75a70cf9 1881 /* Otherwise this is a list of cleanup statements. */
1882 remove_useless_stmts_1 (&cleanup_gsi, data);
4ee9c684 1883
1884 /* If the cleanup is empty, then we can emit the TRY block without
1885 the enclosing TRY_CATCH_EXPR. */
75a70cf9 1886 if (gimple_seq_empty_p (cleanup_seq))
4ee9c684 1887 {
75a70cf9 1888 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
 1889	      gsi_remove (gsi, false);
4ee9c684 1890 data->repeat = true;
1891 }
75a70cf9 1892 else
1893 gsi_next (gsi);
4ee9c684 1894 break;
1895 }
75a70cf9 1896
4ee9c684 1897 data->may_throw |= this_may_throw;
1898}
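
/* To illustrate the try/catch handling above: if the body A of

     try { A; } catch (...) { B; }

   turns out not to throw, the whole construct is replaced by A alone
   and, under -Wunreachable-code, the now-dead handler B is reported.
   A catch clause with a NULL type list catches every exception, so
   such a construct is not itself recorded as possibly throwing.  */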
1899
75a70cf9 1900/* Helper for remove_useless_stmts_1. Handle GIMPLE_BIND statements. */
4ee9c684 1901
1902static void
75a70cf9 1903remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data)
4ee9c684 1904{
1905 tree block;
75a70cf9 1906 gimple_seq body_seq, fn_body_seq;
1907 gimple_stmt_iterator body_gsi;
1908
1909 gimple stmt = gsi_stmt (*gsi);
4ee9c684 1910
 1911  /* First remove anything underneath the GIMPLE_BIND.  */
75a70cf9 1912
1913 body_seq = gimple_bind_body (stmt);
1914 body_gsi = gsi_start (body_seq);
1915 remove_useless_stmts_1 (&body_gsi, data);
4ee9c684 1916
75a70cf9 1917 /* If the GIMPLE_BIND has no variables, then we can pull everything
1918 up one level and remove the GIMPLE_BIND, unless this is the toplevel
1919 GIMPLE_BIND for the current function or an inlined function.
4ee9c684 1920
1921 When this situation occurs we will want to apply this
1922 optimization again. */
75a70cf9 1923 block = gimple_bind_block (stmt);
1924 fn_body_seq = gimple_body (current_function_decl);
1925 if (gimple_bind_vars (stmt) == NULL_TREE
1926 && (gimple_seq_empty_p (fn_body_seq)
1927 || stmt != gimple_seq_first_stmt (fn_body_seq))
4ee9c684 1928 && (! block
1929 || ! BLOCK_ABSTRACT_ORIGIN (block)
1930 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1931 != FUNCTION_DECL)))
1932 {
227f485e 1933 tree var = NULL_TREE;
1934 /* Even if there are no gimple_bind_vars, there might be other
1935 decls in BLOCK_VARS rendering the GIMPLE_BIND not useless. */
ebcb0478 1936 if (block && !BLOCK_NUM_NONLOCALIZED_VARS (block))
227f485e 1937 for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
1938 if (TREE_CODE (var) == IMPORTED_DECL)
1939 break;
ebcb0478 1940 if (var || (block && BLOCK_NUM_NONLOCALIZED_VARS (block)))
227f485e 1941 gsi_next (gsi);
1942 else
1943 {
1944 gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
1945 gsi_remove (gsi, false);
1946 data->repeat = true;
1947 }
4ee9c684 1948 }
75a70cf9 1949 else
1950 gsi_next (gsi);
4ee9c684 1951}
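
/* For example, a GIMPLE_BIND that in source form reads

     { x = a + b; }

   and that declares no variables, is not the outermost bind of the
   function, does not come from an inlined function, and whose BLOCK
   carries no IMPORTED_DECLs or non-localized variables is flattened
   above: its body is spliced into the enclosing sequence and the bind
   itself is deleted.  */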
1952
75a70cf9 1953/* Helper for remove_useless_stmts_1. Handle GIMPLE_GOTO statements. */
4ee9c684 1954
1955static void
75a70cf9 1956remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
4ee9c684 1957{
75a70cf9 1958 gimple stmt = gsi_stmt (*gsi);
1959
1960 tree dest = gimple_goto_dest (stmt);
4ee9c684 1961
1962 data->may_branch = true;
75a70cf9 1963 data->last_was_goto = false;
4ee9c684 1964
75a70cf9 1965  /* Record this goto's iterator so we can delete it later if unnecessary.  */
4ee9c684 1966 if (TREE_CODE (dest) == LABEL_DECL)
75a70cf9 1967 {
1968 data->last_goto_gsi = *gsi;
1969 data->last_was_goto = true;
1970 }
1971
 1972  gsi_next (gsi);
4ee9c684 1973}
1974
75a70cf9 1975/* Helper for remove_useless_stmts_1. Handle GIMPLE_LABEL statements. */
4ee9c684 1976
1977static void
75a70cf9 1978remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
4ee9c684 1979{
75a70cf9 1980 gimple stmt = gsi_stmt (*gsi);
1981
1982 tree label = gimple_label_label (stmt);
4ee9c684 1983
1984 data->has_label = true;
1985
1986 /* We do want to jump across non-local label receiver code. */
1987 if (DECL_NONLOCAL (label))
75a70cf9 1988 data->last_was_goto = false;
4ee9c684 1989
75a70cf9 1990 else if (data->last_was_goto
1991 && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
4ee9c684 1992 {
75a70cf9 1993 /* Replace the preceding GIMPLE_GOTO statement with
1994 a GIMPLE_NOP, which will be subsequently removed.
1995 In this way, we avoid invalidating other iterators
1996 active on the statement sequence. */
 1997      gsi_replace (&data->last_goto_gsi, gimple_build_nop (), false);
1998 data->last_was_goto = false;
4ee9c684 1999 data->repeat = true;
2000 }
2001
2002 /* ??? Add something here to delete unused labels. */
4ee9c684 2003
75a70cf9 2004 gsi_next (gsi);
4ee9c684 2005}
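
/* For example, in the sequence

     goto <L1>;
     <L1>:
       x = 1;

   the goto targets the label that immediately follows it, so it is
   replaced by a GIMPLE_NOP (removed on a later iteration) rather than
   deleted in place, which keeps other iterators over the sequence
   valid.  */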
2006
2007
 2008/* CALL is a call statement.  Set cfun->calls_* flags as appropriate.  */
2009
2010void
75a70cf9 2011notice_special_calls (gimple call)
4ee9c684 2012{
75a70cf9 2013 int flags = gimple_call_flags (call);
4ee9c684 2014
2015 if (flags & ECF_MAY_BE_ALLOCA)
18d50ae6 2016 cfun->calls_alloca = true;
4ee9c684 2017 if (flags & ECF_RETURNS_TWICE)
18d50ae6 2018 cfun->calls_setjmp = true;
4ee9c684 2019}
2020
2021
2022/* Clear flags set by notice_special_calls. Used by dead code removal
2023 to update the flags. */
2024
2025void
2026clear_special_calls (void)
2027{
18d50ae6 2028 cfun->calls_alloca = false;
2029 cfun->calls_setjmp = false;
4ee9c684 2030}
2031
75a70cf9 2032/* Remove useless statements from a statement sequence, and perform
2033 some preliminary simplifications. */
4ee9c684 2034
2035static void
75a70cf9 2036remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
4ee9c684 2037{
75a70cf9 2038 while (!gsi_end_p (*gsi))
4ee9c684 2039 {
75a70cf9 2040 gimple stmt = gsi_stmt (*gsi);
4ee9c684 2041
75a70cf9 2042 switch (gimple_code (stmt))
2043 {
2044 case GIMPLE_COND:
2045 remove_useless_stmts_cond (gsi, data);
2046 break;
2047
2048 case GIMPLE_GOTO:
2049 remove_useless_stmts_goto (gsi, data);
2050 break;
2051
2052 case GIMPLE_LABEL:
2053 remove_useless_stmts_label (gsi, data);
2054 break;
2055
2056 case GIMPLE_ASSIGN:
2057 fold_stmt (gsi);
2058 stmt = gsi_stmt (*gsi);
2059 data->last_was_goto = false;
2060 if (stmt_could_throw_p (stmt))
2061 data->may_throw = true;
2062 gsi_next (gsi);
2063 break;
2064
2065 case GIMPLE_ASM:
2066 fold_stmt (gsi);
2067 data->last_was_goto = false;
2068 gsi_next (gsi);
2069 break;
2070
2071 case GIMPLE_CALL:
2072 fold_stmt (gsi);
2073 stmt = gsi_stmt (*gsi);
2074 data->last_was_goto = false;
2075 if (is_gimple_call (stmt))
2076 notice_special_calls (stmt);
2077
2078 /* We used to call update_gimple_call_flags here,
2079 which copied side-effects and nothrows status
2080 from the function decl to the call. In the new
2081 tuplified GIMPLE, the accessors for this information
2082 always consult the function decl, so this copying
2083 is no longer necessary. */
2084 if (stmt_could_throw_p (stmt))
2085 data->may_throw = true;
2086 gsi_next (gsi);
2087 break;
2088
2089 case GIMPLE_RETURN:
2090 fold_stmt (gsi);
2091 data->last_was_goto = false;
2092 data->may_branch = true;
2093 gsi_next (gsi);
2094 break;
2095
2096 case GIMPLE_BIND:
2097 remove_useless_stmts_bind (gsi, data);
2098 break;
2099
2100 case GIMPLE_TRY:
2101 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
2102 remove_useless_stmts_tc (gsi, data);
2103 else if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
2104 remove_useless_stmts_tf (gsi, data);
2105 else
2106 gcc_unreachable ();
2107 break;
2108
2109 case GIMPLE_CATCH:
2110 gcc_unreachable ();
2111 break;
2112
2113 case GIMPLE_NOP:
2114 gsi_remove (gsi, false);
2115 break;
2116
2117 case GIMPLE_OMP_FOR:
2118 {
2119 gimple_seq pre_body_seq = gimple_omp_for_pre_body (stmt);
2120 gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);
2121
2122 remove_useless_stmts_1 (&pre_body_gsi, data);
2123 data->last_was_goto = false;
2124 }
2125 /* FALLTHROUGH */
2126 case GIMPLE_OMP_CRITICAL:
2127 case GIMPLE_OMP_CONTINUE:
2128 case GIMPLE_OMP_MASTER:
2129 case GIMPLE_OMP_ORDERED:
2130 case GIMPLE_OMP_SECTION:
2131 case GIMPLE_OMP_SECTIONS:
2132 case GIMPLE_OMP_SINGLE:
2133 {
2134 gimple_seq body_seq = gimple_omp_body (stmt);
2135 gimple_stmt_iterator body_gsi = gsi_start (body_seq);
2136
2137 remove_useless_stmts_1 (&body_gsi, data);
2138 data->last_was_goto = false;
2139 gsi_next (gsi);
2140 }
2141 break;
2142
2143 case GIMPLE_OMP_PARALLEL:
2144 case GIMPLE_OMP_TASK:
2145 {
2146 /* Make sure the outermost GIMPLE_BIND isn't removed
2147 as useless. */
2148 gimple_seq body_seq = gimple_omp_body (stmt);
2149 gimple bind = gimple_seq_first_stmt (body_seq);
2150 gimple_seq bind_seq = gimple_bind_body (bind);
2151 gimple_stmt_iterator bind_gsi = gsi_start (bind_seq);
2152
2153 remove_useless_stmts_1 (&bind_gsi, data);
2154 data->last_was_goto = false;
2155 gsi_next (gsi);
2156 }
2157 break;
2158
2159 default:
2160 data->last_was_goto = false;
2161 gsi_next (gsi);
2162 break;
2163 }
4ee9c684 2164 }
2165}
2166
75a70cf9 2167/* Walk the function tree, removing useless statements and performing
2168 some preliminary simplifications. */
2169
2a1990e9 2170static unsigned int
4ee9c684 2171remove_useless_stmts (void)
2172{
2173 struct rus_data data;
2174
2175 clear_special_calls ();
2176
2177 do
2178 {
75a70cf9 2179 gimple_stmt_iterator gsi;
2180
2181 gsi = gsi_start (gimple_body (current_function_decl));
4ee9c684 2182 memset (&data, 0, sizeof (data));
75a70cf9 2183 remove_useless_stmts_1 (&gsi, &data);
4ee9c684 2184 }
2185 while (data.repeat);
ae2a9b00 2186
2187#ifdef ENABLE_TYPES_CHECKING
2188 verify_types_in_gimple_seq (gimple_body (current_function_decl));
2189#endif
2190
2a1990e9 2191 return 0;
4ee9c684 2192}
2193
2194
20099e35 2195struct gimple_opt_pass pass_remove_useless_stmts =
4ee9c684 2196{
20099e35 2197 {
2198 GIMPLE_PASS,
4ee9c684 2199 "useless", /* name */
2200 NULL, /* gate */
2201 remove_useless_stmts, /* execute */
2202 NULL, /* sub */
2203 NULL, /* next */
2204 0, /* static_pass_number */
0b1615c1 2205 TV_NONE, /* tv_id */
b8b8d4c9 2206 PROP_gimple_any, /* properties_required */
2207 0, /* properties_provided */
4ee9c684 2208 0, /* properties_destroyed */
2209 0, /* todo_flags_start */
20099e35 2210 TODO_dump_func /* todo_flags_finish */
2211 }
4ee9c684 2212};
2213
4ee9c684 2214/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2215
2216static void
2217remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2218{
4ee9c684 2219 /* Since this block is no longer reachable, we can just delete all
2220 of its PHI nodes. */
899e6126 2221 remove_phi_nodes (bb);
4ee9c684 2222
2223 /* Remove edges to BB's successors. */
cd665a06 2224 while (EDGE_COUNT (bb->succs) > 0)
0891994d 2225 remove_edge (EDGE_SUCC (bb, 0));
4ee9c684 2226}
2227
2228
2229/* Remove statements of basic block BB. */
2230
2231static void
2232remove_bb (basic_block bb)
2233{
75a70cf9 2234 gimple_stmt_iterator i;
47e69468 2235 source_location loc = UNKNOWN_LOCATION;
4ee9c684 2236
2237 if (dump_file)
2238 {
2239 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2240 if (dump_flags & TDF_DETAILS)
2241 {
2242 dump_bb (bb, dump_file, 0);
2243 fprintf (dump_file, "\n");
2244 }
2245 }
2246
053fdd99 2247 if (current_loops)
2248 {
2249 struct loop *loop = bb->loop_father;
2250
88e6f696 2251 /* If a loop gets removed, clean up the information associated
2252 with it. */
053fdd99 2253 if (loop->latch == bb
2254 || loop->header == bb)
88e6f696 2255 free_numbers_of_iterations_estimates_loop (loop);
053fdd99 2256 }
2257
4ee9c684 2258 /* Remove all the instructions in the block. */
75a70cf9 2259 if (bb_seq (bb) != NULL)
4ee9c684 2260 {
75a70cf9 2261 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
4110ed3a 2262 {
75a70cf9 2263 gimple stmt = gsi_stmt (i);
2264 if (gimple_code (stmt) == GIMPLE_LABEL
2265 && (FORCED_LABEL (gimple_label_label (stmt))
2266 || DECL_NONLOCAL (gimple_label_label (stmt))))
c23dad79 2267 {
2268 basic_block new_bb;
75a70cf9 2269 gimple_stmt_iterator new_gsi;
c23dad79 2270
2271 /* A non-reachable non-local label may still be referenced.
2272 But it no longer needs to carry the extra semantics of
2273 non-locality. */
75a70cf9 2274 if (DECL_NONLOCAL (gimple_label_label (stmt)))
c23dad79 2275 {
75a70cf9 2276 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2277 FORCED_LABEL (gimple_label_label (stmt)) = 1;
c23dad79 2278 }
2fc7d87c 2279
c23dad79 2280 new_bb = bb->prev_bb;
75a70cf9 2281 new_gsi = gsi_start_bb (new_bb);
2282 gsi_remove (&i, false);
2283 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
c23dad79 2284 }
2285 else
2fc7d87c 2286 {
c23dad79 2287 /* Release SSA definitions if we are in SSA. Note that we
2288 may be called when not in SSA. For example,
2289 final_cleanup calls this function via
2290 cleanup_tree_cfg. */
2291 if (gimple_in_ssa_p (cfun))
2292 release_defs (stmt);
2293
75a70cf9 2294 gsi_remove (&i, true);
2fc7d87c 2295 }
13b96211 2296
c23dad79 2297 /* Don't warn for removed gotos. Gotos are often removed due to
2298 jump threading, thus resulting in bogus warnings. Not great,
2299 since this way we lose warnings for gotos in the original
2300 program that are indeed unreachable. */
75a70cf9 2301 if (gimple_code (stmt) != GIMPLE_GOTO
2302 && gimple_has_location (stmt)
2303 && !loc)
2304 loc = gimple_location (stmt);
ba821eb1 2305 }
4ee9c684 2306 }
2307
2308 /* If requested, give a warning that the first statement in the
 2309   block is unreachable.  The walk above records the location of the
 2310   first statement (other than a goto) that carries location
 2311   information, and that is the location we warn about here.  */
931b0a0f 2312 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
5fb6a912 2313 warning_at (loc, OPT_Wunreachable_code, "will never be executed");
4ee9c684 2314
2315 remove_phi_nodes_and_edges_for_unreachable_block (bb);
75a70cf9 2316 bb->il.gimple = NULL;
4ee9c684 2317}
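
/* Note on the label handling above: a FORCED_LABEL or DECL_NONLOCAL
   label in a dead block may still be referenced (e.g. its address was
   taken), so instead of being deleted it is moved into the previous
   basic block, and a non-local label is demoted to a plain forced
   label because its non-local semantics no longer apply.  */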
2318
4ee9c684 2319
1740345a 2320/* Given a basic block BB ending with a GIMPLE_COND, GIMPLE_SWITCH or
 2321   computed goto, and a constant value VAL, return the edge that will be
 2322   taken out of the block.  If VAL does not match a unique edge, NULL is returned.  */
4ee9c684 2323
2324edge
2325find_taken_edge (basic_block bb, tree val)
2326{
75a70cf9 2327 gimple stmt;
4ee9c684 2328
2329 stmt = last_stmt (bb);
2330
8c0963c4 2331 gcc_assert (stmt);
2332 gcc_assert (is_ctrl_stmt (stmt));
4ee9c684 2333
75a70cf9 2334 if (val == NULL)
2335 return NULL;
2336
2337 if (!is_gimple_min_invariant (val))
4ee9c684 2338 return NULL;
2339
75a70cf9 2340 if (gimple_code (stmt) == GIMPLE_COND)
4ee9c684 2341 return find_taken_edge_cond_expr (bb, val);
2342
75a70cf9 2343 if (gimple_code (stmt) == GIMPLE_SWITCH)
4ee9c684 2344 return find_taken_edge_switch_expr (bb, val);
2345
6d7413d8 2346 if (computed_goto_p (stmt))
94ca4916 2347 {
 2348      /* Only optimize if the argument is a label; if the argument is
 2349	 not a label, we cannot construct a proper CFG.
2350
 2351	 It may be the case that we only need to allow the LABEL_DECL to
 2352	 appear inside an ADDR_EXPR, but we also allow it to appear inside
 2353	 a LABEL_EXPR just to be safe.  */
2354 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2355 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2356 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2357 return NULL;
2358 }
6d7413d8 2359
1740345a 2360 gcc_unreachable ();
4ee9c684 2361}
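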
2362
6d7413d8 2363/* Given a constant value VAL and the basic block BB ending with a
 2364   computed GOTO, determine which outgoing edge will be taken out of the
 2365   block.  Return NULL if the destination block cannot be determined.  */
2366
2367static edge
2368find_taken_edge_computed_goto (basic_block bb, tree val)
2369{
2370 basic_block dest;
2371 edge e = NULL;
2372
2373 dest = label_to_block (val);
2374 if (dest)
2375 {
2376 e = find_edge (bb, dest);
2377 gcc_assert (e != NULL);
2378 }
2379
2380 return e;
2381}
4ee9c684 2382
 2383/* Given an INTEGER_CST VAL and the basic block BB ending with a
 2384   GIMPLE_COND, determine which of the two edges will be taken out of
 2385   the block.  */
2386
2387static edge
2388find_taken_edge_cond_expr (basic_block bb, tree val)
2389{
2390 edge true_edge, false_edge;
2391
2392 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
13b96211 2393
36204c30 2394 gcc_assert (TREE_CODE (val) == INTEGER_CST);
cd743a11 2395 return (integer_zerop (val) ? false_edge : true_edge);
4ee9c684 2396}
2397
9677695f 2398/* Given an INTEGER_CST VAL and the basic block BB ending with a
4ee9c684 2399   GIMPLE_SWITCH, determine which edge will be taken out of the block.
 2400   (The default label ensures that some edge is always found.)  */
2401
2402static edge
2403find_taken_edge_switch_expr (basic_block bb, tree val)
2404{
4ee9c684 2405 basic_block dest_bb;
2406 edge e;
75a70cf9 2407 gimple switch_stmt;
2408 tree taken_case;
4ee9c684 2409
75a70cf9 2410 switch_stmt = last_stmt (bb);
2411 taken_case = find_case_label_for_value (switch_stmt, val);
4ee9c684 2412 dest_bb = label_to_block (CASE_LABEL (taken_case));
2413
2414 e = find_edge (bb, dest_bb);
8c0963c4 2415 gcc_assert (e);
4ee9c684 2416 return e;
2417}
2418
2419
75a70cf9 2420/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
b624a250 2421 We can make optimal use here of the fact that the case labels are
2422 sorted: We can do a binary search for a case matching VAL. */
4ee9c684 2423
2424static tree
75a70cf9 2425find_case_label_for_value (gimple switch_stmt, tree val)
4ee9c684 2426{
75a70cf9 2427 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2428 tree default_case = gimple_switch_default_label (switch_stmt);
4ee9c684 2429
75a70cf9 2430 for (low = 0, high = n; high - low > 1; )
4ee9c684 2431 {
b624a250 2432 size_t i = (high + low) / 2;
75a70cf9 2433 tree t = gimple_switch_label (switch_stmt, i);
b624a250 2434 int cmp;
2435
2436 /* Cache the result of comparing CASE_LOW and val. */
2437 cmp = tree_int_cst_compare (CASE_LOW (t), val);
4ee9c684 2438
b624a250 2439 if (cmp > 0)
2440 high = i;
2441 else
2442 low = i;
2443
2444 if (CASE_HIGH (t) == NULL)
4ee9c684 2445 {
b624a250 2446	  /* A single-valued case label.  */
2447 if (cmp == 0)
4ee9c684 2448 return t;
2449 }
2450 else
2451 {
2452 /* A case range. We can only handle integer ranges. */
b624a250 2453 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
4ee9c684 2454 return t;
2455 }
2456 }
2457
4ee9c684 2458 return default_case;
2459}
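
/* Worked example for the binary search above: for

     switch (x) { default: ...;
                  case 1: ...; case 5: ...; case 10 ... 20: ...; }

   the case labels at indices 1..3 are sorted by CASE_LOW (1, 5, 10),
   with the default label kept at index 0.  Looking up VAL = 12 narrows
   [low, high) until the "10 ... 20" range label is examined; since its
   CASE_HIGH (20) is >= 12, that label is returned.  A value such as 7
   matches no label and falls back to the default.  */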
2460
2461
4ee9c684 2462/* Dump a basic block on stderr. */
2463
2464void
75a70cf9 2465gimple_debug_bb (basic_block bb)
4ee9c684 2466{
75a70cf9 2467 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
4ee9c684 2468}
2469
2470
2471/* Dump basic block with index N on stderr. */
2472
2473basic_block
75a70cf9 2474gimple_debug_bb_n (int n)
4ee9c684 2475{
75a70cf9 2476 gimple_debug_bb (BASIC_BLOCK (n));
4ee9c684 2477 return BASIC_BLOCK (n);
13b96211 2478}
4ee9c684 2479
2480
2481/* Dump the CFG on stderr.
2482
2483 FLAGS are the same used by the tree dumping functions
13b96211 2484 (see TDF_* in tree-pass.h). */
4ee9c684 2485
2486void
75a70cf9 2487gimple_debug_cfg (int flags)
4ee9c684 2488{
75a70cf9 2489 gimple_dump_cfg (stderr, flags);
4ee9c684 2490}
2491
2492
2493/* Dump the program showing basic block boundaries on the given FILE.
2494
2495 FLAGS are the same used by the tree dumping functions (see TDF_* in
2496 tree.h). */
2497
2498void
75a70cf9 2499gimple_dump_cfg (FILE *file, int flags)
4ee9c684 2500{
2501 if (flags & TDF_DETAILS)
2502 {
2503 const char *funcname
5135beeb 2504 = lang_hooks.decl_printable_name (current_function_decl, 2);
4ee9c684 2505
2506 fputc ('\n', file);
2507 fprintf (file, ";; Function %s\n\n", funcname);
2508 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2509 n_basic_blocks, n_edges, last_basic_block);
2510
2511 brief_dump_cfg (file);
2512 fprintf (file, "\n");
2513 }
2514
2515 if (flags & TDF_STATS)
2516 dump_cfg_stats (file);
2517
2518 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2519}
2520
2521
2522/* Dump CFG statistics on FILE. */
2523
2524void
2525dump_cfg_stats (FILE *file)
2526{
2527 static long max_num_merged_labels = 0;
2528 unsigned long size, total = 0;
c75de2f7 2529 long num_edges;
4ee9c684 2530 basic_block bb;
2531 const char * const fmt_str = "%-30s%-13s%12s\n";
56d16a1a 2532 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
69c638bc 2533 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
4ee9c684 2534 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2535 const char *funcname
5135beeb 2536 = lang_hooks.decl_printable_name (current_function_decl, 2);
4ee9c684 2537
2538
2539 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2540
2541 fprintf (file, "---------------------------------------------------------\n");
2542 fprintf (file, fmt_str, "", " Number of ", "Memory");
2543 fprintf (file, fmt_str, "", " instances ", "used ");
2544 fprintf (file, "---------------------------------------------------------\n");
2545
2546 size = n_basic_blocks * sizeof (struct basic_block_def);
2547 total += size;
56d16a1a 2548 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2549 SCALE (size), LABEL (size));
4ee9c684 2550
c75de2f7 2551 num_edges = 0;
4ee9c684 2552 FOR_EACH_BB (bb)
c75de2f7 2553 num_edges += EDGE_COUNT (bb->succs);
2554 size = num_edges * sizeof (struct edge_def);
4ee9c684 2555 total += size;
69c638bc 2556 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
4ee9c684 2557
4ee9c684 2558 fprintf (file, "---------------------------------------------------------\n");
2559 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2560 LABEL (total));
2561 fprintf (file, "---------------------------------------------------------\n");
2562 fprintf (file, "\n");
2563
2564 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2565 max_num_merged_labels = cfg_stats.num_merged_labels;
2566
2567 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2568 cfg_stats.num_merged_labels, max_num_merged_labels);
2569
2570 fprintf (file, "\n");
2571}
2572
2573
2574/* Dump CFG statistics on stderr. Keep extern so that it's always
2575 linked in the final executable. */
2576
2577void
2578debug_cfg_stats (void)
2579{
2580 dump_cfg_stats (stderr);
2581}
2582
2583
2584/* Dump the flowgraph to a .vcg FILE. */
2585
2586static void
75a70cf9 2587gimple_cfg2vcg (FILE *file)
4ee9c684 2588{
2589 edge e;
cd665a06 2590 edge_iterator ei;
4ee9c684 2591 basic_block bb;
2592 const char *funcname
5135beeb 2593 = lang_hooks.decl_printable_name (current_function_decl, 2);
4ee9c684 2594
2595 /* Write the file header. */
2596 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2597 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2598 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2599
2600 /* Write blocks and edges. */
cd665a06 2601 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4ee9c684 2602 {
2603 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2604 e->dest->index);
2605
2606 if (e->flags & EDGE_FAKE)
2607 fprintf (file, " linestyle: dotted priority: 10");
2608 else
2609 fprintf (file, " linestyle: solid priority: 100");
2610
2611 fprintf (file, " }\n");
2612 }
2613 fputc ('\n', file);
2614
2615 FOR_EACH_BB (bb)
2616 {
75a70cf9 2617 enum gimple_code head_code, end_code;
4ee9c684 2618 const char *head_name, *end_name;
2619 int head_line = 0;
2620 int end_line = 0;
75a70cf9 2621 gimple first = first_stmt (bb);
2622 gimple last = last_stmt (bb);
4ee9c684 2623
2624 if (first)
2625 {
75a70cf9 2626 head_code = gimple_code (first);
2627 head_name = gimple_code_name[head_code];
4ee9c684 2628 head_line = get_lineno (first);
2629 }
2630 else
2631 head_name = "no-statement";
2632
2633 if (last)
2634 {
75a70cf9 2635 end_code = gimple_code (last);
2636 end_name = gimple_code_name[end_code];
4ee9c684 2637 end_line = get_lineno (last);
2638 }
2639 else
2640 end_name = "no-statement";
2641
2642 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2643 bb->index, bb->index, head_name, head_line, end_name,
2644 end_line);
2645
cd665a06 2646 FOR_EACH_EDGE (e, ei, bb->succs)
4ee9c684 2647 {
2648 if (e->dest == EXIT_BLOCK_PTR)
2649 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2650 else
2651 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2652
2653 if (e->flags & EDGE_FAKE)
2654 fprintf (file, " priority: 10 linestyle: dotted");
2655 else
2656 fprintf (file, " priority: 100 linestyle: solid");
2657
2658 fprintf (file, " }\n");
2659 }
2660
2661 if (bb->next_bb != EXIT_BLOCK_PTR)
2662 fputc ('\n', file);
2663 }
2664
2665 fputs ("}\n\n", file);
2666}
2667
2668
2669
2670/*---------------------------------------------------------------------------
2671 Miscellaneous helpers
2672---------------------------------------------------------------------------*/
2673
2674/* Return true if T represents a stmt that always transfers control. */
2675
2676bool
75a70cf9 2677is_ctrl_stmt (gimple t)
4ee9c684 2678{
75a70cf9 2679 return gimple_code (t) == GIMPLE_COND
2680 || gimple_code (t) == GIMPLE_SWITCH
2681 || gimple_code (t) == GIMPLE_GOTO
2682 || gimple_code (t) == GIMPLE_RETURN
2683 || gimple_code (t) == GIMPLE_RESX;
4ee9c684 2684}
2685
2686
2687/* Return true if T is a statement that may alter the flow of control
2688 (e.g., a call to a non-returning function). */
2689
2690bool
75a70cf9 2691is_ctrl_altering_stmt (gimple t)
4ee9c684 2692{
8c0963c4 2693 gcc_assert (t);
75a70cf9 2694
2695 if (is_gimple_call (t))
4ee9c684 2696 {
75a70cf9 2697 int flags = gimple_call_flags (t);
2698
2699 /* A non-pure/const call alters flow control if the current
4ee9c684 2700 function has nonlocal labels. */
75a70cf9 2701 if (!(flags & (ECF_CONST | ECF_PURE))
2702 && cfun->has_nonlocal_label)
4ee9c684 2703 return true;
2704
75a70cf9 2705 /* A call also alters control flow if it does not return. */
2706 if (gimple_call_flags (t) & ECF_NORETURN)
4ee9c684 2707 return true;
4ee9c684 2708 }
2709
773c5ba7 2710 /* OpenMP directives alter control flow. */
75a70cf9 2711 if (is_gimple_omp (t))
773c5ba7 2712 return true;
2713
4ee9c684 2714 /* If a statement can throw, it alters control flow. */
75a70cf9 2715 return stmt_can_throw_internal (t);
4ee9c684 2716}
2717
2718
2c8a1497 2719/* Return true if T is a simple local goto. */
4ee9c684 2720
2721bool
75a70cf9 2722simple_goto_p (gimple t)
4ee9c684 2723{
75a70cf9 2724 return (gimple_code (t) == GIMPLE_GOTO
2725 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2c8a1497 2726}
2727
2728
2729/* Return true if T can make an abnormal transfer of control flow.
2730 Transfers of control flow associated with EH are excluded. */
2731
2732bool
75a70cf9 2733stmt_can_make_abnormal_goto (gimple t)
2c8a1497 2734{
2735 if (computed_goto_p (t))
2736 return true;
75a70cf9 2737 if (is_gimple_call (t))
2738 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2c8a1497 2739 return false;
4ee9c684 2740}
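
/* Examples of statements for which this returns true: a computed goto
   such as "goto *p;" in source form, or a call with side effects made
   from a function that contains non-local labels (the callee might
   transfer control back to one of them).  Exception edges are handled
   elsewhere and deliberately not considered here.  */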
2741
2742
75a70cf9 2743/* Return true if STMT should start a new basic block. PREV_STMT is
2744 the statement preceding STMT. It is used when STMT is a label or a
2745 case label. Labels should only start a new basic block if their
 2746   previous statement wasn't a label.  Otherwise, a sequence of labels
2747 would generate unnecessary basic blocks that only contain a single
2748 label. */
4ee9c684 2749
2750static inline bool
75a70cf9 2751stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
4ee9c684 2752{
75a70cf9 2753 if (stmt == NULL)
4ee9c684 2754 return false;
2755
75a70cf9 2756 /* Labels start a new basic block only if the preceding statement
2757 wasn't a label of the same type. This prevents the creation of
2758 consecutive blocks that have nothing but a single label. */
2759 if (gimple_code (stmt) == GIMPLE_LABEL)
4ee9c684 2760 {
2761 /* Nonlocal and computed GOTO targets always start a new block. */
75a70cf9 2762 if (DECL_NONLOCAL (gimple_label_label (stmt))
2763 || FORCED_LABEL (gimple_label_label (stmt)))
4ee9c684 2764 return true;
2765
75a70cf9 2766 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
4ee9c684 2767 {
75a70cf9 2768 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
4ee9c684 2769 return true;
2770
2771 cfg_stats.num_merged_labels++;
2772 return false;
2773 }
2774 else
2775 return true;
2776 }
2777
2778 return false;
2779}
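
/* For example, in the sequence

     <L1>:
     <L2>:
       x = 1;

   only <L1> starts a new basic block; <L2> is merged into the same
   block and counted in cfg_stats.num_merged_labels.  A non-local or
   forced (address-taken) label always starts a block of its own.  */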
2780
2781
2782/* Return true if T should end a basic block. */
2783
2784bool
75a70cf9 2785stmt_ends_bb_p (gimple t)
4ee9c684 2786{
2787 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2788}
2789
75a70cf9 2790/* Remove block annotations and other data structures. */
4ee9c684 2791
2792void
0ec80471 2793delete_tree_cfg_annotations (void)
4ee9c684 2794{
4ee9c684 2795 label_to_block_map = NULL;
4ee9c684 2796}
2797
2798
2799/* Return the first statement in basic block BB. */
2800
75a70cf9 2801gimple
4ee9c684 2802first_stmt (basic_block bb)
2803{
75a70cf9 2804 gimple_stmt_iterator i = gsi_start_bb (bb);
2805 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
4ee9c684 2806}
2807
d01c707b 2808/* Return the first non-label statement in basic block BB. */
2809
2810static gimple
2811first_non_label_stmt (basic_block bb)
2812{
2813 gimple_stmt_iterator i = gsi_start_bb (bb);
2814 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2815 gsi_next (&i);
2816 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2817}
2818
4ee9c684 2819/* Return the last statement in basic block BB. */
2820
75a70cf9 2821gimple
4ee9c684 2822last_stmt (basic_block bb)
2823{
75a70cf9 2824 gimple_stmt_iterator b = gsi_last_bb (bb);
2825 return !gsi_end_p (b) ? gsi_stmt (b) : NULL;
4ee9c684 2826}
2827
4ee9c684 2828/* Return the last statement of an otherwise empty block. Return NULL
2829 if the block is totally empty, or if it contains more than one
2830 statement. */
2831
75a70cf9 2832gimple
4ee9c684 2833last_and_only_stmt (basic_block bb)
2834{
75a70cf9 2835 gimple_stmt_iterator i = gsi_last_bb (bb);
2836 gimple last, prev;
4ee9c684 2837
75a70cf9 2838 if (gsi_end_p (i))
2839 return NULL;
4ee9c684 2840
75a70cf9 2841 last = gsi_stmt (i);
2842 gsi_prev (&i);
2843 if (gsi_end_p (i))
4ee9c684 2844 return last;
2845
2846 /* Empty statements should no longer appear in the instruction stream.
2847 Everything that might have appeared before should be deleted by
75a70cf9 2848 remove_useless_stmts, and the optimizers should just gsi_remove
4ee9c684 2849 instead of smashing with build_empty_stmt.
2850
2851 Thus the only thing that should appear here in a block containing
2852 one executable statement is a label. */
75a70cf9 2853 prev = gsi_stmt (i);
2854 if (gimple_code (prev) == GIMPLE_LABEL)
4ee9c684 2855 return last;
2856 else
75a70cf9 2857 return NULL;
bb445479 2858}
4ee9c684 2859
5f20a306 2860/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2861
2862static void
2863reinstall_phi_args (edge new_edge, edge old_edge)
2864{
d03ba86f 2865 edge_var_map_vector v;
2866 edge_var_map *vm;
2867 int i;
75a70cf9 2868 gimple_stmt_iterator phis;
2869
d03ba86f 2870 v = redirect_edge_var_map_vector (old_edge);
2871 if (!v)
5f20a306 2872 return;
75a70cf9 2873
2874 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2875 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2876 i++, gsi_next (&phis))
5f20a306 2877 {
75a70cf9 2878 gimple phi = gsi_stmt (phis);
d03ba86f 2879 tree result = redirect_edge_var_map_result (vm);
2880 tree arg = redirect_edge_var_map_def (vm);
75a70cf9 2881
2882 gcc_assert (result == gimple_phi_result (phi));
2883
04cd6268 2884 add_phi_arg (phi, arg, new_edge);
5f20a306 2885 }
75a70cf9 2886
d03ba86f 2887 redirect_edge_var_map_clear (old_edge);
5f20a306 2888}
2889
588ce679 2890/* Returns the basic block after which the new basic block created
c4d867e0 2891 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2892 near its "logical" location. This is of most help to humans looking
2893 at debugging dumps. */
2894
2895static basic_block
2896split_edge_bb_loc (edge edge_in)
2897{
2898 basic_block dest = edge_in->dest;
2899
2900 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
2901 return edge_in->src;
2902 else
2903 return dest->prev_bb;
2904}
2905
4ee9c684 2906/* Split a (typically critical) edge EDGE_IN. Return the new block.
2907 Abort on abnormal edges. */
2908
2909static basic_block
75a70cf9 2910gimple_split_edge (edge edge_in)
4ee9c684 2911{
0055dbde 2912 basic_block new_bb, after_bb, dest;
4ee9c684 2913 edge new_edge, e;
4ee9c684 2914
2915 /* Abnormal edges cannot be split. */
8c0963c4 2916 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
4ee9c684 2917
4ee9c684 2918 dest = edge_in->dest;
2919
c4d867e0 2920 after_bb = split_edge_bb_loc (edge_in);
4ee9c684 2921
2922 new_bb = create_empty_bb (after_bb);
856b3326 2923 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2924 new_bb->count = edge_in->count;
4ee9c684 2925 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
856b3326 2926 new_edge->probability = REG_BR_PROB_BASE;
2927 new_edge->count = edge_in->count;
4ee9c684 2928
8c0963c4 2929 e = redirect_edge_and_branch (edge_in, new_bb);
d8a0d6b8 2930 gcc_assert (e == edge_in);
5f20a306 2931 reinstall_phi_args (new_edge, e);
4ee9c684 2932
2933 return new_bb;
2934}
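
/* To illustrate: splitting an edge E from block A to block B yields

     A --E--> N --fallthru--> B

   where N is a new empty block placed near B in the block chain.  E is
   redirected to N, N inherits E's count and frequency, and any PHI
   arguments queued while redirecting E are re-installed on the new
   fallthrough edge into B.  */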
2935
4ee9c684 2936/* Callback for walk_tree: check that all elements with address taken
8b592de0 2937   are properly noticed as such.  DATA is unused by this callback.  */
4ee9c684 2939
2940static tree
e44658e5 2941verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4ee9c684 2942{
2943 tree t = *tp, x;
2944
2945 if (TYPE_P (t))
2946 *walk_subtrees = 0;
13b96211 2947
260e7e11 2948 /* Check operand N for being valid GIMPLE and give error MSG if not. */
e44658e5 2949#define CHECK_OP(N, MSG) \
260e7e11 2950 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
e44658e5 2951 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
4ee9c684 2952
2953 switch (TREE_CODE (t))
2954 {
2955 case SSA_NAME:
2956 if (SSA_NAME_IN_FREE_LIST (t))
2957 {
2958 error ("SSA name in freelist but still referenced");
2959 return *tp;
2960 }
2961 break;
2962
d3a42176 2963 case INDIRECT_REF:
2964 x = TREE_OPERAND (t, 0);
2965 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2966 {
2967 error ("Indirect reference's operand is not a register or a constant.");
2968 return x;
2969 }
2970 break;
2971
88dbf20f 2972 case ASSERT_EXPR:
2973 x = fold (ASSERT_EXPR_COND (t));
2974 if (x == boolean_false_node)
2975 {
2976 error ("ASSERT_EXPR with an always-false condition");
2977 return *tp;
2978 }
2979 break;
2980
4ee9c684 2981 case MODIFY_EXPR:
d3a42176 2982      error ("MODIFY_EXPR not expected while having tuples");
7262b179 2983 return *tp;
4ee9c684 2984
2985 case ADDR_EXPR:
2f96475c 2986 {
2f96475c 2987 bool old_constant;
2988 bool old_side_effects;
2f96475c 2989 bool new_constant;
2990 bool new_side_effects;
2991
c7d4e749 2992 gcc_assert (is_gimple_address (t));
2993
2f96475c 2994 old_constant = TREE_CONSTANT (t);
2995 old_side_effects = TREE_SIDE_EFFECTS (t);
2996
750ad201 2997 recompute_tree_invariant_for_addr_expr (t);
2f96475c 2998 new_side_effects = TREE_SIDE_EFFECTS (t);
2999 new_constant = TREE_CONSTANT (t);
3000
2f96475c 3001 if (old_constant != new_constant)
3002 {
3003 error ("constant not recomputed when ADDR_EXPR changed");
3004 return t;
3005 }
3006 if (old_side_effects != new_side_effects)
3007 {
3008 error ("side effects not recomputed when ADDR_EXPR changed");
3009 return t;
3010 }
3011
3012 /* Skip any references (they will be checked when we recurse down the
3013 tree) and ensure that any variable used as a prefix is marked
3014 addressable. */
3015 for (x = TREE_OPERAND (t, 0);
3016 handled_component_p (x);
3017 x = TREE_OPERAND (x, 0))
3018 ;
3019
dd277d48 3020 if (!(TREE_CODE (x) == VAR_DECL
3021 || TREE_CODE (x) == PARM_DECL
3022 || TREE_CODE (x) == RESULT_DECL))
2f96475c 3023 return NULL;
3024 if (!TREE_ADDRESSABLE (x))
3025 {
3026 error ("address taken, but ADDRESSABLE bit not set");
3027 return x;
3028 }
47f11e84 3029 if (DECL_GIMPLE_REG_P (x))
3030 {
3031 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
3032 return x;
3033 }
cd3f90e2 3034
2f96475c 3035 break;
3036 }
4ee9c684 3037
3038 case COND_EXPR:
58f52dd4 3039 x = COND_EXPR_COND (t);
79307120 3040 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
4ee9c684 3041 {
79307120 3042 error ("non-integral used in condition");
4ee9c684 3043 return x;
3044 }
3a45c5eb 3045 if (!is_gimple_condexpr (x))
3046 {
0a81f5a0 3047 error ("invalid conditional operand");
3a45c5eb 3048 return x;
3049 }
4ee9c684 3050 break;
3051
6a2b2394 3052 case NON_LVALUE_EXPR:
3053 gcc_unreachable ();
3054
72dd6141 3055 CASE_CONVERT:
4ee9c684 3056 case FIX_TRUNC_EXPR:
4ee9c684 3057 case FLOAT_EXPR:
3058 case NEGATE_EXPR:
3059 case ABS_EXPR:
3060 case BIT_NOT_EXPR:
4ee9c684 3061 case TRUTH_NOT_EXPR:
0a81f5a0 3062 CHECK_OP (0, "invalid operand to unary operator");
4ee9c684 3063 break;
3064
3065 case REALPART_EXPR:
3066 case IMAGPART_EXPR:
e44658e5 3067 case COMPONENT_REF:
3068 case ARRAY_REF:
3069 case ARRAY_RANGE_REF:
3070 case BIT_FIELD_REF:
3071 case VIEW_CONVERT_EXPR:
3072 /* We have a nest of references. Verify that each of the operands
3073 that determine where to reference is either a constant or a variable,
3074 verify that the base is valid, and then show we've already checked
3075 the subtrees. */
1f9b622b 3076 while (handled_component_p (t))
e44658e5 3077 {
3078 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
0a81f5a0 3079 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
e44658e5 3080 else if (TREE_CODE (t) == ARRAY_REF
3081 || TREE_CODE (t) == ARRAY_RANGE_REF)
3082 {
0a81f5a0 3083 CHECK_OP (1, "invalid array index");
e44658e5 3084 if (TREE_OPERAND (t, 2))
0a81f5a0 3085 CHECK_OP (2, "invalid array lower bound");
e44658e5 3086 if (TREE_OPERAND (t, 3))
0a81f5a0 3087 CHECK_OP (3, "invalid array stride");
e44658e5 3088 }
3089 else if (TREE_CODE (t) == BIT_FIELD_REF)
3090 {
70d2daf7 3091 if (!host_integerp (TREE_OPERAND (t, 1), 1)
3092 || !host_integerp (TREE_OPERAND (t, 2), 1))
3093 {
3094 error ("invalid position or size operand to BIT_FIELD_REF");
3095 return t;
3096 }
70337474 3097 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3098 && (TYPE_PRECISION (TREE_TYPE (t))
3099 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3100 {
3101 error ("integral result type precision does not match "
3102 "field size of BIT_FIELD_REF");
3103 return t;
3104 }
3105 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3106 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
3107 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3108 {
3109 error ("mode precision of non-integral result does not "
3110 "match field size of BIT_FIELD_REF");
3111 return t;
3112 }
e44658e5 3113 }
3114
3115 t = TREE_OPERAND (t, 0);
3116 }
3117
2a2aef73 3118 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
e44658e5 3119 {
0a81f5a0 3120 error ("invalid reference prefix");
e44658e5 3121 return t;
3122 }
3123 *walk_subtrees = 0;
4ee9c684 3124 break;
0de36bdb 3125 case PLUS_EXPR:
3126 case MINUS_EXPR:
3127 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3128 POINTER_PLUS_EXPR. */
3129 if (POINTER_TYPE_P (TREE_TYPE (t)))
3130 {
3131 error ("invalid operand to plus/minus, type is a pointer");
3132 return t;
3133 }
3134 CHECK_OP (0, "invalid operand to binary operator");
3135 CHECK_OP (1, "invalid operand to binary operator");
3136 break;
4ee9c684 3137
0de36bdb 3138 case POINTER_PLUS_EXPR:
3139 /* Check to make sure the first operand is a pointer or reference type. */
3140 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3141 {
3142 error ("invalid operand to pointer plus, first operand is not a pointer");
3143 return t;
3144 }
 3145      /* Check to make sure the second operand is an integer of type
 3146	 sizetype.  */
548044d8 3147 if (!useless_type_conversion_p (sizetype,
3148 TREE_TYPE (TREE_OPERAND (t, 1))))
0de36bdb 3149 {
3150 error ("invalid operand to pointer plus, second operand is not an "
3151 "integer with type of sizetype.");
3152 return t;
3153 }
3154 /* FALLTHROUGH */
4ee9c684 3155 case LT_EXPR:
3156 case LE_EXPR:
3157 case GT_EXPR:
3158 case GE_EXPR:
3159 case EQ_EXPR:
3160 case NE_EXPR:
3161 case UNORDERED_EXPR:
3162 case ORDERED_EXPR:
3163 case UNLT_EXPR:
3164 case UNLE_EXPR:
3165 case UNGT_EXPR:
3166 case UNGE_EXPR:
3167 case UNEQ_EXPR:
318a728f 3168 case LTGT_EXPR:
4ee9c684 3169 case MULT_EXPR:
3170 case TRUNC_DIV_EXPR:
3171 case CEIL_DIV_EXPR:
3172 case FLOOR_DIV_EXPR:
3173 case ROUND_DIV_EXPR:
3174 case TRUNC_MOD_EXPR:
3175 case CEIL_MOD_EXPR:
3176 case FLOOR_MOD_EXPR:
3177 case ROUND_MOD_EXPR:
3178 case RDIV_EXPR:
3179 case EXACT_DIV_EXPR:
3180 case MIN_EXPR:
3181 case MAX_EXPR:
3182 case LSHIFT_EXPR:
3183 case RSHIFT_EXPR:
3184 case LROTATE_EXPR:
3185 case RROTATE_EXPR:
3186 case BIT_IOR_EXPR:
3187 case BIT_XOR_EXPR:
3188 case BIT_AND_EXPR:
0a81f5a0 3189 CHECK_OP (0, "invalid operand to binary operator");
3190 CHECK_OP (1, "invalid operand to binary operator");
4ee9c684 3191 break;
3192
96d5c2e2 3193 case CONSTRUCTOR:
3194 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3195 *walk_subtrees = 0;
3196 break;
3197
4ee9c684 3198 default:
3199 break;
3200 }
3201 return NULL;
e44658e5 3202
3203#undef CHECK_OP
4ee9c684 3204}
3205
f03e0ae4 3206
3207/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3208 Returns true if there is an error, otherwise false. */
3209
3210static bool
75a70cf9 3211verify_types_in_gimple_min_lval (tree expr)
f03e0ae4 3212{
3213 tree op;
3214
3215 if (is_gimple_id (expr))
3216 return false;
3217
ec6b51ed 3218 if (!INDIRECT_REF_P (expr)
3219 && TREE_CODE (expr) != TARGET_MEM_REF)
f03e0ae4 3220 {
3221 error ("invalid expression for min lvalue");
3222 return true;
3223 }
3224
ec6b51ed 3225 /* TARGET_MEM_REFs are strange beasts. */
3226 if (TREE_CODE (expr) == TARGET_MEM_REF)
3227 return false;
3228
f03e0ae4 3229 op = TREE_OPERAND (expr, 0);
3230 if (!is_gimple_val (op))
3231 {
3232 error ("invalid operand in indirect reference");
3233 debug_generic_stmt (op);
3234 return true;
3235 }
3236 if (!useless_type_conversion_p (TREE_TYPE (expr),
3237 TREE_TYPE (TREE_TYPE (op))))
3238 {
3239 error ("type mismatch in indirect reference");
3240 debug_generic_stmt (TREE_TYPE (expr));
3241 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3242 return true;
3243 }
3244
3245 return false;
3246}
3247
b898ce29 3248/* Verify if EXPR is a valid GIMPLE reference expression. If
3249 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
f03e0ae4 3250 if there is an error, otherwise false. */
3251
3252static bool
b898ce29 3253verify_types_in_gimple_reference (tree expr, bool require_lvalue)
f03e0ae4 3254{
3255 while (handled_component_p (expr))
3256 {
3257 tree op = TREE_OPERAND (expr, 0);
3258
3259 if (TREE_CODE (expr) == ARRAY_REF
3260 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3261 {
3262 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3263 || (TREE_OPERAND (expr, 2)
3264 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3265 || (TREE_OPERAND (expr, 3)
3266 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3267 {
3268 error ("invalid operands to array reference");
3269 debug_generic_stmt (expr);
3270 return true;
3271 }
3272 }
3273
3274 /* Verify if the reference array element types are compatible. */
3275 if (TREE_CODE (expr) == ARRAY_REF
3276 && !useless_type_conversion_p (TREE_TYPE (expr),
3277 TREE_TYPE (TREE_TYPE (op))))
3278 {
3279 error ("type mismatch in array reference");
3280 debug_generic_stmt (TREE_TYPE (expr));
3281 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3282 return true;
3283 }
3284 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3285 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3286 TREE_TYPE (TREE_TYPE (op))))
3287 {
3288 error ("type mismatch in array range reference");
3289 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3290 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3291 return true;
3292 }
3293
3294 if ((TREE_CODE (expr) == REALPART_EXPR
3295 || TREE_CODE (expr) == IMAGPART_EXPR)
3296 && !useless_type_conversion_p (TREE_TYPE (expr),
3297 TREE_TYPE (TREE_TYPE (op))))
3298 {
3299 error ("type mismatch in real/imagpart reference");
3300 debug_generic_stmt (TREE_TYPE (expr));
3301 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3302 return true;
3303 }
3304
3305 if (TREE_CODE (expr) == COMPONENT_REF
3306 && !useless_type_conversion_p (TREE_TYPE (expr),
3307 TREE_TYPE (TREE_OPERAND (expr, 1))))
3308 {
3309 error ("type mismatch in component reference");
3310 debug_generic_stmt (TREE_TYPE (expr));
3311 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3312 return true;
3313 }
3314
3315 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3316 is nothing to verify. Gross mismatches at most invoke
3317 undefined behavior. */
ec6b51ed 3318 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3319 && !handled_component_p (op))
3320 return false;
f03e0ae4 3321
3322 expr = op;
3323 }
3324
b898ce29 3325 return ((require_lvalue || !is_gimple_min_invariant (expr))
3326 && verify_types_in_gimple_min_lval (expr));
f03e0ae4 3327}
3328
c3a8498b 3329/* Returns true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ)
 3330   list of pointer-to types is trivially convertible to DEST.  */
3331
3332static bool
3333one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3334{
3335 tree src;
3336
3337 if (!TYPE_POINTER_TO (src_obj))
3338 return true;
3339
3340 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3341 if (useless_type_conversion_p (dest, src))
3342 return true;
3343
3344 return false;
3345}
3346
75a70cf9 3347/* Return true if TYPE1 is a fixed-point type and if conversions to and
3348 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3349
3350static bool
3351valid_fixed_convert_types_p (tree type1, tree type2)
3352{
3353 return (FIXED_POINT_TYPE_P (type1)
3354 && (INTEGRAL_TYPE_P (type2)
3355 || SCALAR_FLOAT_TYPE_P (type2)
3356 || FIXED_POINT_TYPE_P (type2)));
3357}
3358
75a70cf9 3359/* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3360 is a problem, otherwise false. */
3361
3362static bool
d266354c 3363verify_gimple_call (gimple stmt)
75a70cf9 3364{
d266354c 3365 tree fn = gimple_call_fn (stmt);
3366 tree fntype;
75a70cf9 3367
d266354c 3368 if (!POINTER_TYPE_P (TREE_TYPE (fn))
3369 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3370 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
3371 {
3372 error ("non-function in gimple call");
3373 return true;
3374 }
75a70cf9 3375
d266354c 3376 if (gimple_call_lhs (stmt)
3377 && !is_gimple_lvalue (gimple_call_lhs (stmt)))
3378 {
3379 error ("invalid LHS in gimple call");
3380 return true;
3381 }
75a70cf9 3382
d266354c 3383 fntype = TREE_TYPE (TREE_TYPE (fn));
3384 if (gimple_call_lhs (stmt)
3385 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3386 TREE_TYPE (fntype))
3387 /* ??? At least C++ misses conversions at assignments from
3388 void * call results.
3389 ??? Java is completely off. Especially with functions
3390 returning java.lang.Object.
3391 For now simply allow arbitrary pointer type conversions. */
3392 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3393 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3394 {
3395 error ("invalid conversion in gimple call");
3396 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3397 debug_generic_stmt (TREE_TYPE (fntype));
3398 return true;
3399 }
75a70cf9 3400
d266354c 3401 /* ??? The C frontend passes unpromoted arguments in case it
3402 didn't see a function declaration before the call. So for now
3403 leave the call arguments unverified. Once we gimplify
3404 unit-at-a-time we have a chance to fix this. */
75a70cf9 3405
d266354c 3406 return false;
75a70cf9 3407}
3408
d266354c 3409/* Verifies the gimple comparison with the result type TYPE and
3410 the operands OP0 and OP1. */
9ac19c3a 3411
3412static bool
d266354c 3413verify_gimple_comparison (tree type, tree op0, tree op1)
9ac19c3a 3414{
d266354c 3415 tree op0_type = TREE_TYPE (op0);
3416 tree op1_type = TREE_TYPE (op1);
75a70cf9 3417
d266354c 3418 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3419 {
3420 error ("invalid operands in gimple comparison");
3421 return true;
3422 }
9ac19c3a 3423
d266354c 3424 /* For comparisons we do not have the operations type as the
3425 effective type the comparison is carried out in. Instead
3426 we require that either the first operand is trivially
3427 convertible into the second, or the other way around.
3428 The resulting type of a comparison may be any integral type.
3429 Because we special-case pointers to void we allow
3430 comparisons of pointers with the same mode as well. */
3431 if ((!useless_type_conversion_p (op0_type, op1_type)
3432 && !useless_type_conversion_p (op1_type, op0_type)
3433 && (!POINTER_TYPE_P (op0_type)
3434 || !POINTER_TYPE_P (op1_type)
3435 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3436 || !INTEGRAL_TYPE_P (type))
3437 {
3438 error ("type mismatch in comparison expression");
3439 debug_generic_expr (type);
3440 debug_generic_expr (op0_type);
3441 debug_generic_expr (op1_type);
3442 return true;
3443 }
3444
3445 return false;
3446}
75a70cf9 3447
ec6b51ed 3448/* Verify a gimple assignment statement STMT with an unary rhs.
3449 Returns true if anything is wrong. */
f03e0ae4 3450
3451static bool
ec6b51ed 3452verify_gimple_assign_unary (gimple stmt)
f03e0ae4 3453{
75a70cf9 3454 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3455 tree lhs = gimple_assign_lhs (stmt);
75a70cf9 3456 tree lhs_type = TREE_TYPE (lhs);
ec6b51ed 3457 tree rhs1 = gimple_assign_rhs1 (stmt);
75a70cf9 3458 tree rhs1_type = TREE_TYPE (rhs1);
f03e0ae4 3459
ec6b51ed 3460 if (!is_gimple_reg (lhs)
3461 && !(optimize == 0
3462 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3463 {
3464 error ("non-register as LHS of unary operation");
3465 return true;
3466 }
3467
3468 if (!is_gimple_val (rhs1))
3469 {
3470 error ("invalid operand in unary operation");
3471 return true;
3472 }
3473
3474 /* First handle conversions. */
75a70cf9 3475 switch (rhs_code)
f03e0ae4 3476 {
72dd6141 3477 CASE_CONVERT:
f03e0ae4 3478 {
f03e0ae4 3479 /* Allow conversions between integral types and pointers only if
ec6b51ed 3480 there is no sign or zero extension involved.
3481 For targets were the precision of sizetype doesn't match that
3482 of pointers we need to allow arbitrary conversions from and
3483 to sizetype. */
3484 if ((POINTER_TYPE_P (lhs_type)
3485 && INTEGRAL_TYPE_P (rhs1_type)
3486 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3487 || rhs1_type == sizetype))
3488 || (POINTER_TYPE_P (rhs1_type)
3489 && INTEGRAL_TYPE_P (lhs_type)
3490 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3491 || lhs_type == sizetype)))
f03e0ae4 3492 return false;
3493
3494 /* Allow conversion from integer to offset type and vice versa. */
75a70cf9 3495 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3496 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3497 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3498 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
f03e0ae4 3499 return false;
3500
3501 /* Otherwise assert we are converting between types of the
3502 same kind. */
75a70cf9 3503 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
f03e0ae4 3504 {
3505 error ("invalid types in nop conversion");
75a70cf9 3506 debug_generic_expr (lhs_type);
3507 debug_generic_expr (rhs1_type);
f03e0ae4 3508 return true;
3509 }
3510
3511 return false;
3512 }
3513
9ac19c3a 3514 case FIXED_CONVERT_EXPR:
3515 {
75a70cf9 3516 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3517 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
9ac19c3a 3518 {
3519 error ("invalid types in fixed-point conversion");
75a70cf9 3520 debug_generic_expr (lhs_type);
3521 debug_generic_expr (rhs1_type);
9ac19c3a 3522 return true;
3523 }
3524
3525 return false;
3526 }
3527
f03e0ae4 3528 case FLOAT_EXPR:
3529 {
75a70cf9 3530 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
f03e0ae4 3531 {
3532 error ("invalid types in conversion to floating point");
75a70cf9 3533 debug_generic_expr (lhs_type);
3534 debug_generic_expr (rhs1_type);
f03e0ae4 3535 return true;
3536 }
75a70cf9 3537
f03e0ae4 3538 return false;
3539 }
3540
3541 case FIX_TRUNC_EXPR:
3542 {
75a70cf9 3543 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
f03e0ae4 3544 {
3545 error ("invalid types in conversion to integer");
75a70cf9 3546 debug_generic_expr (lhs_type);
3547 debug_generic_expr (rhs1_type);
f03e0ae4 3548 return true;
3549 }
75a70cf9 3550
f03e0ae4 3551 return false;
3552 }
3553
87f9ffa4 3554 case VEC_UNPACK_HI_EXPR:
3555 case VEC_UNPACK_LO_EXPR:
3556 case REDUC_MAX_EXPR:
3557 case REDUC_MIN_EXPR:
3558 case REDUC_PLUS_EXPR:
3559 case VEC_UNPACK_FLOAT_HI_EXPR:
3560 case VEC_UNPACK_FLOAT_LO_EXPR:
3561 /* FIXME. */
3562 return false;
ec6b51ed 3563
87f9ffa4 3564 case TRUTH_NOT_EXPR:
ec6b51ed 3565 case NEGATE_EXPR:
3566 case ABS_EXPR:
3567 case BIT_NOT_EXPR:
3568 case PAREN_EXPR:
3569 case NON_LVALUE_EXPR:
3570 case CONJ_EXPR:
ec6b51ed 3571 break;
3572
3573 default:
3574 gcc_unreachable ();
3575 }
3576
3577 /* For the remaining codes assert there is no conversion involved. */
3578 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3579 {
3580 error ("non-trivial conversion in unary operation");
3581 debug_generic_expr (lhs_type);
3582 debug_generic_expr (rhs1_type);
3583 return true;
3584 }
3585
3586 return false;
3587}
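
/* Illustrative note, not compiled: a conversion such as
       l_1 = (long) i_2;        <-- int to long, both integral
   is accepted by the CASE_CONVERT arm above because both sides are of
   the same kind.  For the non-conversion codes, e.g.
       n_1 = -m_2;
   the operand type must be trivially convertible to the LHS type, or
   the "non-trivial conversion in unary operation" error is emitted.  */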
3588
3589/* Verify a gimple assignment statement STMT with a binary rhs.
3590 Returns true if anything is wrong. */
3591
3592static bool
3593verify_gimple_assign_binary (gimple stmt)
3594{
3595 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3596 tree lhs = gimple_assign_lhs (stmt);
3597 tree lhs_type = TREE_TYPE (lhs);
3598 tree rhs1 = gimple_assign_rhs1 (stmt);
3599 tree rhs1_type = TREE_TYPE (rhs1);
3600 tree rhs2 = gimple_assign_rhs2 (stmt);
3601 tree rhs2_type = TREE_TYPE (rhs2);
3602
3603 if (!is_gimple_reg (lhs)
3604 && !(optimize == 0
3605 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3606 {
3607 error ("non-register as LHS of binary operation");
3608 return true;
3609 }
75a70cf9 3610
ec6b51ed 3611 if (!is_gimple_val (rhs1)
3612 || !is_gimple_val (rhs2))
3613 {
3614 error ("invalid operands in binary operation");
3615 return true;
3616 }
3617
3618 /* First handle operations that involve different types. */
3619 switch (rhs_code)
3620 {
3621 case COMPLEX_EXPR:
3622 {
3623 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3624 || !(INTEGRAL_TYPE_P (rhs1_type)
75a70cf9 3625 || SCALAR_FLOAT_TYPE_P (rhs1_type))
ec6b51ed 3626 || !(INTEGRAL_TYPE_P (rhs2_type)
75a70cf9 3627 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
f03e0ae4 3628 {
3629 error ("type mismatch in complex expression");
75a70cf9 3630 debug_generic_expr (lhs_type);
3631 debug_generic_expr (rhs1_type);
3632 debug_generic_expr (rhs2_type);
f03e0ae4 3633 return true;
3634 }
75a70cf9 3635
f03e0ae4 3636 return false;
3637 }
3638
f03e0ae4 3639 case LSHIFT_EXPR:
3640 case RSHIFT_EXPR:
3641 case LROTATE_EXPR:
3642 case RROTATE_EXPR:
3643 {
87f9ffa4 3644 /* Shifts and rotates are ok on integral types, fixed point
3645 types and integer vector types. */
3646 if ((!INTEGRAL_TYPE_P (rhs1_type)
3647 && !FIXED_POINT_TYPE_P (rhs1_type)
3648 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3649 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE))
3650 || (!INTEGRAL_TYPE_P (rhs2_type)
3651 /* Vector shifts of vectors are also ok. */
3652 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3653 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE
3654 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3655 && TREE_CODE (TREE_TYPE (rhs2_type)) == INTEGER_TYPE))
75a70cf9 3656 || !useless_type_conversion_p (lhs_type, rhs1_type))
f03e0ae4 3657 {
3658 error ("type mismatch in shift expression");
75a70cf9 3659 debug_generic_expr (lhs_type);
3660 debug_generic_expr (rhs1_type);
3661 debug_generic_expr (rhs2_type);
f03e0ae4 3662 return true;
3663 }
75a70cf9 3664
f03e0ae4 3665 return false;
3666 }
3667
ec6b51ed 3668 case VEC_LSHIFT_EXPR:
3669 case VEC_RSHIFT_EXPR:
f03e0ae4 3670 {
ec6b51ed 3671 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
f323bb78 3672 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
8f93c289 3673 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3674 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
ec6b51ed 3675 || (!INTEGRAL_TYPE_P (rhs2_type)
3676 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3677 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3678 || !useless_type_conversion_p (lhs_type, rhs1_type))
f03e0ae4 3679 {
ec6b51ed 3680 error ("type mismatch in vector shift expression");
3681 debug_generic_expr (lhs_type);
3682 debug_generic_expr (rhs1_type);
3683 debug_generic_expr (rhs2_type);
f03e0ae4 3684 return true;
3685 }
8f93c289 3686 /* For shifting a vector of floating point components we
3687 only allow shifting by a constant multiple of the element size. */
3688 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3689 && (TREE_CODE (rhs2) != INTEGER_CST
3690 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3691 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3692 {
3693 error ("non-element sized vector shift of floating point vector");
3694 return true;
3695 }
75a70cf9 3696
ec6b51ed 3697 return false;
f03e0ae4 3698 }
3699
3e10a823 3700 case PLUS_EXPR:
3701 {
3702 /* We use regular PLUS_EXPR for vectors.
3703 ??? This just makes the checker happy and may not be what is
3704 intended. */
3705 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3706 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3707 {
3708 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3709 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3710 {
3711 error ("invalid non-vector operands to vector valued plus");
3712 return true;
3713 }
3714 lhs_type = TREE_TYPE (lhs_type);
3715 rhs1_type = TREE_TYPE (rhs1_type);
3716 rhs2_type = TREE_TYPE (rhs2_type);
3717 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3718 the pointer to 2nd place. */
3719 if (POINTER_TYPE_P (rhs2_type))
3720 {
3721 tree tem = rhs1_type;
3722 rhs1_type = rhs2_type;
3723 rhs2_type = tem;
3724 }
3725 goto do_pointer_plus_expr_check;
3726 }
3727 }
3728 /* Fallthru. */
3729 case MINUS_EXPR:
3730 {
3731 if (POINTER_TYPE_P (lhs_type)
3732 || POINTER_TYPE_P (rhs1_type)
3733 || POINTER_TYPE_P (rhs2_type))
3734 {
3735 error ("invalid (pointer) operands to plus/minus");
3736 return true;
3737 }
3738
3739 /* Continue with generic binary expression handling. */
3740 break;
3741 }
3742
f03e0ae4 3743 case POINTER_PLUS_EXPR:
3744 {
3e10a823 3745do_pointer_plus_expr_check:
75a70cf9 3746 if (!POINTER_TYPE_P (rhs1_type)
3747 || !useless_type_conversion_p (lhs_type, rhs1_type)
3748 || !useless_type_conversion_p (sizetype, rhs2_type))
f03e0ae4 3749 {
3750 error ("type mismatch in pointer plus expression");
75a70cf9 3751 debug_generic_stmt (lhs_type);
3752 debug_generic_stmt (rhs1_type);
3753 debug_generic_stmt (rhs2_type);
f03e0ae4 3754 return true;
3755 }
f03e0ae4 3756
75a70cf9 3757 return false;
3758 }
f03e0ae4 3759
f03e0ae4 3760 case TRUTH_ANDIF_EXPR:
3761 case TRUTH_ORIF_EXPR:
984bd637 3762 gcc_unreachable ();
3763
f03e0ae4 3764 case TRUTH_AND_EXPR:
3765 case TRUTH_OR_EXPR:
3766 case TRUTH_XOR_EXPR:
3767 {
f03e0ae4 3768 /* We allow any kind of integral typed argument and result. */
75a70cf9 3769 if (!INTEGRAL_TYPE_P (rhs1_type)
3770 || !INTEGRAL_TYPE_P (rhs2_type)
3771 || !INTEGRAL_TYPE_P (lhs_type))
f03e0ae4 3772 {
3773 error ("type mismatch in binary truth expression");
75a70cf9 3774 debug_generic_expr (lhs_type);
3775 debug_generic_expr (rhs1_type);
3776 debug_generic_expr (rhs2_type);
f03e0ae4 3777 return true;
3778 }
3779
3780 return false;
3781 }
3782
ec6b51ed 3783 case LT_EXPR:
3784 case LE_EXPR:
3785 case GT_EXPR:
3786 case GE_EXPR:
3787 case EQ_EXPR:
3788 case NE_EXPR:
3789 case UNORDERED_EXPR:
3790 case ORDERED_EXPR:
3791 case UNLT_EXPR:
3792 case UNLE_EXPR:
3793 case UNGT_EXPR:
3794 case UNGE_EXPR:
3795 case UNEQ_EXPR:
3796 case LTGT_EXPR:
3797 /* Comparisons are also binary, but the result type is not
3798 connected to the operand types. */
3799 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
f03e0ae4 3800
87f9ffa4 3801 case WIDEN_SUM_EXPR:
3802 case WIDEN_MULT_EXPR:
3803 case VEC_WIDEN_MULT_HI_EXPR:
3804 case VEC_WIDEN_MULT_LO_EXPR:
3805 case VEC_PACK_TRUNC_EXPR:
3806 case VEC_PACK_SAT_EXPR:
3807 case VEC_PACK_FIX_TRUNC_EXPR:
3808 case VEC_EXTRACT_EVEN_EXPR:
3809 case VEC_EXTRACT_ODD_EXPR:
3810 case VEC_INTERLEAVE_HIGH_EXPR:
3811 case VEC_INTERLEAVE_LOW_EXPR:
3812 /* FIXME. */
3813 return false;
3814
ec6b51ed 3815 case MULT_EXPR:
3816 case TRUNC_DIV_EXPR:
3817 case CEIL_DIV_EXPR:
3818 case FLOOR_DIV_EXPR:
3819 case ROUND_DIV_EXPR:
3820 case TRUNC_MOD_EXPR:
3821 case CEIL_MOD_EXPR:
3822 case FLOOR_MOD_EXPR:
3823 case ROUND_MOD_EXPR:
3824 case RDIV_EXPR:
3825 case EXACT_DIV_EXPR:
3826 case MIN_EXPR:
3827 case MAX_EXPR:
3828 case BIT_IOR_EXPR:
3829 case BIT_XOR_EXPR:
3830 case BIT_AND_EXPR:
ec6b51ed 3831 /* Continue with generic binary expression handling. */
3832 break;
f03e0ae4 3833
ec6b51ed 3834 default:
3835 gcc_unreachable ();
3836 }
635b74da 3837
ec6b51ed 3838 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3839 || !useless_type_conversion_p (lhs_type, rhs2_type))
3840 {
3841 error ("type mismatch in binary expression");
3842 debug_generic_stmt (lhs_type);
3843 debug_generic_stmt (rhs1_type);
3844 debug_generic_stmt (rhs2_type);
3845 return true;
3846 }
3847
3848 return false;
3849}
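
/* Illustrative note, not compiled: an ordinary arithmetic statement
   such as
       c_1 = a_2 + b_3;         <-- all operands of type int
   falls through to the generic check at the end and is accepted since
   both operand types are trivially convertible to the LHS type.  A
   pointer operand to PLUS_EXPR/MINUS_EXPR is rejected above; pointer
   arithmetic must use POINTER_PLUS_EXPR with a sizetype offset.  */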
3850
3851/* Verify a gimple assignment statement STMT with a single rhs.
3852 Returns true if anything is wrong. */
3853
3854static bool
3855verify_gimple_assign_single (gimple stmt)
3856{
3857 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3858 tree lhs = gimple_assign_lhs (stmt);
3859 tree lhs_type = TREE_TYPE (lhs);
3860 tree rhs1 = gimple_assign_rhs1 (stmt);
3861 tree rhs1_type = TREE_TYPE (rhs1);
3862 bool res = false;
3863
3864 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3865 {
3866 error ("non-trivial conversion at assignment");
3867 debug_generic_expr (lhs_type);
3868 debug_generic_expr (rhs1_type);
3869 return true;
f03e0ae4 3870 }
3871
ec6b51ed 3872 if (handled_component_p (lhs))
b898ce29 3873 res |= verify_types_in_gimple_reference (lhs, true);
ec6b51ed 3874
3875 /* Special codes we cannot handle via their class. */
3876 switch (rhs_code)
f03e0ae4 3877 {
ec6b51ed 3878 case ADDR_EXPR:
3879 {
3880 tree op = TREE_OPERAND (rhs1, 0);
3881 if (!is_gimple_addressable (op))
3882 {
3883 error ("invalid operand in unary expression");
3884 return true;
3885 }
767be488 3886
d7dfb761 3887 if (!one_pointer_to_useless_type_conversion_p (lhs_type,
3888 TREE_TYPE (op)))
ec6b51ed 3889 {
3890 error ("type mismatch in address expression");
3891 debug_generic_stmt (lhs_type);
3892 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
3893 return true;
3894 }
3895
b898ce29 3896 return verify_types_in_gimple_reference (op, true);
ec6b51ed 3897 }
3898
3899 /* tcc_reference */
3900 case COMPONENT_REF:
3901 case BIT_FIELD_REF:
3902 case INDIRECT_REF:
3903 case ALIGN_INDIRECT_REF:
3904 case MISALIGNED_INDIRECT_REF:
3905 case ARRAY_REF:
3906 case ARRAY_RANGE_REF:
3907 case VIEW_CONVERT_EXPR:
3908 case REALPART_EXPR:
3909 case IMAGPART_EXPR:
3910 case TARGET_MEM_REF:
3911 if (!is_gimple_reg (lhs)
3912 && is_gimple_reg_type (TREE_TYPE (lhs)))
767be488 3913 {
ec6b51ed 3914 error ("invalid rhs for gimple memory store");
3915 debug_generic_stmt (lhs);
3916 debug_generic_stmt (rhs1);
75a70cf9 3917 return true;
3918 }
b898ce29 3919 return res || verify_types_in_gimple_reference (rhs1, false);
f03e0ae4 3920
ec6b51ed 3921 /* tcc_constant */
3922 case SSA_NAME:
3923 case INTEGER_CST:
3924 case REAL_CST:
3925 case FIXED_CST:
3926 case COMPLEX_CST:
3927 case VECTOR_CST:
3928 case STRING_CST:
3929 return res;
3930
3931 /* tcc_declaration */
3932 case CONST_DECL:
3933 return res;
3934 case VAR_DECL:
3935 case PARM_DECL:
3936 if (!is_gimple_reg (lhs)
3937 && !is_gimple_reg (rhs1)
3938 && is_gimple_reg_type (TREE_TYPE (lhs)))
23c07107 3939 {
ec6b51ed 3940 error ("invalid rhs for gimple memory store");
3941 debug_generic_stmt (lhs);
3942 debug_generic_stmt (rhs1);
23c07107 3943 return true;
3944 }
ec6b51ed 3945 return res;
f03e0ae4 3946
ec6b51ed 3947 case COND_EXPR:
3948 case CONSTRUCTOR:
3949 case OBJ_TYPE_REF:
3950 case ASSERT_EXPR:
3951 case WITH_SIZE_EXPR:
3952 case EXC_PTR_EXPR:
3953 case FILTER_EXPR:
3954 case POLYNOMIAL_CHREC:
3955 case DOT_PROD_EXPR:
3956 case VEC_COND_EXPR:
3957 case REALIGN_LOAD_EXPR:
3958 /* FIXME. */
3959 return res;
f03e0ae4 3960
75a70cf9 3961 default:;
f03e0ae4 3962 }
3963
ec6b51ed 3964 return res;
f03e0ae4 3965}
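
/* Illustrative note, not compiled: a memory-to-memory copy of a
   register-type value, e.g.
       a.x = b.x;               <-- x of type int, neither side a register
   is rejected above as an "invalid rhs for gimple memory store"; the
   gimplifier uses a temporary instead:
       tmp_1 = b.x;
       a.x = tmp_1;
   Stores of constants or registers, and aggregate copies, pass.  */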
3966
ec6b51ed 3967/* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3968 is a problem, otherwise false. */
3969
3970static bool
3971verify_gimple_assign (gimple stmt)
3972{
3973 switch (gimple_assign_rhs_class (stmt))
3974 {
3975 case GIMPLE_SINGLE_RHS:
3976 return verify_gimple_assign_single (stmt);
3977
3978 case GIMPLE_UNARY_RHS:
3979 return verify_gimple_assign_unary (stmt);
3980
3981 case GIMPLE_BINARY_RHS:
3982 return verify_gimple_assign_binary (stmt);
3983
3984 default:
3985 gcc_unreachable ();
3986 }
3987}
75a70cf9 3988
3989/* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3990 is a problem, otherwise false. */
f03e0ae4 3991
3992static bool
d266354c 3993verify_gimple_return (gimple stmt)
f03e0ae4 3994{
75a70cf9 3995 tree op = gimple_return_retval (stmt);
d266354c 3996 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
75a70cf9 3997
d266354c 3998 /* We cannot check that a return value is present, since missing
 3999 return values from the original source are not fixed up.  */
75a70cf9 4000 if (op == NULL)
4001 return false;
d266354c 4002
4003 if (!is_gimple_val (op)
4004 && TREE_CODE (op) != RESULT_DECL)
4005 {
4006 error ("invalid operand in return statement");
4007 debug_generic_stmt (op);
4008 return true;
4009 }
4010
4011 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
4012 /* ??? With C++ we can have the situation that the result
4013 decl is a reference type while the return type is an aggregate. */
4014 && !(TREE_CODE (op) == RESULT_DECL
4015 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
4016 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
4017 {
4018 error ("invalid conversion in return statement");
4019 debug_generic_stmt (restype);
4020 debug_generic_stmt (TREE_TYPE (op));
4021 return true;
4022 }
4023
4024 return false;
75a70cf9 4025}
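
/* Illustrative note, not compiled: for "int f (void)" the statement
       return x_1;              <-- x_1 of type int
   is accepted, whereas returning a value whose type is not trivially
   convertible to the declared result type yields the "invalid
   conversion in return statement" error.  A bare "return;" always
   passes, as noted above.  */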
f03e0ae4 4026
f03e0ae4 4027
d266354c 4028/* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4029 is a problem, otherwise false. */
4030
4031static bool
4032verify_gimple_goto (gimple stmt)
4033{
4034 tree dest = gimple_goto_dest (stmt);
4035
4036 /* ??? We have two canonical forms of direct goto destinations, a
4037 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4038 if (TREE_CODE (dest) != LABEL_DECL
4039 && (!is_gimple_val (dest)
4040 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4041 {
4042 error ("goto destination is neither a label nor a pointer");
4043 return true;
4044 }
4045
4046 return false;
4047}
4048
75a70cf9 4049/* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4050 is a problem, otherwise false. */
4051
4052static bool
d266354c 4053verify_gimple_switch (gimple stmt)
75a70cf9 4054{
4055 if (!is_gimple_val (gimple_switch_index (stmt)))
f03e0ae4 4056 {
75a70cf9 4057 error ("invalid operand to switch statement");
d266354c 4058 debug_generic_stmt (gimple_switch_index (stmt));
f03e0ae4 4059 return true;
4060 }
4061
75a70cf9 4062 return false;
4063}
f03e0ae4 4064
f03e0ae4 4065
75a70cf9 4066/* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4067 and false otherwise. */
f03e0ae4 4068
75a70cf9 4069static bool
d266354c 4070verify_gimple_phi (gimple stmt)
75a70cf9 4071{
d266354c 4072 tree type = TREE_TYPE (gimple_phi_result (stmt));
4073 unsigned i;
f03e0ae4 4074
d266354c 4075 if (!is_gimple_variable (gimple_phi_result (stmt)))
4076 {
4077 error ("Invalid PHI result");
4078 return true;
4079 }
f03e0ae4 4080
75a70cf9 4081 for (i = 0; i < gimple_phi_num_args (stmt); i++)
d266354c 4082 {
4083 tree arg = gimple_phi_arg_def (stmt, i);
ec6b51ed 4084 if ((is_gimple_reg (gimple_phi_result (stmt))
4085 && !is_gimple_val (arg))
4086 || (!is_gimple_reg (gimple_phi_result (stmt))
4087 && !is_gimple_addressable (arg)))
d266354c 4088 {
4089 error ("Invalid PHI argument");
4090 debug_generic_stmt (arg);
4091 return true;
4092 }
4093 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
4094 {
87f9ffa4 4095 error ("Incompatible types in PHI argument %u", i);
d266354c 4096 debug_generic_stmt (type);
4097 debug_generic_stmt (TREE_TYPE (arg));
4098 return true;
4099 }
4100 }
75a70cf9 4101
f03e0ae4 4102 return false;
4103}
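
/* Illustrative note, not compiled: a well-formed PHI such as
       x_4 = PHI <x_2(3), x_3(5)>
   passes the checks above because the result is an SSA name and every
   argument type is trivially convertible to the result type; mixing in
   an argument of an unrelated type triggers "Incompatible types in PHI
   argument".  */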
4104
75a70cf9 4105
f03e0ae4 4106/* Verify the GIMPLE statement STMT. Returns true if there is an
4107 error, otherwise false. */
4108
4109static bool
75a70cf9 4110verify_types_in_gimple_stmt (gimple stmt)
f03e0ae4 4111{
75a70cf9 4112 if (is_gimple_omp (stmt))
f03e0ae4 4113 {
4114 /* OpenMP directives are validated by the FE and never operated
75a70cf9 4115 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
f03e0ae4 4116 non-gimple expressions when the main index variable has had
4117 its address taken. This does not affect the loop itself
75a70cf9 4118 because the header of a GIMPLE_OMP_FOR is merely used to determine
f03e0ae4 4119 how to set up the parallel iteration. */
4120 return false;
4121 }
4122
75a70cf9 4123 switch (gimple_code (stmt))
f03e0ae4 4124 {
75a70cf9 4125 case GIMPLE_ASSIGN:
ec6b51ed 4126 return verify_gimple_assign (stmt);
f03e0ae4 4127
75a70cf9 4128 case GIMPLE_LABEL:
4129 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
f03e0ae4 4130
75a70cf9 4131 case GIMPLE_CALL:
d266354c 4132 return verify_gimple_call (stmt);
f03e0ae4 4133
75a70cf9 4134 case GIMPLE_COND:
d266354c 4135 return verify_gimple_comparison (boolean_type_node,
4136 gimple_cond_lhs (stmt),
4137 gimple_cond_rhs (stmt));
f03e0ae4 4138
75a70cf9 4139 case GIMPLE_GOTO:
d266354c 4140 return verify_gimple_goto (stmt);
f03e0ae4 4141
75a70cf9 4142 case GIMPLE_SWITCH:
d266354c 4143 return verify_gimple_switch (stmt);
f03e0ae4 4144
75a70cf9 4145 case GIMPLE_RETURN:
d266354c 4146 return verify_gimple_return (stmt);
f03e0ae4 4147
75a70cf9 4148 case GIMPLE_ASM:
f03e0ae4 4149 return false;
4150
75a70cf9 4151 case GIMPLE_PHI:
d266354c 4152 return verify_gimple_phi (stmt);
4153
4154 /* Tuples that do not have tree operands. */
4155 case GIMPLE_NOP:
4156 case GIMPLE_RESX:
4157 case GIMPLE_PREDICT:
4158 return false;
75a70cf9 4159
f03e0ae4 4160 default:
4161 gcc_unreachable ();
4162 }
4163}
4164
75a70cf9 4165/* Verify the GIMPLE statements inside the sequence STMTS. */
f03e0ae4 4166
2f519297 4167static bool
75a70cf9 4168verify_types_in_gimple_seq_2 (gimple_seq stmts)
f03e0ae4 4169{
75a70cf9 4170 gimple_stmt_iterator ittr;
2f519297 4171 bool err = false;
f03e0ae4 4172
75a70cf9 4173 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
f03e0ae4 4174 {
75a70cf9 4175 gimple stmt = gsi_stmt (ittr);
f03e0ae4 4176
75a70cf9 4177 switch (gimple_code (stmt))
4178 {
d266354c 4179 case GIMPLE_BIND:
4180 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
4181 break;
4182
4183 case GIMPLE_TRY:
4184 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
4185 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
4186 break;
4187
4188 case GIMPLE_EH_FILTER:
4189 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
4190 break;
4191
4192 case GIMPLE_CATCH:
4193 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
4194 break;
f03e0ae4 4195
4196 default:
2f519297 4197 {
75a70cf9 4198 bool err2 = verify_types_in_gimple_stmt (stmt);
2f519297 4199 if (err2)
75a70cf9 4200 debug_gimple_stmt (stmt);
2f519297 4201 err |= err2;
4202 }
f03e0ae4 4203 }
4204 }
2f519297 4205
4206 return err;
4207}
4208
4209
4210/* Verify the GIMPLE statements inside the statement list STMTS. */
4211
4212void
75a70cf9 4213verify_types_in_gimple_seq (gimple_seq stmts)
2f519297 4214{
75a70cf9 4215 if (verify_types_in_gimple_seq_2 (stmts))
2f519297 4216 internal_error ("verify_gimple failed");
f03e0ae4 4217}
4218
4ee9c684 4219
 4220/* Verify the statement pointed to by GSI.  Return true if it is not
 4221 in GIMPLE form.  TODO: Implement type checking. */
4222
4223static bool
75a70cf9 4224verify_stmt (gimple_stmt_iterator *gsi)
4ee9c684 4225{
4226 tree addr;
75a70cf9 4227 struct walk_stmt_info wi;
4228 bool last_in_block = gsi_one_before_end_p (*gsi);
4229 gimple stmt = gsi_stmt (*gsi);
4ee9c684 4230
75a70cf9 4231 if (is_gimple_omp (stmt))
773c5ba7 4232 {
4233 /* OpenMP directives are validated by the FE and never operated
75a70cf9 4234 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
773c5ba7 4235 non-gimple expressions when the main index variable has had
4236 its address taken. This does not affect the loop itself
75a70cf9 4237 because the header of a GIMPLE_OMP_FOR is merely used to determine
773c5ba7 4238 how to set up the parallel iteration. */
4239 return false;
4240 }
4241
75a70cf9 4242 /* FIXME. The C frontend passes unpromoted arguments in case it
4243 didn't see a function declaration before the call. */
4244 if (is_gimple_call (stmt))
4ee9c684 4245 {
0acacf9e 4246 tree decl;
75a70cf9 4247
0acacf9e 4248 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4249 {
4250 error ("invalid function in call statement");
4251 return true;
4252 }
4253
4254 decl = gimple_call_fndecl (stmt);
4255 if (decl
4256 && TREE_CODE (decl) == FUNCTION_DECL
75a70cf9 4257 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4258 && (!DECL_PURE_P (decl))
4259 && (!TREE_READONLY (decl)))
4260 {
4261 error ("invalid pure const state for function");
4262 return true;
4263 }
4ee9c684 4264 }
4265
75a70cf9 4266 memset (&wi, 0, sizeof (wi));
4267 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
4ee9c684 4268 if (addr)
4269 {
75a70cf9 4270 debug_generic_expr (addr);
e60a6f7b 4271 inform (gimple_location (gsi_stmt (*gsi)), "in statement");
75a70cf9 4272 debug_gimple_stmt (stmt);
4ee9c684 4273 return true;
4274 }
4275
35c15734 4276 /* If the statement is marked as part of an EH region, then it is
4277 expected that the statement could throw. Verify that when we
4278 have optimizations that simplify statements such that we prove
4279 that they cannot throw, that we update other data structures
4280 to match. */
4281 if (lookup_stmt_eh_region (stmt) >= 0)
4282 {
17b28e52 4283 /* During IPA passes, ipa-pure-const sets nothrow flags on calls
4284 and they are updated on statements only after fixup_cfg
 4285 is executed at the beginning of the expansion stage. */
4286 if (!stmt_could_throw_p (stmt) && cgraph_state != CGRAPH_STATE_IPA_SSA)
35c15734 4287 {
0a81f5a0 4288 error ("statement marked for throw, but doesn%'t");
35c15734 4289 goto fail;
4290 }
75a70cf9 4291 if (!last_in_block && stmt_can_throw_internal (stmt))
35c15734 4292 {
0a81f5a0 4293 error ("statement marked for throw in middle of block");
35c15734 4294 goto fail;
4295 }
4296 }
4297
4ee9c684 4298 return false;
35c15734 4299
4300 fail:
75a70cf9 4301 debug_gimple_stmt (stmt);
35c15734 4302 return true;
4ee9c684 4303}
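
/* Illustrative note: if an optimization simplifies a statement so that
   it provably cannot throw but leaves its entry in the EH table, the
   "statement marked for throw, but doesn't" check above reports the
   stale entry (outside the IPA window described in the comment).  */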
4304
4305
 4306/* Return true when T can be shared. */
4307
4308static bool
4309tree_node_can_be_shared (tree t)
4310{
ce45a448 4311 if (IS_TYPE_OR_DECL_P (t)
4ee9c684 4312 || is_gimple_min_invariant (t)
9b63f7c4 4313 || TREE_CODE (t) == SSA_NAME
1e8e9920 4314 || t == error_mark_node
4315 || TREE_CODE (t) == IDENTIFIER_NODE)
4ee9c684 4316 return true;
4317
194ae074 4318 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4319 return true;
4320
6374121b 4321 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1e8e9920 4322 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4323 || TREE_CODE (t) == COMPONENT_REF
4324 || TREE_CODE (t) == REALPART_EXPR
4325 || TREE_CODE (t) == IMAGPART_EXPR)
4ee9c684 4326 t = TREE_OPERAND (t, 0);
4327
4328 if (DECL_P (t))
4329 return true;
4330
4331 return false;
4332}
4333
4334
75a70cf9 4335/* Called via walk_gimple_stmt. Verify tree sharing. */
4ee9c684 4336
4337static tree
75a70cf9 4338verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4ee9c684 4339{
75a70cf9 4340 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4341 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4ee9c684 4342
4343 if (tree_node_can_be_shared (*tp))
4344 {
4345 *walk_subtrees = false;
4346 return NULL;
4347 }
4348
836074a5 4349 if (pointer_set_insert (visited, *tp))
4350 return *tp;
4ee9c684 4351
4352 return NULL;
4353}
4354
4355
836074a5 4356static bool eh_error_found;
4357static int
4358verify_eh_throw_stmt_node (void **slot, void *data)
4359{
4360 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4361 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4362
4363 if (!pointer_set_contains (visited, node->stmt))
4364 {
4365 error ("Dead STMT in EH table");
75a70cf9 4366 debug_gimple_stmt (node->stmt);
836074a5 4367 eh_error_found = true;
4368 }
7ec44cf1 4369 return 1;
836074a5 4370}
4371
75a70cf9 4372
4373/* Verify the GIMPLE statements in every basic block. */
4ee9c684 4374
4375void
4376verify_stmts (void)
4377{
4378 basic_block bb;
75a70cf9 4379 gimple_stmt_iterator gsi;
4ee9c684 4380 bool err = false;
836074a5 4381 struct pointer_set_t *visited, *visited_stmts;
4ee9c684 4382 tree addr;
75a70cf9 4383 struct walk_stmt_info wi;
4ee9c684 4384
4385 timevar_push (TV_TREE_STMT_VERIFY);
836074a5 4386 visited = pointer_set_create ();
4387 visited_stmts = pointer_set_create ();
4ee9c684 4388
75a70cf9 4389 memset (&wi, 0, sizeof (wi));
4390 wi.info = (void *) visited;
4391
4ee9c684 4392 FOR_EACH_BB (bb)
4393 {
75a70cf9 4394 gimple phi;
4395 size_t i;
4ee9c684 4396
75a70cf9 4397 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 4398 {
75a70cf9 4399 phi = gsi_stmt (gsi);
836074a5 4400 pointer_set_insert (visited_stmts, phi);
75a70cf9 4401 if (gimple_bb (phi) != bb)
973bf48f 4402 {
75a70cf9 4403 error ("gimple_bb (phi) is set to a wrong basic block");
973bf48f 4404 err |= true;
4405 }
4406
75a70cf9 4407 for (i = 0; i < gimple_phi_num_args (phi); i++)
4ee9c684 4408 {
75a70cf9 4409 tree t = gimple_phi_arg_def (phi, i);
4ee9c684 4410 tree addr;
4411
fd71b542 4412 if (!t)
4413 {
4414 error ("missing PHI def");
75a70cf9 4415 debug_gimple_stmt (phi);
fd71b542 4416 err |= true;
4417 continue;
4418 }
4ee9c684 4419 /* Addressable variables do have SSA_NAMEs but they
4420 are not considered gimple values. */
fd71b542 4421 else if (TREE_CODE (t) != SSA_NAME
4422 && TREE_CODE (t) != FUNCTION_DECL
d3ec693f 4423 && !is_gimple_min_invariant (t))
4ee9c684 4424 {
75a70cf9 4425 error ("PHI argument is not a GIMPLE value");
4426 debug_gimple_stmt (phi);
4427 debug_generic_expr (t);
4ee9c684 4428 err |= true;
4429 }
4430
836074a5 4431 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4ee9c684 4432 if (addr)
4433 {
0a81f5a0 4434 error ("incorrect sharing of tree nodes");
75a70cf9 4435 debug_gimple_stmt (phi);
4436 debug_generic_expr (addr);
4ee9c684 4437 err |= true;
4438 }
4439 }
ae2a9b00 4440
4441#ifdef ENABLE_TYPES_CHECKING
4442 if (verify_gimple_phi (phi))
4443 {
4444 debug_gimple_stmt (phi);
4445 err |= true;
4446 }
4447#endif
4ee9c684 4448 }
4449
75a70cf9 4450 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4ee9c684 4451 {
75a70cf9 4452 gimple stmt = gsi_stmt (gsi);
4453
4454 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4455 || gimple_code (stmt) == GIMPLE_BIND)
4456 {
4457 error ("invalid GIMPLE statement");
4458 debug_gimple_stmt (stmt);
4459 err |= true;
4460 }
973bf48f 4461
836074a5 4462 pointer_set_insert (visited_stmts, stmt);
35cc02b5 4463
75a70cf9 4464 if (gimple_bb (stmt) != bb)
973bf48f 4465 {
75a70cf9 4466 error ("gimple_bb (stmt) is set to a wrong basic block");
2c2eb7fa 4467 debug_gimple_stmt (stmt);
973bf48f 4468 err |= true;
4469 }
4470
75a70cf9 4471 if (gimple_code (stmt) == GIMPLE_LABEL)
4472 {
4473 tree decl = gimple_label_label (stmt);
4474 int uid = LABEL_DECL_UID (decl);
4475
4476 if (uid == -1
4477 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4478 {
4479 error ("incorrect entry in label_to_block_map.\n");
4480 err |= true;
4481 }
4482 }
4483
4484 err |= verify_stmt (&gsi);
ae2a9b00 4485
4486#ifdef ENABLE_TYPES_CHECKING
4487 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4488 {
4489 debug_gimple_stmt (stmt);
4490 err |= true;
4491 }
4492#endif
75a70cf9 4493 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4ee9c684 4494 if (addr)
4495 {
0a81f5a0 4496 error ("incorrect sharing of tree nodes");
75a70cf9 4497 debug_gimple_stmt (stmt);
4498 debug_generic_expr (addr);
4ee9c684 4499 err |= true;
4500 }
75a70cf9 4501 gsi_next (&gsi);
4ee9c684 4502 }
4503 }
75a70cf9 4504
836074a5 4505 eh_error_found = false;
4506 if (get_eh_throw_stmt_table (cfun))
4507 htab_traverse (get_eh_throw_stmt_table (cfun),
4508 verify_eh_throw_stmt_node,
4509 visited_stmts);
4ee9c684 4510
836074a5 4511 if (err | eh_error_found)
0a81f5a0 4512 internal_error ("verify_stmts failed");
4ee9c684 4513
836074a5 4514 pointer_set_destroy (visited);
4515 pointer_set_destroy (visited_stmts);
4992f399 4516 verify_histograms ();
4ee9c684 4517 timevar_pop (TV_TREE_STMT_VERIFY);
4518}
4519
4520
4521/* Verifies that the flow information is OK. */
4522
4523static int
75a70cf9 4524gimple_verify_flow_info (void)
4ee9c684 4525{
4526 int err = 0;
4527 basic_block bb;
75a70cf9 4528 gimple_stmt_iterator gsi;
4529 gimple stmt;
4ee9c684 4530 edge e;
cd665a06 4531 edge_iterator ei;
4ee9c684 4532
75a70cf9 4533 if (ENTRY_BLOCK_PTR->il.gimple)
4ee9c684 4534 {
c23dad79 4535 error ("ENTRY_BLOCK has IL associated with it");
4ee9c684 4536 err = 1;
4537 }
4538
75a70cf9 4539 if (EXIT_BLOCK_PTR->il.gimple)
4ee9c684 4540 {
c23dad79 4541 error ("EXIT_BLOCK has IL associated with it");
4ee9c684 4542 err = 1;
4543 }
4544
cd665a06 4545 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4ee9c684 4546 if (e->flags & EDGE_FALLTHRU)
4547 {
0a81f5a0 4548 error ("fallthru to exit from bb %d", e->src->index);
4ee9c684 4549 err = 1;
4550 }
4551
4552 FOR_EACH_BB (bb)
4553 {
4554 bool found_ctrl_stmt = false;
4555
75a70cf9 4556 stmt = NULL;
818dddca 4557
4ee9c684 4558 /* Skip labels on the start of basic block. */
75a70cf9 4559 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 4560 {
75a70cf9 4561 tree label;
4562 gimple prev_stmt = stmt;
818dddca 4563
75a70cf9 4564 stmt = gsi_stmt (gsi);
818dddca 4565
75a70cf9 4566 if (gimple_code (stmt) != GIMPLE_LABEL)
4ee9c684 4567 break;
4568
75a70cf9 4569 label = gimple_label_label (stmt);
4570 if (prev_stmt && DECL_NONLOCAL (label))
818dddca 4571 {
1e8e9920 4572 error ("nonlocal label ");
75a70cf9 4573 print_generic_expr (stderr, label, 0);
1e8e9920 4574 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4575 bb->index);
818dddca 4576 err = 1;
4577 }
4578
75a70cf9 4579 if (label_to_block (label) != bb)
4ee9c684 4580 {
1e8e9920 4581 error ("label ");
75a70cf9 4582 print_generic_expr (stderr, label, 0);
1e8e9920 4583 fprintf (stderr, " to block does not match in bb %d",
4584 bb->index);
4ee9c684 4585 err = 1;
4586 }
4587
75a70cf9 4588 if (decl_function_context (label) != current_function_decl)
4ee9c684 4589 {
1e8e9920 4590 error ("label ");
75a70cf9 4591 print_generic_expr (stderr, label, 0);
1e8e9920 4592 fprintf (stderr, " has incorrect context in bb %d",
4593 bb->index);
4ee9c684 4594 err = 1;
4595 }
4596 }
4597
4598 /* Verify that body of basic block BB is free of control flow. */
75a70cf9 4599 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 4600 {
75a70cf9 4601 gimple stmt = gsi_stmt (gsi);
4ee9c684 4602
4603 if (found_ctrl_stmt)
4604 {
0a81f5a0 4605 error ("control flow in the middle of basic block %d",
4ee9c684 4606 bb->index);
4607 err = 1;
4608 }
4609
4610 if (stmt_ends_bb_p (stmt))
4611 found_ctrl_stmt = true;
4612
75a70cf9 4613 if (gimple_code (stmt) == GIMPLE_LABEL)
4ee9c684 4614 {
1e8e9920 4615 error ("label ");
75a70cf9 4616 print_generic_expr (stderr, gimple_label_label (stmt), 0);
1e8e9920 4617 fprintf (stderr, " in the middle of basic block %d", bb->index);
4ee9c684 4618 err = 1;
4619 }
4620 }
1e8e9920 4621
75a70cf9 4622 gsi = gsi_last_bb (bb);
4623 if (gsi_end_p (gsi))
4ee9c684 4624 continue;
4625
75a70cf9 4626 stmt = gsi_stmt (gsi);
4ee9c684 4627
b4ba5e9d 4628 err |= verify_eh_edges (stmt);
4629
4ee9c684 4630 if (is_ctrl_stmt (stmt))
4631 {
cd665a06 4632 FOR_EACH_EDGE (e, ei, bb->succs)
4ee9c684 4633 if (e->flags & EDGE_FALLTHRU)
4634 {
0a81f5a0 4635 error ("fallthru edge after a control statement in bb %d",
4ee9c684 4636 bb->index);
4637 err = 1;
4638 }
4639 }
4640
75a70cf9 4641 if (gimple_code (stmt) != GIMPLE_COND)
71cfcaa2 4642 {
4643 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4644 after anything else but if statement. */
4645 FOR_EACH_EDGE (e, ei, bb->succs)
4646 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4647 {
75a70cf9 4648 error ("true/false edge after a non-GIMPLE_COND in bb %d",
71cfcaa2 4649 bb->index);
4650 err = 1;
4651 }
4652 }
4653
75a70cf9 4654 switch (gimple_code (stmt))
4ee9c684 4655 {
75a70cf9 4656 case GIMPLE_COND:
4ee9c684 4657 {
4658 edge true_edge;
4659 edge false_edge;
63f88450 4660
4ee9c684 4661 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4662
75a70cf9 4663 if (!true_edge
4664 || !false_edge
4ee9c684 4665 || !(true_edge->flags & EDGE_TRUE_VALUE)
4666 || !(false_edge->flags & EDGE_FALSE_VALUE)
4667 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4668 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
cd665a06 4669 || EDGE_COUNT (bb->succs) >= 3)
4ee9c684 4670 {
0a81f5a0 4671 error ("wrong outgoing edge flags at end of bb %d",
4ee9c684 4672 bb->index);
4673 err = 1;
4674 }
4ee9c684 4675 }
4676 break;
4677
75a70cf9 4678 case GIMPLE_GOTO:
4ee9c684 4679 if (simple_goto_p (stmt))
4680 {
0a81f5a0 4681 error ("explicit goto at end of bb %d", bb->index);
13b96211 4682 err = 1;
4ee9c684 4683 }
4684 else
4685 {
13b96211 4686 /* FIXME. We should double check that the labels in the
4ee9c684 4687 destination blocks have their address taken. */
cd665a06 4688 FOR_EACH_EDGE (e, ei, bb->succs)
4ee9c684 4689 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4690 | EDGE_FALSE_VALUE))
4691 || !(e->flags & EDGE_ABNORMAL))
4692 {
0a81f5a0 4693 error ("wrong outgoing edge flags at end of bb %d",
4ee9c684 4694 bb->index);
4695 err = 1;
4696 }
4697 }
4698 break;
4699
75a70cf9 4700 case GIMPLE_RETURN:
ea091dfd 4701 if (!single_succ_p (bb)
4702 || (single_succ_edge (bb)->flags
4703 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4704 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4ee9c684 4705 {
0a81f5a0 4706 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4ee9c684 4707 err = 1;
4708 }
ea091dfd 4709 if (single_succ (bb) != EXIT_BLOCK_PTR)
4ee9c684 4710 {
0a81f5a0 4711 error ("return edge does not point to exit in bb %d",
4ee9c684 4712 bb->index);
4713 err = 1;
4714 }
4715 break;
4716
75a70cf9 4717 case GIMPLE_SWITCH:
4ee9c684 4718 {
eb3b7946 4719 tree prev;
4ee9c684 4720 edge e;
4721 size_t i, n;
4ee9c684 4722
75a70cf9 4723 n = gimple_switch_num_labels (stmt);
4ee9c684 4724
4725 /* Mark all the destination basic blocks. */
4726 for (i = 0; i < n; ++i)
4727 {
75a70cf9 4728 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4ee9c684 4729 basic_block label_bb = label_to_block (lab);
8c0963c4 4730 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4ee9c684 4731 label_bb->aux = (void *)1;
4732 }
4733
eb3b7946 4734 /* Verify that the case labels are sorted. */
75a70cf9 4735 prev = gimple_switch_label (stmt, 0);
72c30859 4736 for (i = 1; i < n; ++i)
eb3b7946 4737 {
75a70cf9 4738 tree c = gimple_switch_label (stmt, i);
4739 if (!CASE_LOW (c))
eb3b7946 4740 {
75a70cf9 4741 error ("found default case not at the start of "
4742 "case vector");
4743 err = 1;
eb3b7946 4744 continue;
4745 }
75a70cf9 4746 if (CASE_LOW (prev)
4747 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
eb3b7946 4748 {
1e8e9920 4749 error ("case labels not sorted: ");
eb3b7946 4750 print_generic_expr (stderr, prev, 0);
4751 fprintf (stderr," is greater than ");
4752 print_generic_expr (stderr, c, 0);
4753 fprintf (stderr," but comes before it.\n");
4754 err = 1;
4755 }
4756 prev = c;
4757 }
72c30859 4758 /* VRP will remove the default case if it can prove it will
4759 never be executed. So do not verify there always exists
4760 a default case here. */
eb3b7946 4761
cd665a06 4762 FOR_EACH_EDGE (e, ei, bb->succs)
4ee9c684 4763 {
4764 if (!e->dest->aux)
4765 {
0a81f5a0 4766 error ("extra outgoing edge %d->%d",
4ee9c684 4767 bb->index, e->dest->index);
4768 err = 1;
4769 }
75a70cf9 4770
4ee9c684 4771 e->dest->aux = (void *)2;
4772 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4773 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4774 {
0a81f5a0 4775 error ("wrong outgoing edge flags at end of bb %d",
4ee9c684 4776 bb->index);
4777 err = 1;
4778 }
4779 }
4780
4781 /* Check that we have all of them. */
4782 for (i = 0; i < n; ++i)
4783 {
75a70cf9 4784 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4ee9c684 4785 basic_block label_bb = label_to_block (lab);
4786
4787 if (label_bb->aux != (void *)2)
4788 {
75a70cf9 4789 error ("missing edge %i->%i", bb->index, label_bb->index);
4ee9c684 4790 err = 1;
4791 }
4792 }
4793
cd665a06 4794 FOR_EACH_EDGE (e, ei, bb->succs)
4ee9c684 4795 e->dest->aux = (void *)0;
4796 }
4797
4798 default: ;
4799 }
4800 }
4801
50b08d37 4802 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4ee9c684 4803 verify_dominators (CDI_DOMINATORS);
4804
4805 return err;
4806}
4807
4808
ebeaefa4 4809/* Updates phi nodes after creating a forwarder block joined
4ee9c684 4810 by edge FALLTHRU. */
4811
4812static void
75a70cf9 4813gimple_make_forwarder_block (edge fallthru)
4ee9c684 4814{
4815 edge e;
cd665a06 4816 edge_iterator ei;
4ee9c684 4817 basic_block dummy, bb;
75a70cf9 4818 tree var;
4819 gimple_stmt_iterator gsi;
4ee9c684 4820
4821 dummy = fallthru->src;
4822 bb = fallthru->dest;
4823
ea091dfd 4824 if (single_pred_p (bb))
4ee9c684 4825 return;
4826
de6ed584 4827 /* If we redirected a branch we must create new PHI nodes at the
4ee9c684 4828 start of BB. */
75a70cf9 4829 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 4830 {
75a70cf9 4831 gimple phi, new_phi;
4832
4833 phi = gsi_stmt (gsi);
4834 var = gimple_phi_result (phi);
4ee9c684 4835 new_phi = create_phi_node (var, bb);
4836 SSA_NAME_DEF_STMT (var) = new_phi;
75a70cf9 4837 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4838 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru);
4ee9c684 4839 }
4840
4ee9c684 4841 /* Add the arguments we have stored on edges. */
cd665a06 4842 FOR_EACH_EDGE (e, ei, bb->preds)
4ee9c684 4843 {
4844 if (e == fallthru)
4845 continue;
4846
44a46103 4847 flush_pending_stmts (e);
4ee9c684 4848 }
4849}
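
/* Illustrative note: after the loop above, the PHIs stay in the
   forwarder block DUMMY but receive fresh result names, while BB gets
   new PHIs that keep the original results and take the forwarder's
   fresh name as their argument on FALLTHRU; arguments for the other
   predecessor edges come from flush_pending_stmts.  */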
4850
4851
4ee9c684 4852/* Return a non-special label at the head of basic block BB.
4853 Create one if it doesn't exist. */
4854
3aaaf63f 4855tree
75a70cf9 4856gimple_block_label (basic_block bb)
4ee9c684 4857{
75a70cf9 4858 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4ee9c684 4859 bool first = true;
75a70cf9 4860 tree label;
4861 gimple stmt;
4ee9c684 4862
75a70cf9 4863 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4ee9c684 4864 {
75a70cf9 4865 stmt = gsi_stmt (i);
4866 if (gimple_code (stmt) != GIMPLE_LABEL)
4ee9c684 4867 break;
75a70cf9 4868 label = gimple_label_label (stmt);
4ee9c684 4869 if (!DECL_NONLOCAL (label))
4870 {
4871 if (!first)
75a70cf9 4872 gsi_move_before (&i, &s);
4ee9c684 4873 return label;
4874 }
4875 }
4876
e60a6f7b 4877 label = create_artificial_label (UNKNOWN_LOCATION);
75a70cf9 4878 stmt = gimple_build_label (label);
4879 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4ee9c684 4880 return label;
4881}
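
/* Illustrative note: the redirection code below relies on this helper.
   For instance, retargeting a switch case to block DEST first obtains
   (or creates) DEST's label via gimple_block_label and then rewrites
   the affected CASE_LABELs, as done in
   gimple_redirect_edge_and_branch.  */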
4882
4883
4884/* Attempt to perform edge redirection by replacing a possibly complex
4885 jump instruction by a goto or by removing the jump completely.
4886 This can apply only if all edges now point to the same block. The
4887 parameters and return values are equivalent to
4888 redirect_edge_and_branch. */
4889
4890static edge
75a70cf9 4891gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4ee9c684 4892{
4893 basic_block src = e->src;
75a70cf9 4894 gimple_stmt_iterator i;
4895 gimple stmt;
4ee9c684 4896
7fca6166 4897 /* We can replace or remove a complex jump only when we have exactly
4898 two edges. */
4899 if (EDGE_COUNT (src->succs) != 2
4900 /* Verify that all targets will be TARGET. Specifically, the
4901 edge that is not E must also go to TARGET. */
4902 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4ee9c684 4903 return NULL;
4904
75a70cf9 4905 i = gsi_last_bb (src);
4906 if (gsi_end_p (i))
4ee9c684 4907 return NULL;
4ee9c684 4908
75a70cf9 4909 stmt = gsi_stmt (i);
4910
4911 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4ee9c684 4912 {
75a70cf9 4913 gsi_remove (&i, true);
4ee9c684 4914 e = ssa_redirect_edge (e, target);
4915 e->flags = EDGE_FALLTHRU;
4916 return e;
4917 }
4918
4919 return NULL;
4920}
4921
4922
4923/* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4924 edge representing the redirected branch. */
4925
4926static edge
75a70cf9 4927gimple_redirect_edge_and_branch (edge e, basic_block dest)
4ee9c684 4928{
4929 basic_block bb = e->src;
75a70cf9 4930 gimple_stmt_iterator gsi;
4ee9c684 4931 edge ret;
75a70cf9 4932 gimple stmt;
4ee9c684 4933
2c8a1497 4934 if (e->flags & EDGE_ABNORMAL)
4ee9c684 4935 return NULL;
4936
13b96211 4937 if (e->src != ENTRY_BLOCK_PTR
75a70cf9 4938 && (ret = gimple_try_redirect_by_replacing_jump (e, dest)))
4ee9c684 4939 return ret;
4940
4941 if (e->dest == dest)
4942 return NULL;
4943
927a6b6b 4944 if (e->flags & EDGE_EH)
4945 return redirect_eh_edge (e, dest);
4946
75a70cf9 4947 gsi = gsi_last_bb (bb);
4948 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4ee9c684 4949
6163a125 4950 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
4ee9c684 4951 {
75a70cf9 4952 case GIMPLE_COND:
63f88450 4953 /* For COND_EXPR, we only need to redirect the edge. */
4ee9c684 4954 break;
4955
75a70cf9 4956 case GIMPLE_GOTO:
4ee9c684 4957 /* No non-abnormal edges should lead from a non-simple goto, and
4958 simple ones should be represented implicitly. */
8c0963c4 4959 gcc_unreachable ();
4ee9c684 4960
75a70cf9 4961 case GIMPLE_SWITCH:
4ee9c684 4962 {
75a70cf9 4963 tree label = gimple_block_label (dest);
6ff867cc 4964 tree cases = get_cases_for_edge (e, stmt);
4ee9c684 4965
6ff867cc 4966 /* If we have a list of cases associated with E, then use it
4967 as it's a lot faster than walking the entire case vector. */
4968 if (cases)
4ee9c684 4969 {
d5ec53c7 4970 edge e2 = find_edge (e->src, dest);
6ff867cc 4971 tree last, first;
4972
4973 first = cases;
4974 while (cases)
4975 {
4976 last = cases;
4977 CASE_LABEL (cases) = label;
4978 cases = TREE_CHAIN (cases);
4979 }
4980
4981 /* If there was already an edge in the CFG, then we need
4982 to move all the cases associated with E to E2. */
4983 if (e2)
4984 {
4985 tree cases2 = get_cases_for_edge (e2, stmt);
4986
4987 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4988 TREE_CHAIN (cases2) = first;
4989 }
4ee9c684 4990 }
194ae074 4991 else
4992 {
75a70cf9 4993 size_t i, n = gimple_switch_num_labels (stmt);
6ff867cc 4994
4995 for (i = 0; i < n; i++)
4996 {
75a70cf9 4997 tree elt = gimple_switch_label (stmt, i);
6ff867cc 4998 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4999 CASE_LABEL (elt) = label;
5000 }
194ae074 5001 }
6ff867cc 5002
194ae074 5003 break;
4ee9c684 5004 }
4ee9c684 5005
75a70cf9 5006 case GIMPLE_RETURN:
5007 gsi_remove (&gsi, true);
4ee9c684 5008 e->flags |= EDGE_FALLTHRU;
5009 break;
5010
75a70cf9 5011 case GIMPLE_OMP_RETURN:
5012 case GIMPLE_OMP_CONTINUE:
5013 case GIMPLE_OMP_SECTIONS_SWITCH:
5014 case GIMPLE_OMP_FOR:
ac6e3339 5015 /* The edges from OMP constructs can be simply redirected. */
5016 break;
5017
4ee9c684 5018 default:
5019 /* Otherwise it must be a fallthru edge, and we don't need to
5020 do anything besides redirecting it. */
8c0963c4 5021 gcc_assert (e->flags & EDGE_FALLTHRU);
4ee9c684 5022 break;
5023 }
5024
5025 /* Update/insert PHI nodes as necessary. */
5026
5027 /* Now update the edges in the CFG. */
5028 e = ssa_redirect_edge (e, dest);
5029
5030 return e;
5031}
5032
611d2ac1 5033/* Returns true if it is possible to remove edge E by redirecting
5034 it to the destination of the other edge from E->src. */
5035
5036static bool
75a70cf9 5037gimple_can_remove_branch_p (const_edge e)
611d2ac1 5038{
3d1eacdb 5039 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
611d2ac1 5040 return false;
5041
5042 return true;
5043}
4ee9c684 5044
5045/* Simple wrapper, as we can always redirect fallthru edges. */
5046
5047static basic_block
75a70cf9 5048gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4ee9c684 5049{
75a70cf9 5050 e = gimple_redirect_edge_and_branch (e, dest);
8c0963c4 5051 gcc_assert (e);
4ee9c684 5052
5053 return NULL;
5054}
5055
5056
5057/* Splits basic block BB after statement STMT (but at least after the
5058 labels). If STMT is NULL, BB is split just after the labels. */
5059
5060static basic_block
75a70cf9 5061gimple_split_block (basic_block bb, void *stmt)
4ee9c684 5062{
75a70cf9 5063 gimple_stmt_iterator gsi;
5064 gimple_stmt_iterator gsi_tgt;
5065 gimple act;
5066 gimple_seq list;
4ee9c684 5067 basic_block new_bb;
5068 edge e;
cd665a06 5069 edge_iterator ei;
4ee9c684 5070
5071 new_bb = create_empty_bb (bb);
5072
5073 /* Redirect the outgoing edges. */
cd665a06 5074 new_bb->succs = bb->succs;
5075 bb->succs = NULL;
5076 FOR_EACH_EDGE (e, ei, new_bb->succs)
4ee9c684 5077 e->src = new_bb;
5078
75a70cf9 5079 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
4ee9c684 5080 stmt = NULL;
5081
75a70cf9 5082 /* Move everything from GSI to the new basic block. */
5083 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 5084 {
75a70cf9 5085 act = gsi_stmt (gsi);
5086 if (gimple_code (act) == GIMPLE_LABEL)
4ee9c684 5087 continue;
5088
5089 if (!stmt)
5090 break;
5091
5092 if (stmt == act)
5093 {
75a70cf9 5094 gsi_next (&gsi);
4ee9c684 5095 break;
5096 }
5097 }
5098
75a70cf9 5099 if (gsi_end_p (gsi))
2f30597a 5100 return new_bb;
5101
5102 /* Split the statement list - avoid re-creating new containers as this
5103 brings ugly quadratic memory consumption in the inliner.
5104 (We are still quadratic since we need to update stmt BB pointers,
5105 sadly.) */
75a70cf9 5106 list = gsi_split_seq_before (&gsi);
5107 set_bb_seq (new_bb, list);
5108 for (gsi_tgt = gsi_start (list);
5109 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5110 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
4ee9c684 5111
5112 return new_bb;
5113}
5114
5115
5116/* Moves basic block BB after block AFTER. */
5117
5118static bool
75a70cf9 5119gimple_move_block_after (basic_block bb, basic_block after)
4ee9c684 5120{
5121 if (bb->prev_bb == after)
5122 return true;
5123
5124 unlink_block (bb);
5125 link_block (bb, after);
5126
5127 return true;
5128}
5129
5130
 5131/* Return true if the basic block BB can be duplicated. */
5132
5133static bool
d8edefd7 5134gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
4ee9c684 5135{
5136 return true;
5137}
5138
4ee9c684 5139/* Create a duplicate of the basic block BB. NOTE: This does not
5140 preserve SSA form. */
5141
5142static basic_block
75a70cf9 5143gimple_duplicate_bb (basic_block bb)
4ee9c684 5144{
5145 basic_block new_bb;
75a70cf9 5146 gimple_stmt_iterator gsi, gsi_tgt;
5147 gimple_seq phis = phi_nodes (bb);
5148 gimple phi, stmt, copy;
4ee9c684 5149
5150 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
deb78f9e 5151
095dcfa3 5152 /* Copy the PHI nodes. We ignore PHI node arguments here because
 5153 the incoming edges have not been set up yet. */
75a70cf9 5154 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
deb78f9e 5155 {
75a70cf9 5156 phi = gsi_stmt (gsi);
5157 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5158 create_new_def_for (gimple_phi_result (copy), copy,
5159 gimple_phi_result_ptr (copy));
deb78f9e 5160 }
095dcfa3 5161
75a70cf9 5162 gsi_tgt = gsi_start_bb (new_bb);
5163 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 5164 {
095dcfa3 5165 def_operand_p def_p;
5166 ssa_op_iter op_iter;
b4ba5e9d 5167 int region;
4ee9c684 5168
75a70cf9 5169 stmt = gsi_stmt (gsi);
5170 if (gimple_code (stmt) == GIMPLE_LABEL)
4ee9c684 5171 continue;
5172
095dcfa3 5173 /* Create a new copy of STMT and duplicate STMT's virtual
5174 operands. */
75a70cf9 5175 copy = gimple_copy (stmt);
5176 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
b4ba5e9d 5177 region = lookup_stmt_eh_region (stmt);
5178 if (region >= 0)
5179 add_stmt_to_eh_region (copy, region);
4992f399 5180 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
095dcfa3 5181
5182 /* Create new names for all the definitions created by COPY and
5183 add replacement mappings for each new name. */
5184 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5185 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4ee9c684 5186 }
5187
5188 return new_bb;
5189}
5190
28c92cbb 5191/* Adds phi node arguments for edge E_COPY after basic block duplication. */
5192
5193static void
5194add_phi_args_after_copy_edge (edge e_copy)
5195{
5196 basic_block bb, bb_copy = e_copy->src, dest;
5197 edge e;
5198 edge_iterator ei;
75a70cf9 5199 gimple phi, phi_copy;
5200 tree def;
5201 gimple_stmt_iterator psi, psi_copy;
28c92cbb 5202
75a70cf9 5203 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
28c92cbb 5204 return;
5205
5206 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5207
5208 if (e_copy->dest->flags & BB_DUPLICATED)
5209 dest = get_bb_original (e_copy->dest);
5210 else
5211 dest = e_copy->dest;
5212
5213 e = find_edge (bb, dest);
5214 if (!e)
5215 {
5216 /* During loop unrolling the target of the latch edge is copied.
 5217 In this case we are not looking for the edge to DEST, but for
 5218 the edge to the duplicated block whose original was DEST. */
5219 FOR_EACH_EDGE (e, ei, bb->succs)
5220 {
5221 if ((e->dest->flags & BB_DUPLICATED)
5222 && get_bb_original (e->dest) == dest)
5223 break;
5224 }
5225
5226 gcc_assert (e != NULL);
5227 }
5228
75a70cf9 5229 for (psi = gsi_start_phis (e->dest),
5230 psi_copy = gsi_start_phis (e_copy->dest);
5231 !gsi_end_p (psi);
5232 gsi_next (&psi), gsi_next (&psi_copy))
28c92cbb 5233 {
75a70cf9 5234 phi = gsi_stmt (psi);
5235 phi_copy = gsi_stmt (psi_copy);
28c92cbb 5236 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5237 add_phi_arg (phi_copy, def, e_copy);
5238 }
5239}
5240
095dcfa3 5241
d8b5b4fe 5242/* Basic block BB_COPY was created by code duplication. Add phi node
5243 arguments for edges going out of BB_COPY. The blocks that were
01020a5f 5244 duplicated have BB_DUPLICATED set. */
d8b5b4fe 5245
5246void
5247add_phi_args_after_copy_bb (basic_block bb_copy)
5248{
28c92cbb 5249 edge e_copy;
75a70cf9 5250 edge_iterator ei;
d8b5b4fe 5251
cd665a06 5252 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
d8b5b4fe 5253 {
28c92cbb 5254 add_phi_args_after_copy_edge (e_copy);
d8b5b4fe 5255 }
5256}
5257
5258/* Blocks in REGION_COPY array of length N_REGION were created by
5259 duplication of basic blocks. Add phi node arguments for edges
28c92cbb 5260 going from these blocks. If E_COPY is not NULL, also add
 5261 phi node arguments for its destination. */
d8b5b4fe 5262
5263void
28c92cbb 5264add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5265 edge e_copy)
d8b5b4fe 5266{
5267 unsigned i;
5268
5269 for (i = 0; i < n_region; i++)
01020a5f 5270 region_copy[i]->flags |= BB_DUPLICATED;
d8b5b4fe 5271
5272 for (i = 0; i < n_region; i++)
5273 add_phi_args_after_copy_bb (region_copy[i]);
28c92cbb 5274 if (e_copy)
5275 add_phi_args_after_copy_edge (e_copy);
d8b5b4fe 5276
5277 for (i = 0; i < n_region; i++)
01020a5f 5278 region_copy[i]->flags &= ~BB_DUPLICATED;
d8b5b4fe 5279}
5280
d8b5b4fe 5281/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
 5282 important exit edge EXIT.  By important we mean that no SSA name defined
 5283 inside the region is live over the other exit edges of the region.  All
 5284 entry edges to the region must go to ENTRY->dest.  The edge ENTRY is
 5285 redirected to the duplicate of the region.  SSA form, dominance and loop
 5286 information are updated.  The new basic blocks are stored to REGION_COPY
 5287 in the same order they had in REGION, provided that REGION_COPY is not NULL.
5288 The function returns false if it is unable to copy the region,
5289 true otherwise. */
5290
5291bool
75a70cf9 5292gimple_duplicate_sese_region (edge entry, edge exit,
d8b5b4fe 5293 basic_block *region, unsigned n_region,
5294 basic_block *region_copy)
5295{
3f9439d7 5296 unsigned i;
d8b5b4fe 5297 bool free_region_copy = false, copying_header = false;
5298 struct loop *loop = entry->dest->loop_father;
5299 edge exit_copy;
3f9439d7 5300 VEC (basic_block, heap) *doms;
d8b5b4fe 5301 edge redirected;
f57c928a 5302 int total_freq = 0, entry_freq = 0;
5303 gcov_type total_count = 0, entry_count = 0;
d8b5b4fe 5304
5305 if (!can_copy_bbs_p (region, n_region))
5306 return false;
5307
5308 /* Some sanity checking. Note that we do not check for all possible
 5309 misuses of the functions. I.e. if you ask to copy something weird,
5310 it will work, but the state of structures probably will not be
5311 correct. */
d8b5b4fe 5312 for (i = 0; i < n_region; i++)
5313 {
5314 /* We do not handle subloops, i.e. all the blocks must belong to the
5315 same loop. */
5316 if (region[i]->loop_father != loop)
5317 return false;
5318
5319 if (region[i] != entry->dest
5320 && region[i] == loop->header)
5321 return false;
5322 }
5323
96c90e5e 5324 set_loop_copy (loop, loop);
d8b5b4fe 5325
5326 /* In case the function is used for loop header copying (which is the primary
5327 use), ensure that EXIT and its copy will be new latch and entry edges. */
5328 if (loop->header == entry->dest)
5329 {
5330 copying_header = true;
96c90e5e 5331 set_loop_copy (loop, loop_outer (loop));
d8b5b4fe 5332
5333 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5334 return false;
5335
5336 for (i = 0; i < n_region; i++)
5337 if (region[i] != exit->src
5338 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5339 return false;
5340 }
5341
5342 if (!region_copy)
5343 {
680a19b9 5344 region_copy = XNEWVEC (basic_block, n_region);
d8b5b4fe 5345 free_region_copy = true;
5346 }
5347
dd277d48 5348 gcc_assert (!need_ssa_update_p (cfun));
d8b5b4fe 5349
2625ff05 5350 /* Record blocks outside the region that are dominated by something
d8b5b4fe 5351 inside. */
3f9439d7 5352 doms = NULL;
01020a5f 5353 initialize_original_copy_tables ();
5354
3f9439d7 5355 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
d8b5b4fe 5356
f57c928a 5357 if (entry->dest->count)
5358 {
5359 total_count = entry->dest->count;
5360 entry_count = entry->count;
5361 /* Fix up corner cases, to avoid division by zero or creation of negative
5362 frequencies. */
5363 if (entry_count > total_count)
5364 entry_count = total_count;
5365 }
5366 else
5367 {
5368 total_freq = entry->dest->frequency;
5369 entry_freq = EDGE_FREQUENCY (entry);
5370 /* Fix up corner cases, to avoid division by zero or creation of negative
5371 frequencies. */
5372 if (total_freq == 0)
5373 total_freq = 1;
5374 else if (entry_freq > total_freq)
5375 entry_freq = total_freq;
5376 }
2625ff05 5377
c4d867e0 5378 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5379 split_edge_bb_loc (entry));
f57c928a 5380 if (total_count)
5381 {
5382 scale_bbs_frequencies_gcov_type (region, n_region,
5383 total_count - entry_count,
5384 total_count);
5385 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
13b96211 5386 total_count);
f57c928a 5387 }
5388 else
5389 {
5390 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5391 total_freq);
5392 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5393 }
d8b5b4fe 5394
5395 if (copying_header)
5396 {
5397 loop->header = exit->dest;
5398 loop->latch = exit->src;
5399 }
5400
5401 /* Redirect the entry and add the phi node arguments. */
01020a5f 5402 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
d8b5b4fe 5403 gcc_assert (redirected != NULL);
44a46103 5404 flush_pending_stmts (entry);
d8b5b4fe 5405
5406 /* Concerning updating of dominators: We must recount dominators
095dcfa3 5407 for entry block and its copy. Anything that is outside of the
5408 region, but was dominated by something inside needs recounting as
5409 well. */
d8b5b4fe 5410 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
3f9439d7 5411 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5412 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
28c92cbb 5413 VEC_free (basic_block, heap, doms);
d8b5b4fe 5414
095dcfa3 5415 /* Add the other PHI node arguments. */
28c92cbb 5416 add_phi_args_after_copy (region_copy, n_region, NULL);
5417
5418 /* Update the SSA web. */
5419 update_ssa (TODO_update_ssa);
5420
5421 if (free_region_copy)
5422 free (region_copy);
5423
5424 free_original_copy_tables ();
5425 return true;
5426}
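
/* A minimal usage sketch for the function above (hypothetical caller,
   not part of this file).  Given an edge ENTRY into a region of blocks
   BBS[0..N-1] and an edge EXIT leaving it, a pass can simply do

     if (gimple_duplicate_sese_region (entry, exit, bbs, n, NULL))
       ...  the region was copied onto ENTRY; dominators and the
	    SSA web have already been updated ...

   Passing NULL for REGION_COPY makes the function allocate and free
   the array of copied blocks itself.  */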
5427
5428/* Duplicates REGION consisting of N_REGION blocks. The new blocks
 5429 are stored to REGION_COPY in the same order in which they appear
5430 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5431 the region, EXIT an exit from it. The condition guarding EXIT
5432 is moved to ENTRY. Returns true if duplication succeeds, false
5433 otherwise.
5434
5435 For example,
5436
5437 some_code;
5438 if (cond)
5439 A;
5440 else
5441 B;
5442
5443 is transformed to
5444
5445 if (cond)
5446 {
5447 some_code;
5448 A;
5449 }
5450 else
5451 {
5452 some_code;
5453 B;
5454 }
5455*/
5456
5457bool
75a70cf9 5458gimple_duplicate_sese_tail (edge entry, edge exit,
 5459 basic_block *region, unsigned n_region,
 5460 basic_block *region_copy)
28c92cbb 5461{
5462 unsigned i;
5463 bool free_region_copy = false;
5464 struct loop *loop = exit->dest->loop_father;
5465 struct loop *orig_loop = entry->dest->loop_father;
5466 basic_block switch_bb, entry_bb, nentry_bb;
5467 VEC (basic_block, heap) *doms;
5468 int total_freq = 0, exit_freq = 0;
5469 gcov_type total_count = 0, exit_count = 0;
5470 edge exits[2], nexits[2], e;
75a70cf9 5471 gimple_stmt_iterator gsi;
5472 gimple cond_stmt;
28c92cbb 5473 edge sorig, snew;
5474
5475 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5476 exits[0] = exit;
5477 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5478
5479 if (!can_copy_bbs_p (region, n_region))
5480 return false;
5481
5482 /* Some sanity checking. Note that we do not check for all possible
 5483 misuses of the functions. I.e., if you ask to copy something weird
 5484 (e.g., in the example, if there is a jump from inside to the middle
 5485 of some_code, or some_code defines some of the values used in cond)
5486 it will work, but the resulting code will not be correct. */
5487 for (i = 0; i < n_region; i++)
5488 {
5489 /* We do not handle subloops, i.e. all the blocks must belong to the
5490 same loop. */
5491 if (region[i]->loop_father != orig_loop)
5492 return false;
5493
5494 if (region[i] == orig_loop->latch)
5495 return false;
5496 }
5497
5498 initialize_original_copy_tables ();
5499 set_loop_copy (orig_loop, loop);
5500
5501 if (!region_copy)
5502 {
5503 region_copy = XNEWVEC (basic_block, n_region);
5504 free_region_copy = true;
5505 }
5506
dd277d48 5507 gcc_assert (!need_ssa_update_p (cfun));
28c92cbb 5508
5509 /* Record blocks outside the region that are dominated by something
5510 inside. */
5511 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5512
5513 if (exit->src->count)
5514 {
5515 total_count = exit->src->count;
5516 exit_count = exit->count;
5517 /* Fix up corner cases, to avoid division by zero or creation of negative
5518 frequencies. */
5519 if (exit_count > total_count)
5520 exit_count = total_count;
5521 }
5522 else
5523 {
5524 total_freq = exit->src->frequency;
5525 exit_freq = EDGE_FREQUENCY (exit);
5526 /* Fix up corner cases, to avoid division by zero or creation of negative
5527 frequencies. */
5528 if (total_freq == 0)
5529 total_freq = 1;
5530 if (exit_freq > total_freq)
5531 exit_freq = total_freq;
5532 }
5533
5534 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5535 split_edge_bb_loc (exit));
5536 if (total_count)
5537 {
5538 scale_bbs_frequencies_gcov_type (region, n_region,
5539 total_count - exit_count,
5540 total_count);
5541 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5542 total_count);
5543 }
5544 else
5545 {
5546 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5547 total_freq);
5548 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5549 }
5550
 5551 /* Create the switch block, and put the exit condition into it. */
5552 entry_bb = entry->dest;
5553 nentry_bb = get_bb_copy (entry_bb);
5554 if (!last_stmt (entry->src)
5555 || !stmt_ends_bb_p (last_stmt (entry->src)))
5556 switch_bb = entry->src;
5557 else
5558 switch_bb = split_edge (entry);
5559 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5560
75a70cf9 5561 gsi = gsi_last_bb (switch_bb);
5562 cond_stmt = last_stmt (exit->src);
5563 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5564 cond_stmt = gimple_copy (cond_stmt);
5565 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5566 gimple_cond_set_rhs (cond_stmt, unshare_expr (gimple_cond_rhs (cond_stmt)));
5567 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
28c92cbb 5568
5569 sorig = single_succ_edge (switch_bb);
5570 sorig->flags = exits[1]->flags;
5571 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5572
5573 /* Register the new edge from SWITCH_BB in loop exit lists. */
5574 rescan_loop_exit (snew, true, false);
5575
5576 /* Add the PHI node arguments. */
5577 add_phi_args_after_copy (region_copy, n_region, snew);
5578
5579 /* Get rid of now superfluous conditions and associated edges (and phi node
5580 arguments). */
5581 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
75a70cf9 5582 PENDING_STMT (e) = NULL;
28c92cbb 5583 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
75a70cf9 5584 PENDING_STMT (e) = NULL;
28c92cbb 5585
5586 /* Anything that is outside of the region, but was dominated by something
5587 inside needs to update dominance info. */
5588 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5589 VEC_free (basic_block, heap, doms);
d8b5b4fe 5590
095dcfa3 5591 /* Update the SSA web. */
5592 update_ssa (TODO_update_ssa);
d8b5b4fe 5593
5594 if (free_region_copy)
5595 free (region_copy);
5596
01020a5f 5597 free_original_copy_tables ();
d8b5b4fe 5598 return true;
5599}
4ee9c684 5600
773c5ba7 5601/* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5602 adding blocks when the dominator traversal reaches EXIT. This
5603 function silently assumes that ENTRY strictly dominates EXIT. */
5604
e06f9c34 5605void
773c5ba7 5606gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5607 VEC(basic_block,heap) **bbs_p)
5608{
5609 basic_block son;
5610
5611 for (son = first_dom_son (CDI_DOMINATORS, entry);
5612 son;
5613 son = next_dom_son (CDI_DOMINATORS, son))
5614 {
5615 VEC_safe_push (basic_block, heap, *bbs_p, son);
5616 if (son != exit)
5617 gather_blocks_in_sese_region (son, exit, bbs_p);
5618 }
5619}
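
/* A usage sketch (hypothetical; it mirrors move_sese_region_to_fn below):
   the entry block itself is not pushed by the dominator traversal, so a
   caller adds it first and frees the vector when done.

     VEC(basic_block,heap) *bbs = NULL;
     VEC_safe_push (basic_block, heap, bbs, entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ...
     VEC_free (basic_block, heap, bbs);  */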
5620
79acaae1 5621/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5622 The duplicates are recorded in VARS_MAP. */
5623
5624static void
5625replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5626 tree to_context)
5627{
5628 tree t = *tp, new_t;
5629 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5630 void **loc;
5631
5632 if (DECL_CONTEXT (t) == to_context)
5633 return;
5634
5635 loc = pointer_map_contains (vars_map, t);
5636
5637 if (!loc)
5638 {
5639 loc = pointer_map_insert (vars_map, t);
5640
5641 if (SSA_VAR_P (t))
5642 {
5643 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
edb7afe8 5644 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
79acaae1 5645 }
5646 else
5647 {
5648 gcc_assert (TREE_CODE (t) == CONST_DECL);
5649 new_t = copy_node (t);
5650 }
5651 DECL_CONTEXT (new_t) = to_context;
5652
5653 *loc = new_t;
5654 }
5655 else
45ba1503 5656 new_t = (tree) *loc;
79acaae1 5657
5658 *tp = new_t;
5659}
5660
75a70cf9 5661
79acaae1 5662/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5663 VARS_MAP maps old ssa names and var_decls to the new ones. */
5664
5665static tree
5666replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5667 tree to_context)
5668{
5669 void **loc;
5670 tree new_name, decl = SSA_NAME_VAR (name);
5671
5672 gcc_assert (is_gimple_reg (name));
5673
5674 loc = pointer_map_contains (vars_map, name);
5675
5676 if (!loc)
5677 {
5678 replace_by_duplicate_decl (&decl, vars_map, to_context);
5679
5680 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5681 if (gimple_in_ssa_p (cfun))
5682 add_referenced_var (decl);
5683
5684 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5685 if (SSA_NAME_IS_DEFAULT_DEF (name))
5686 set_default_def (decl, new_name);
5687 pop_cfun ();
5688
5689 loc = pointer_map_insert (vars_map, name);
5690 *loc = new_name;
5691 }
5692 else
45ba1503 5693 new_name = (tree) *loc;
79acaae1 5694
5695 return new_name;
5696}
773c5ba7 5697
5698struct move_stmt_d
5699{
1d22f541 5700 tree orig_block;
5701 tree new_block;
773c5ba7 5702 tree from_context;
5703 tree to_context;
79acaae1 5704 struct pointer_map_t *vars_map;
30b822ea 5705 htab_t new_label_map;
773c5ba7 5706 bool remap_decls_p;
5707};
5708
5709/* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
1d22f541 5710 contained in *TP if it was ORIG_BLOCK previously, and change the
5711 DECL_CONTEXT of every local variable referenced in *TP. */
773c5ba7 5712
5713static tree
75a70cf9 5714move_stmt_op (tree *tp, int *walk_subtrees, void *data)
773c5ba7 5715{
75a70cf9 5716 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5717 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
30b822ea 5718 tree t = *tp;
773c5ba7 5719
75a70cf9 5720 if (EXPR_P (t))
5721 /* We should never have TREE_BLOCK set on non-statements. */
5722 gcc_assert (!TREE_BLOCK (t));
30b822ea 5723
79acaae1 5724 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
773c5ba7 5725 {
79acaae1 5726 if (TREE_CODE (t) == SSA_NAME)
5727 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5728 else if (TREE_CODE (t) == LABEL_DECL)
30b822ea 5729 {
5730 if (p->new_label_map)
5731 {
5732 struct tree_map in, *out;
9af7fd5b 5733 in.base.from = t;
45ba1503 5734 out = (struct tree_map *)
5735 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
30b822ea 5736 if (out)
5737 *tp = t = out->to;
5738 }
773c5ba7 5739
30b822ea 5740 DECL_CONTEXT (t) = p->to_context;
5741 }
5742 else if (p->remap_decls_p)
773c5ba7 5743 {
79acaae1 5744 /* Replace T with its duplicate. T should no longer appear in the
5745 parent function, so this looks wasteful; however, it may appear
5746 in referenced_vars, and more importantly, as virtual operands of
5747 statements, and in alias lists of other variables. It would be
5748 quite difficult to expunge it from all those places. ??? It might
5749 suffice to do this for addressable variables. */
5750 if ((TREE_CODE (t) == VAR_DECL
5751 && !is_global_var (t))
5752 || TREE_CODE (t) == CONST_DECL)
5753 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5754
5755 if (SSA_VAR_P (t)
5756 && gimple_in_ssa_p (cfun))
30b822ea 5757 {
79acaae1 5758 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5759 add_referenced_var (*tp);
5760 pop_cfun ();
30b822ea 5761 }
773c5ba7 5762 }
79acaae1 5763 *walk_subtrees = 0;
773c5ba7 5764 }
30b822ea 5765 else if (TYPE_P (t))
5766 *walk_subtrees = 0;
773c5ba7 5767
5768 return NULL_TREE;
5769}
5770
75a70cf9 5771/* Like move_stmt_op, but for gimple statements.
5772
5773 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5774 contained in the current statement in *GSI_P and change the
5775 DECL_CONTEXT of every local variable referenced in the current
5776 statement. */
5777
5778static tree
5779move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5780 struct walk_stmt_info *wi)
5781{
5782 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5783 gimple stmt = gsi_stmt (*gsi_p);
5784 tree block = gimple_block (stmt);
5785
5786 if (p->orig_block == NULL_TREE
5787 || block == p->orig_block
5788 || block == NULL_TREE)
5789 gimple_set_block (stmt, p->new_block);
5790#ifdef ENABLE_CHECKING
5791 else if (block != p->new_block)
5792 {
5793 while (block && block != p->orig_block)
5794 block = BLOCK_SUPERCONTEXT (block);
5795 gcc_assert (block);
5796 }
5797#endif
5798
5799 if (is_gimple_omp (stmt)
5800 && gimple_code (stmt) != GIMPLE_OMP_RETURN
5801 && gimple_code (stmt) != GIMPLE_OMP_CONTINUE)
5802 {
5803 /* Do not remap variables inside OMP directives. Variables
5804 referenced in clauses and directive header belong to the
5805 parent function and should not be moved into the child
5806 function. */
5807 bool save_remap_decls_p = p->remap_decls_p;
5808 p->remap_decls_p = false;
5809 *handled_ops_p = true;
5810
5811 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r, move_stmt_op, wi);
5812
5813 p->remap_decls_p = save_remap_decls_p;
5814 }
5815
5816 return NULL_TREE;
5817}
5818
79acaae1 5819/* Marks the virtual operands of all statements in basic block BB for
 5820 renaming. */
5821
801c5610 5822void
5823mark_virtual_ops_in_bb (basic_block bb)
79acaae1 5824{
75a70cf9 5825 gimple_stmt_iterator gsi;
801c5610 5826
75a70cf9 5827 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5828 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
801c5610 5829
75a70cf9 5830 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5831 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
801c5610 5832}
5833
773c5ba7 5834/* Move basic block BB from function CFUN to function DEST_FN. The
5835 block is moved out of the original linked list and placed after
5836 block AFTER in the new list. Also, the block is removed from the
5837 original array of blocks and placed in DEST_FN's array of blocks.
 5838 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
5839 updated to reflect the moved edges.
13b96211 5840
79acaae1 5841 The local variables are remapped to new instances; the mapping is
 5842 recorded in D->VARS_MAP. */
773c5ba7 5843
5844static void
5845move_block_to_fn (struct function *dest_cfun, basic_block bb,
5846 basic_block after, bool update_edge_count_p,
1d22f541 5847 struct move_stmt_d *d, int eh_offset)
773c5ba7 5848{
5849 struct control_flow_graph *cfg;
5850 edge_iterator ei;
5851 edge e;
75a70cf9 5852 gimple_stmt_iterator si;
633c1ea8 5853 unsigned old_len, new_len;
773c5ba7 5854
e996e8f1 5855 /* Remove BB from dominance structures. */
5856 delete_from_dominance_info (CDI_DOMINATORS, bb);
28c92cbb 5857 if (current_loops)
5858 remove_bb_from_loops (bb);
e996e8f1 5859
773c5ba7 5860 /* Link BB to the new linked list. */
5861 move_block_after (bb, after);
5862
5863 /* Update the edge count in the corresponding flowgraphs. */
5864 if (update_edge_count_p)
5865 FOR_EACH_EDGE (e, ei, bb->succs)
5866 {
5867 cfun->cfg->x_n_edges--;
5868 dest_cfun->cfg->x_n_edges++;
5869 }
5870
5871 /* Remove BB from the original basic block array. */
5872 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5873 cfun->cfg->x_n_basic_blocks--;
5874
5875 /* Grow DEST_CFUN's basic block array if needed. */
5876 cfg = dest_cfun->cfg;
5877 cfg->x_n_basic_blocks++;
e996e8f1 5878 if (bb->index >= cfg->x_last_basic_block)
5879 cfg->x_last_basic_block = bb->index + 1;
773c5ba7 5880
633c1ea8 5881 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5882 if ((unsigned) cfg->x_last_basic_block >= old_len)
773c5ba7 5883 {
633c1ea8 5884 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
e85c2c2d 5885 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5886 new_len);
773c5ba7 5887 }
5888
5889 VEC_replace (basic_block, cfg->x_basic_block_info,
1f204d2d 5890 bb->index, bb);
773c5ba7 5891
79acaae1 5892 /* Remap the variables in phi nodes. */
75a70cf9 5893 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
79acaae1 5894 {
75a70cf9 5895 gimple phi = gsi_stmt (si);
79acaae1 5896 use_operand_p use;
5897 tree op = PHI_RESULT (phi);
5898 ssa_op_iter oi;
5899
5900 if (!is_gimple_reg (op))
28c92cbb 5901 {
5902 /* Remove the phi nodes for virtual operands (alias analysis will be
5903 run for the new function, anyway). */
75a70cf9 5904 remove_phi_node (&si, true);
28c92cbb 5905 continue;
5906 }
79acaae1 5907
1d22f541 5908 SET_PHI_RESULT (phi,
5909 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
79acaae1 5910 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5911 {
5912 op = USE_FROM_PTR (use);
5913 if (TREE_CODE (op) == SSA_NAME)
1d22f541 5914 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
79acaae1 5915 }
75a70cf9 5916
5917 gsi_next (&si);
79acaae1 5918 }
5919
75a70cf9 5920 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
773c5ba7 5921 {
75a70cf9 5922 gimple stmt = gsi_stmt (si);
30b822ea 5923 int region;
75a70cf9 5924 struct walk_stmt_info wi;
773c5ba7 5925
75a70cf9 5926 memset (&wi, 0, sizeof (wi));
5927 wi.info = d;
5928 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
773c5ba7 5929
75a70cf9 5930 if (gimple_code (stmt) == GIMPLE_LABEL)
773c5ba7 5931 {
75a70cf9 5932 tree label = gimple_label_label (stmt);
773c5ba7 5933 int uid = LABEL_DECL_UID (label);
5934
5935 gcc_assert (uid > -1);
5936
5937 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5938 if (old_len <= (unsigned) uid)
5939 {
dd277d48 5940 new_len = 3 * uid / 2 + 1;
e85c2c2d 5941 VEC_safe_grow_cleared (basic_block, gc,
5942 cfg->x_label_to_block_map, new_len);
773c5ba7 5943 }
5944
5945 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5946 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5947
5948 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5949
edb7afe8 5950 if (uid >= dest_cfun->cfg->last_label_uid)
5951 dest_cfun->cfg->last_label_uid = uid + 1;
773c5ba7 5952 }
75a70cf9 5953 else if (gimple_code (stmt) == GIMPLE_RESX && eh_offset != 0)
5954 gimple_resx_set_region (stmt, gimple_resx_region (stmt) + eh_offset);
30b822ea 5955
5956 region = lookup_stmt_eh_region (stmt);
5957 if (region >= 0)
5958 {
5959 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5960 remove_stmt_from_eh_region (stmt);
4992f399 5961 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5962 gimple_remove_stmt_histograms (cfun, stmt);
30b822ea 5963 }
79acaae1 5964
28c92cbb 5965 /* We cannot leave any operands allocated from the operand caches of
5966 the current function. */
5967 free_stmt_operands (stmt);
5968 push_cfun (dest_cfun);
79acaae1 5969 update_stmt (stmt);
28c92cbb 5970 pop_cfun ();
30b822ea 5971 }
9c388755 5972
5973 FOR_EACH_EDGE (e, ei, bb->succs)
5974 if (e->goto_locus)
5975 {
5976 tree block = e->goto_block;
5977 if (d->orig_block == NULL_TREE
5978 || block == d->orig_block)
5979 e->goto_block = d->new_block;
5980#ifdef ENABLE_CHECKING
5981 else if (block != d->new_block)
5982 {
5983 while (block && block != d->orig_block)
5984 block = BLOCK_SUPERCONTEXT (block);
5985 gcc_assert (block);
5986 }
5987#endif
5988 }
30b822ea 5989}
5990
5991/* Examine the statements in BB (which is in SRC_CFUN); find and return
5992 the outermost EH region. Use REGION as the incoming base EH region. */
5993
5994static int
5995find_outermost_region_in_block (struct function *src_cfun,
5996 basic_block bb, int region)
5997{
75a70cf9 5998 gimple_stmt_iterator si;
13b96211 5999
75a70cf9 6000 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
30b822ea 6001 {
75a70cf9 6002 gimple stmt = gsi_stmt (si);
30b822ea 6003 int stmt_region;
8487df40 6004
75a70cf9 6005 if (gimple_code (stmt) == GIMPLE_RESX)
6006 stmt_region = gimple_resx_region (stmt);
5f3cc051 6007 else
6008 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
6c105785 6009 if (stmt_region > 0)
6010 {
6011 if (region < 0)
6012 region = stmt_region;
6013 else if (stmt_region != region)
6014 {
6015 region = eh_region_outermost (src_cfun, stmt_region, region);
6016 gcc_assert (region != -1);
6017 }
6018 }
773c5ba7 6019 }
30b822ea 6020
6021 return region;
773c5ba7 6022}
6023
30b822ea 6024static tree
6025new_label_mapper (tree decl, void *data)
6026{
6027 htab_t hash = (htab_t) data;
6028 struct tree_map *m;
6029 void **slot;
6030
6031 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6032
45ba1503 6033 m = XNEW (struct tree_map);
30b822ea 6034 m->hash = DECL_UID (decl);
9af7fd5b 6035 m->base.from = decl;
e60a6f7b 6036 m->to = create_artificial_label (UNKNOWN_LOCATION);
30b822ea 6037 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
edb7afe8 6038 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6039 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
30b822ea 6040
6041 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6042 gcc_assert (*slot == NULL);
6043
6044 *slot = m;
6045
6046 return m->to;
6047}
773c5ba7 6048
1d22f541 6049/* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6050 subblocks. */
6051
6052static void
6053replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6054 tree to_context)
6055{
6056 tree *tp, t;
6057
6058 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
6059 {
6060 t = *tp;
ba7226a7 6061 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6062 continue;
1d22f541 6063 replace_by_duplicate_decl (&t, vars_map, to_context);
6064 if (t != *tp)
6065 {
6066 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6067 {
6068 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6069 DECL_HAS_VALUE_EXPR_P (t) = 1;
6070 }
6071 TREE_CHAIN (t) = TREE_CHAIN (*tp);
6072 *tp = t;
6073 }
6074 }
6075
6076 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6077 replace_block_vars_by_duplicates (block, vars_map, to_context);
6078}
6079
773c5ba7 6080/* Move a single-entry, single-exit region delimited by ENTRY_BB and
6081 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6082 single basic block in the original CFG and the new basic block is
6083 returned. DEST_CFUN must not have a CFG yet.
6084
6085 Note that the region need not be a pure SESE region. Blocks inside
6086 the region may contain calls to abort/exit. The only restriction
6087 is that ENTRY_BB should be the only entry point and it must
6088 dominate EXIT_BB.
6089
1d22f541 6090 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
 6091 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6092 to the new function.
6093
773c5ba7 6094 All local variables referenced in the region are assumed to be in
6095 the corresponding BLOCK_VARS and unexpanded variable lists
6096 associated with DEST_CFUN. */
6097
6098basic_block
6099move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
1d22f541 6100 basic_block exit_bb, tree orig_block)
773c5ba7 6101{
79acaae1 6102 VEC(basic_block,heap) *bbs, *dom_bbs;
6103 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6104 basic_block after, bb, *entry_pred, *exit_succ, abb;
6105 struct function *saved_cfun = cfun;
30b822ea 6106 int *entry_flag, *exit_flag, eh_offset;
79acaae1 6107 unsigned *entry_prob, *exit_prob;
773c5ba7 6108 unsigned i, num_entry_edges, num_exit_edges;
6109 edge e;
6110 edge_iterator ei;
30b822ea 6111 htab_t new_label_map;
79acaae1 6112 struct pointer_map_t *vars_map;
28c92cbb 6113 struct loop *loop = entry_bb->loop_father;
1d22f541 6114 struct move_stmt_d d;
773c5ba7 6115
6116 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6117 region. */
6118 gcc_assert (entry_bb != exit_bb
5056ba1a 6119 && (!exit_bb
6120 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
773c5ba7 6121
79acaae1 6122 /* Collect all the blocks in the region. Manually add ENTRY_BB
 6123 because it won't be added by gather_blocks_in_sese_region. */
773c5ba7 6124 bbs = NULL;
6125 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6126 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6127
79acaae1 6128 /* The blocks that used to be dominated by something in BBS will now be
6129 dominated by the new block. */
6130 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6131 VEC_address (basic_block, bbs),
6132 VEC_length (basic_block, bbs));
6133
773c5ba7 6134 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6135 the predecessor edges to ENTRY_BB and the successor edges to
6136 EXIT_BB so that we can re-attach them to the new basic block that
6137 will replace the region. */
6138 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6139 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6140 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
79acaae1 6141 entry_prob = XNEWVEC (unsigned, num_entry_edges);
773c5ba7 6142 i = 0;
6143 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6144 {
79acaae1 6145 entry_prob[i] = e->probability;
773c5ba7 6146 entry_flag[i] = e->flags;
6147 entry_pred[i++] = e->src;
6148 remove_edge (e);
6149 }
6150
5056ba1a 6151 if (exit_bb)
773c5ba7 6152 {
5056ba1a 6153 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6154 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6155 sizeof (basic_block));
6156 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
79acaae1 6157 exit_prob = XNEWVEC (unsigned, num_exit_edges);
5056ba1a 6158 i = 0;
6159 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6160 {
79acaae1 6161 exit_prob[i] = e->probability;
5056ba1a 6162 exit_flag[i] = e->flags;
6163 exit_succ[i++] = e->dest;
6164 remove_edge (e);
6165 }
6166 }
6167 else
6168 {
6169 num_exit_edges = 0;
6170 exit_succ = NULL;
6171 exit_flag = NULL;
79acaae1 6172 exit_prob = NULL;
773c5ba7 6173 }
6174
6175 /* Switch context to the child function to initialize DEST_FN's CFG. */
6176 gcc_assert (dest_cfun->cfg == NULL);
79acaae1 6177 push_cfun (dest_cfun);
30b822ea 6178
773c5ba7 6179 init_empty_tree_cfg ();
30b822ea 6180
6181 /* Initialize EH information for the new function. */
6182 eh_offset = 0;
6183 new_label_map = NULL;
6184 if (saved_cfun->eh)
6185 {
6186 int region = -1;
6187
6188 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6189 region = find_outermost_region_in_block (saved_cfun, bb, region);
6190
6191 init_eh_for_function ();
6192 if (region != -1)
6193 {
6194 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6195 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
6196 new_label_map, region, 0);
6197 }
6198 }
6199
79acaae1 6200 pop_cfun ();
6201
773c5ba7 6202 /* Move blocks from BBS into DEST_CFUN. */
6203 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6204 after = dest_cfun->cfg->x_entry_block_ptr;
79acaae1 6205 vars_map = pointer_map_create ();
1d22f541 6206
6207 memset (&d, 0, sizeof (d));
6208 d.vars_map = vars_map;
6209 d.from_context = cfun->decl;
6210 d.to_context = dest_cfun->decl;
6211 d.new_label_map = new_label_map;
6212 d.remap_decls_p = true;
6213 d.orig_block = orig_block;
6214 d.new_block = DECL_INITIAL (dest_cfun->decl);
6215
773c5ba7 6216 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6217 {
6218 /* No need to update edge counts on the last block. It has
6219 already been updated earlier when we detached the region from
6220 the original CFG. */
1d22f541 6221 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d, eh_offset);
773c5ba7 6222 after = bb;
6223 }
6224
1d22f541 6225 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6226 if (orig_block)
6227 {
6228 tree block;
6229 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6230 == NULL_TREE);
6231 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6232 = BLOCK_SUBBLOCKS (orig_block);
6233 for (block = BLOCK_SUBBLOCKS (orig_block);
6234 block; block = BLOCK_CHAIN (block))
6235 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6236 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6237 }
6238
6239 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6240 vars_map, dest_cfun->decl);
6241
30b822ea 6242 if (new_label_map)
6243 htab_delete (new_label_map);
79acaae1 6244 pointer_map_destroy (vars_map);
773c5ba7 6245
6246 /* Rewire the entry and exit blocks. The successor to the entry
6247 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6248 the child function. Similarly, the predecessor of DEST_FN's
6249 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6250 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
 6251 various CFG manipulation functions get to the right CFG.
6252
6253 FIXME, this is silly. The CFG ought to become a parameter to
6254 these helpers. */
79acaae1 6255 push_cfun (dest_cfun);
773c5ba7 6256 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
5056ba1a 6257 if (exit_bb)
6258 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
79acaae1 6259 pop_cfun ();
773c5ba7 6260
6261 /* Back in the original function, the SESE region has disappeared,
6262 create a new basic block in its place. */
6263 bb = create_empty_bb (entry_pred[0]);
28c92cbb 6264 if (current_loops)
6265 add_bb_to_loop (bb, loop);
773c5ba7 6266 for (i = 0; i < num_entry_edges; i++)
79acaae1 6267 {
6268 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6269 e->probability = entry_prob[i];
6270 }
773c5ba7 6271
6272 for (i = 0; i < num_exit_edges; i++)
79acaae1 6273 {
6274 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6275 e->probability = exit_prob[i];
6276 }
6277
6278 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6279 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6280 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6281 VEC_free (basic_block, heap, dom_bbs);
773c5ba7 6282
5056ba1a 6283 if (exit_bb)
6284 {
79acaae1 6285 free (exit_prob);
5056ba1a 6286 free (exit_flag);
6287 free (exit_succ);
6288 }
79acaae1 6289 free (entry_prob);
773c5ba7 6290 free (entry_flag);
6291 free (entry_pred);
773c5ba7 6292 VEC_free (basic_block, heap, bbs);
6293
6294 return bb;
6295}
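
/* A simplified sketch of a call to the function above (hypothetical; the
   real caller at this point is the OMP expansion code, which outlines
   parallel/task bodies into their child functions):

     child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb,
				      gimple_block (entry_stmt));

   On return, NEW_BB has replaced the whole region in the original CFG,
   and the region's entry/exit have been wired to the child function's
   ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR.  */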
6296
095dcfa3 6297
75a70cf9 6298/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
 6299 tree-pass.h). */
4ee9c684 6300
6301void
6302dump_function_to_file (tree fn, FILE *file, int flags)
6303{
6304 tree arg, vars, var;
2debe3a3 6305 struct function *dsf;
4ee9c684 6306 bool ignore_topmost_bind = false, any_var = false;
6307 basic_block bb;
6308 tree chain;
13b96211 6309
5135beeb 6310 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4ee9c684 6311
6312 arg = DECL_ARGUMENTS (fn);
6313 while (arg)
6314 {
04ba083d 6315 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6316 fprintf (file, " ");
4ee9c684 6317 print_generic_expr (file, arg, dump_flags);
46412a37 6318 if (flags & TDF_VERBOSE)
6319 print_node (file, "", arg, 4);
4ee9c684 6320 if (TREE_CHAIN (arg))
6321 fprintf (file, ", ");
6322 arg = TREE_CHAIN (arg);
6323 }
6324 fprintf (file, ")\n");
6325
46412a37 6326 if (flags & TDF_VERBOSE)
6327 print_node (file, "", fn, 2);
6328
2debe3a3 6329 dsf = DECL_STRUCT_FUNCTION (fn);
6330 if (dsf && (flags & TDF_DETAILS))
6331 dump_eh_tree (file, dsf);
6332
1a1a827a 6333 if (flags & TDF_RAW && !gimple_has_body_p (fn))
4ee9c684 6334 {
6335 dump_node (fn, TDF_SLIM | flags, file);
6336 return;
6337 }
6338
1e8e9920 6339 /* Switch CFUN to point to FN. */
87d4aa85 6340 push_cfun (DECL_STRUCT_FUNCTION (fn));
1e8e9920 6341
4ee9c684 6342 /* When GIMPLE is lowered, the variables are no longer available in
6343 BIND_EXPRs, so display them separately. */
edb7afe8 6344 if (cfun && cfun->decl == fn && cfun->local_decls)
4ee9c684 6345 {
6346 ignore_topmost_bind = true;
6347
6348 fprintf (file, "{\n");
edb7afe8 6349 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
4ee9c684 6350 {
6351 var = TREE_VALUE (vars);
6352
6353 print_generic_decl (file, var, flags);
46412a37 6354 if (flags & TDF_VERBOSE)
6355 print_node (file, "", var, 4);
4ee9c684 6356 fprintf (file, "\n");
6357
6358 any_var = true;
6359 }
6360 }
6361
b8bc99ef 6362 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
4ee9c684 6363 {
75a70cf9 6364 /* If the CFG has been built, emit a CFG-based dump. */
020c749b 6365 check_bb_profile (ENTRY_BLOCK_PTR, file);
4ee9c684 6366 if (!ignore_topmost_bind)
6367 fprintf (file, "{\n");
6368
6369 if (any_var && n_basic_blocks)
6370 fprintf (file, "\n");
6371
6372 FOR_EACH_BB (bb)
75a70cf9 6373 gimple_dump_bb (bb, file, 2, flags);
13b96211 6374
4ee9c684 6375 fprintf (file, "}\n");
020c749b 6376 check_bb_profile (EXIT_BLOCK_PTR, file);
4ee9c684 6377 }
75a70cf9 6378 else if (DECL_SAVED_TREE (fn) == NULL)
6379 {
6380 /* The function is now in GIMPLE form but the CFG has not been
6381 built yet. Emit the single sequence of GIMPLE statements
6382 that make up its body. */
6383 gimple_seq body = gimple_body (fn);
6384
6385 if (gimple_seq_first_stmt (body)
6386 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6387 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6388 print_gimple_seq (file, body, 0, flags);
6389 else
6390 {
6391 if (!ignore_topmost_bind)
6392 fprintf (file, "{\n");
6393
6394 if (any_var)
6395 fprintf (file, "\n");
6396
6397 print_gimple_seq (file, body, 2, flags);
6398 fprintf (file, "}\n");
6399 }
6400 }
4ee9c684 6401 else
6402 {
6403 int indent;
6404
6405 /* Make a tree based dump. */
6406 chain = DECL_SAVED_TREE (fn);
6407
1e8e9920 6408 if (chain && TREE_CODE (chain) == BIND_EXPR)
4ee9c684 6409 {
6410 if (ignore_topmost_bind)
6411 {
6412 chain = BIND_EXPR_BODY (chain);
6413 indent = 2;
6414 }
6415 else
6416 indent = 0;
6417 }
6418 else
6419 {
6420 if (!ignore_topmost_bind)
6421 fprintf (file, "{\n");
6422 indent = 2;
6423 }
6424
6425 if (any_var)
6426 fprintf (file, "\n");
6427
6428 print_generic_stmt_indented (file, chain, flags, indent);
6429 if (ignore_topmost_bind)
6430 fprintf (file, "}\n");
6431 }
6432
6433 fprintf (file, "\n\n");
1e8e9920 6434
6435 /* Restore CFUN. */
87d4aa85 6436 pop_cfun ();
1e8e9920 6437}
6438
6439
 6440/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree-pass.h). */
6441
6442void
6443debug_function (tree fn, int flags)
6444{
6445 dump_function_to_file (fn, stderr, flags);
4ee9c684 6446}
6447
6448
903dae48 6449/* Print on FILE the indexes for the predecessors of basic_block BB. */
4ee9c684 6450
6451static void
cd665a06 6452print_pred_bbs (FILE *file, basic_block bb)
4ee9c684 6453{
cd665a06 6454 edge e;
6455 edge_iterator ei;
6456
6457 FOR_EACH_EDGE (e, ei, bb->preds)
903dae48 6458 fprintf (file, "bb_%d ", e->src->index);
4ee9c684 6459}
6460
6461
903dae48 6462/* Print on FILE the indexes for the successors of basic_block BB. */
4ee9c684 6463
6464static void
cd665a06 6465print_succ_bbs (FILE *file, basic_block bb)
4ee9c684 6466{
cd665a06 6467 edge e;
6468 edge_iterator ei;
6469
6470 FOR_EACH_EDGE (e, ei, bb->succs)
903dae48 6471 fprintf (file, "bb_%d ", e->dest->index);
4ee9c684 6472}
6473
2f62d06a 6474/* Print to FILE the basic block BB following the VERBOSITY level. */
6475
6476void
6477print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6478{
6479 char *s_indent = (char *) alloca ((size_t) indent + 1);
6480 memset ((void *) s_indent, ' ', (size_t) indent);
6481 s_indent[indent] = '\0';
6482
6483 /* Print basic_block's header. */
6484 if (verbosity >= 2)
6485 {
6486 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6487 print_pred_bbs (file, bb);
6488 fprintf (file, "}, succs = {");
6489 print_succ_bbs (file, bb);
6490 fprintf (file, "})\n");
6491 }
6492
6493 /* Print basic_block's body. */
6494 if (verbosity >= 3)
6495 {
6496 fprintf (file, "%s {\n", s_indent);
75a70cf9 6497 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
2f62d06a 6498 fprintf (file, "%s }\n", s_indent);
6499 }
6500}
6501
6502static void print_loop_and_siblings (FILE *, struct loop *, int, int);
4ee9c684 6503
2f62d06a 6504/* Pretty print LOOP on FILE, indented INDENT spaces. Following
6505 VERBOSITY level this outputs the contents of the loop, or just its
6506 structure. */
4ee9c684 6507
6508static void
2f62d06a 6509print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
4ee9c684 6510{
6511 char *s_indent;
6512 basic_block bb;
13b96211 6513
4ee9c684 6514 if (loop == NULL)
6515 return;
6516
6517 s_indent = (char *) alloca ((size_t) indent + 1);
6518 memset ((void *) s_indent, ' ', (size_t) indent);
6519 s_indent[indent] = '\0';
6520
2f62d06a 6521 /* Print loop's header. */
6522 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6523 loop->num, loop->header->index, loop->latch->index);
6524 fprintf (file, ", niter = ");
6525 print_generic_expr (file, loop->nb_iterations, 0);
13b96211 6526
2f62d06a 6527 if (loop->any_upper_bound)
6528 {
6529 fprintf (file, ", upper_bound = ");
6530 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6531 }
13b96211 6532
2f62d06a 6533 if (loop->any_estimate)
6534 {
6535 fprintf (file, ", estimate = ");
6536 dump_double_int (file, loop->nb_iterations_estimate, true);
6537 }
6538 fprintf (file, ")\n");
6539
6540 /* Print loop's body. */
6541 if (verbosity >= 1)
6542 {
6543 fprintf (file, "%s{\n", s_indent);
6544 FOR_EACH_BB (bb)
6545 if (bb->loop_father == loop)
6546 print_loops_bb (file, bb, indent, verbosity);
6547
6548 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6549 fprintf (file, "%s}\n", s_indent);
6550 }
4ee9c684 6551}
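
/* For reference, the loop header printed above has roughly this shape
   (derived from the fprintf calls; the exact values and the optional
   upper_bound/estimate fields vary):

     loop_1 (header = 3, latch = 5, niter = n_8, upper_bound = 99, estimate = 99)
*/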
6552
2f62d06a 6553/* Print the LOOP and its sibling loops on FILE, indented INDENT
6554 spaces. Following VERBOSITY level this outputs the contents of the
6555 loop, or just its structure. */
6556
6557static void
6558print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6559{
6560 if (loop == NULL)
6561 return;
6562
6563 print_loop (file, loop, indent, verbosity);
6564 print_loop_and_siblings (file, loop->next, indent, verbosity);
6565}
4ee9c684 6566
6567/* Follow a CFG edge from the entry point of the program, and on entry
6568 of a loop, pretty print the loop structure on FILE. */
6569
13b96211 6570void
2f62d06a 6571print_loops (FILE *file, int verbosity)
4ee9c684 6572{
6573 basic_block bb;
13b96211 6574
255b6be7 6575 bb = ENTRY_BLOCK_PTR;
4ee9c684 6576 if (bb && bb->loop_father)
2f62d06a 6577 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
4ee9c684 6578}
6579
6580
2f62d06a 6581/* Debugging loops structure at tree level, at some VERBOSITY level. */
6582
6583void
6584debug_loops (int verbosity)
6585{
6586 print_loops (stderr, verbosity);
6587}
6588
6589/* Print on stderr the code of LOOP, at some VERBOSITY level. */
4ee9c684 6590
13b96211 6591void
2f62d06a 6592debug_loop (struct loop *loop, int verbosity)
4ee9c684 6593{
2f62d06a 6594 print_loop (stderr, loop, 0, verbosity);
4ee9c684 6595}
6596
2f62d06a 6597/* Print on stderr the code of loop number NUM, at some VERBOSITY
6598 level. */
6599
6600void
6601debug_loop_num (unsigned num, int verbosity)
6602{
6603 debug_loop (get_loop (num), verbosity);
6604}
4ee9c684 6605
6606/* Return true if BB ends with a call, possibly followed by some
6607 instructions that must stay with the call. Return false,
6608 otherwise. */
6609
6610static bool
75a70cf9 6611gimple_block_ends_with_call_p (basic_block bb)
4ee9c684 6612{
75a70cf9 6613 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6614 return is_gimple_call (gsi_stmt (gsi));
4ee9c684 6615}
6616
6617
6618/* Return true if BB ends with a conditional branch. Return false,
6619 otherwise. */
6620
6621static bool
75a70cf9 6622gimple_block_ends_with_condjump_p (const_basic_block bb)
4ee9c684 6623{
75a70cf9 6624 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6625 return (stmt && gimple_code (stmt) == GIMPLE_COND);
4ee9c684 6626}
6627
6628
6629/* Return true if we need to add fake edge to exit at statement T.
75a70cf9 6630 Helper function for gimple_flow_call_edges_add. */
4ee9c684 6631
6632static bool
75a70cf9 6633need_fake_edge_p (gimple t)
4ee9c684 6634{
75a70cf9 6635 tree fndecl = NULL_TREE;
6636 int call_flags = 0;
4ee9c684 6637
6638 /* NORETURN and LONGJMP calls already have an edge to exit.
13422b8a 6639 CONST and PURE calls do not need one.
4ee9c684 6640 We don't currently check for CONST and PURE here, although
6641 it would be a good idea, because those attributes are
6642 figured out from the RTL in mark_constant_function, and
6643 the counter incrementation code from -fprofile-arcs
6644 leads to different results from -fbranch-probabilities. */
75a70cf9 6645 if (is_gimple_call (t))
eb5bb63e 6646 {
75a70cf9 6647 fndecl = gimple_call_fndecl (t);
6648 call_flags = gimple_call_flags (t);
eb5bb63e 6649 }
6650
75a70cf9 6651 if (is_gimple_call (t)
6652 && fndecl
6653 && DECL_BUILT_IN (fndecl)
eb5bb63e 6654 && (call_flags & ECF_NOTHROW)
dac59c04 6655 && !(call_flags & ECF_RETURNS_TWICE)
6656 /* fork() doesn't really return twice, but the effect of
6657 wrapping it in __gcov_fork() which calls __gcov_flush()
6658 and clears the counters before forking has the same
6659 effect as returning twice. Force a fake edge. */
6660 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6661 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6662 return false;
eb5bb63e 6663
75a70cf9 6664 if (is_gimple_call (t)
6665 && !(call_flags & ECF_NORETURN))
4ee9c684 6666 return true;
6667
590c3166 6668 if (gimple_code (t) == GIMPLE_ASM
75a70cf9 6669 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
4ee9c684 6670 return true;
6671
6672 return false;
6673}
6674
6675
 6676/* Add fake edges to the function exit for any non-constant and
 6677 non-noreturn calls, or volatile inline assembly, in the bitmap of blocks
6678 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6679 the number of blocks that were split.
6680
6681 The goal is to expose cases in which entering a basic block does
6682 not imply that all subsequent instructions must be executed. */
6683
6684static int
75a70cf9 6685gimple_flow_call_edges_add (sbitmap blocks)
4ee9c684 6686{
6687 int i;
6688 int blocks_split = 0;
6689 int last_bb = last_basic_block;
6690 bool check_last_block = false;
6691
4d2e5d52 6692 if (n_basic_blocks == NUM_FIXED_BLOCKS)
4ee9c684 6693 return 0;
6694
6695 if (! blocks)
6696 check_last_block = true;
6697 else
6698 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6699
6700 /* In the last basic block, before epilogue generation, there will be
6701 a fallthru edge to EXIT. Special care is required if the last insn
6702 of the last basic block is a call because make_edge folds duplicate
6703 edges, which would result in the fallthru edge also being marked
6704 fake, which would result in the fallthru edge being removed by
6705 remove_fake_edges, which would result in an invalid CFG.
6706
6707 Moreover, we can't elide the outgoing fake edge, since the block
6708 profiler needs to take this into account in order to solve the minimal
6709 spanning tree in the case that the call doesn't return.
6710
6711 Handle this by adding a dummy instruction in a new last basic block. */
6712 if (check_last_block)
6713 {
6714 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
75a70cf9 6715 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6716 gimple t = NULL;
6717
6718 if (!gsi_end_p (gsi))
6719 t = gsi_stmt (gsi);
4ee9c684 6720
20498e00 6721 if (t && need_fake_edge_p (t))
4ee9c684 6722 {
6723 edge e;
6724
c6356c17 6725 e = find_edge (bb, EXIT_BLOCK_PTR);
6726 if (e)
6727 {
75a70cf9 6728 gsi_insert_on_edge (e, gimple_build_nop ());
6729 gsi_commit_edge_inserts ();
c6356c17 6730 }
4ee9c684 6731 }
6732 }
6733
 6734 /* Now add fake edges to the function exit for any non-constant
6735 calls since there is no way that we can determine if they will
6736 return or not... */
6737 for (i = 0; i < last_bb; i++)
6738 {
6739 basic_block bb = BASIC_BLOCK (i);
75a70cf9 6740 gimple_stmt_iterator gsi;
6741 gimple stmt, last_stmt;
4ee9c684 6742
6743 if (!bb)
6744 continue;
6745
6746 if (blocks && !TEST_BIT (blocks, i))
6747 continue;
6748
75a70cf9 6749 gsi = gsi_last_bb (bb);
6750 if (!gsi_end_p (gsi))
4ee9c684 6751 {
75a70cf9 6752 last_stmt = gsi_stmt (gsi);
4ee9c684 6753 do
6754 {
75a70cf9 6755 stmt = gsi_stmt (gsi);
4ee9c684 6756 if (need_fake_edge_p (stmt))
6757 {
6758 edge e;
75a70cf9 6759
4ee9c684 6760 /* The handling above of the final block before the
6761 epilogue should be enough to verify that there is
6762 no edge to the exit block in CFG already.
6763 Calling make_edge in such case would cause us to
6764 mark that edge as fake and remove it later. */
6765#ifdef ENABLE_CHECKING
6766 if (stmt == last_stmt)
cd665a06 6767 {
c6356c17 6768 e = find_edge (bb, EXIT_BLOCK_PTR);
6769 gcc_assert (e == NULL);
cd665a06 6770 }
4ee9c684 6771#endif
6772
6773 /* Note that the following may create a new basic block
6774 and renumber the existing basic blocks. */
6775 if (stmt != last_stmt)
6776 {
6777 e = split_block (bb, stmt);
6778 if (e)
6779 blocks_split++;
6780 }
6781 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6782 }
75a70cf9 6783 gsi_prev (&gsi);
4ee9c684 6784 }
75a70cf9 6785 while (!gsi_end_p (gsi));
4ee9c684 6786 }
6787 }
6788
6789 if (blocks_split)
6790 verify_flow_info ();
6791
6792 return blocks_split;
6793}
6794
2c8a1497 6795/* Purge dead abnormal call edges from basic block BB. */
6796
6797bool
75a70cf9 6798gimple_purge_dead_abnormal_call_edges (basic_block bb)
2c8a1497 6799{
75a70cf9 6800 bool changed = gimple_purge_dead_eh_edges (bb);
2c8a1497 6801
18d50ae6 6802 if (cfun->has_nonlocal_label)
2c8a1497 6803 {
75a70cf9 6804 gimple stmt = last_stmt (bb);
2c8a1497 6805 edge_iterator ei;
6806 edge e;
6807
75a70cf9 6808 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
2c8a1497 6809 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6810 {
6811 if (e->flags & EDGE_ABNORMAL)
6812 {
6813 remove_edge (e);
6814 changed = true;
6815 }
6816 else
6817 ei_next (&ei);
6818 }
6819
75a70cf9 6820 /* See gimple_purge_dead_eh_edges below. */
2c8a1497 6821 if (changed)
6822 free_dominance_info (CDI_DOMINATORS);
6823 }
6824
6825 return changed;
6826}
6827
31a8456e 6828/* Removes edge E and all the blocks dominated by it, and updates dominance
6829 information. The IL in E->src needs to be updated separately.
 6830 If dominance info is not available, only the edge E is removed. */
6831
6832void
6833remove_edge_and_dominated_blocks (edge e)
6834{
6835 VEC (basic_block, heap) *bbs_to_remove = NULL;
6836 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6837 bitmap df, df_idom;
6838 edge f;
6839 edge_iterator ei;
6840 bool none_removed = false;
6841 unsigned i;
6842 basic_block bb, dbb;
6843 bitmap_iterator bi;
6844
50b08d37 6845 if (!dom_info_available_p (CDI_DOMINATORS))
31a8456e 6846 {
6847 remove_edge (e);
6848 return;
6849 }
6850
6851 /* No updating is needed for edges to exit. */
6852 if (e->dest == EXIT_BLOCK_PTR)
6853 {
6854 if (cfgcleanup_altered_bbs)
6855 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6856 remove_edge (e);
6857 return;
6858 }
6859
6860 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6861 that is not dominated by E->dest, then this set is empty. Otherwise,
6862 all the basic blocks dominated by E->dest are removed.
6863
6864 Also, to DF_IDOM we store the immediate dominators of the blocks in
6865 the dominance frontier of E (i.e., of the successors of the
6866 removed blocks, if there are any, and of E->dest otherwise). */
6867 FOR_EACH_EDGE (f, ei, e->dest->preds)
6868 {
6869 if (f == e)
6870 continue;
6871
6872 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6873 {
6874 none_removed = true;
6875 break;
6876 }
6877 }
6878
6879 df = BITMAP_ALLOC (NULL);
6880 df_idom = BITMAP_ALLOC (NULL);
6881
6882 if (none_removed)
6883 bitmap_set_bit (df_idom,
6884 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6885 else
6886 {
59f3ea59 6887 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
31a8456e 6888 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6889 {
6890 FOR_EACH_EDGE (f, ei, bb->succs)
6891 {
6892 if (f->dest != EXIT_BLOCK_PTR)
6893 bitmap_set_bit (df, f->dest->index);
6894 }
6895 }
6896 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6897 bitmap_clear_bit (df, bb->index);
6898
6899 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6900 {
6901 bb = BASIC_BLOCK (i);
6902 bitmap_set_bit (df_idom,
6903 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6904 }
6905 }
6906
6907 if (cfgcleanup_altered_bbs)
6908 {
6909 /* Record the set of the altered basic blocks. */
6910 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6911 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6912 }
6913
6914 /* Remove E and the cancelled blocks. */
6915 if (none_removed)
6916 remove_edge (e);
6917 else
6918 {
6919 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6920 delete_basic_block (bb);
6921 }
6922
6923 /* Update the dominance information. The immediate dominator may change only
6924 for blocks whose immediate dominator belongs to DF_IDOM:
6925
6926 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
 6927 removal. Let Z be an arbitrary block such that idom(Z) = Y and
6928 Z dominates X after the removal. Before removal, there exists a path P
6929 from Y to X that avoids Z. Let F be the last edge on P that is
6930 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6931 dominates W, and because of P, Z does not dominate W), and W belongs to
6932 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6933 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6934 {
6935 bb = BASIC_BLOCK (i);
6936 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6937 dbb;
6938 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6939 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6940 }
6941
3f9439d7 6942 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
31a8456e 6943
6944 BITMAP_FREE (df);
6945 BITMAP_FREE (df_idom);
6946 VEC_free (basic_block, heap, bbs_to_remove);
6947 VEC_free (basic_block, heap, bbs_to_fix_dom);
6948}
6949
2c8a1497 6950/* Purge dead EH edges from basic block BB. */
6951
35c15734 6952bool
75a70cf9 6953gimple_purge_dead_eh_edges (basic_block bb)
35c15734 6954{
6955 bool changed = false;
cd665a06 6956 edge e;
6957 edge_iterator ei;
75a70cf9 6958 gimple stmt = last_stmt (bb);
35c15734 6959
75a70cf9 6960 if (stmt && stmt_can_throw_internal (stmt))
35c15734 6961 return false;
6962
cd665a06 6963 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
35c15734 6964 {
35c15734 6965 if (e->flags & EDGE_EH)
6966 {
31a8456e 6967 remove_edge_and_dominated_blocks (e);
35c15734 6968 changed = true;
6969 }
cd665a06 6970 else
6971 ei_next (&ei);
35c15734 6972 }
6973
6974 return changed;
6975}
6976
6977bool
75a70cf9 6978gimple_purge_all_dead_eh_edges (const_bitmap blocks)
35c15734 6979{
6980 bool changed = false;
4f917ffe 6981 unsigned i;
0cc4271a 6982 bitmap_iterator bi;
35c15734 6983
0cc4271a 6984 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6985 {
96d407e2 6986 basic_block bb = BASIC_BLOCK (i);
6987
6988 /* Earlier gimple_purge_dead_eh_edges could have removed
6989 this basic block already. */
6990 gcc_assert (bb || changed);
6991 if (bb != NULL)
6992 changed |= gimple_purge_dead_eh_edges (bb);
0cc4271a 6993 }
35c15734 6994
6995 return changed;
6996}
4ee9c684 6997
a77b4cde 6998/* This function is called whenever a new edge is created or
6999 redirected. */
7000
7001static void
75a70cf9 7002gimple_execute_on_growing_pred (edge e)
a77b4cde 7003{
7004 basic_block bb = e->dest;
7005
7006 if (phi_nodes (bb))
7007 reserve_phi_args_for_new_edge (bb);
7008}
7009
e4ff8a73 7010/* This function is called immediately before edge E is removed from
7011 the edge vector E->dest->preds. */
7012
7013static void
75a70cf9 7014gimple_execute_on_shrinking_pred (edge e)
e4ff8a73 7015{
7016 if (phi_nodes (e->dest))
7017 remove_phi_args (e);
7018}
7019
c50ae675 7020/*---------------------------------------------------------------------------
7021 Helper functions for Loop versioning
7022 ---------------------------------------------------------------------------*/
7023
7024/* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7025 of 'first'. Both of them are dominated by 'new_head' basic block. When
 7026 'new_head' was created by splitting 'second's incoming edge, it received
 7027 phi arguments on that edge from split_edge(). Later, an additional edge
13b96211 7028 'e' was created to connect 'new_head' and 'first'. Now this routine adds
 7029 to this additional edge 'e' the phi args that the new_head-to-second edge
75a70cf9 7030 received as part of the edge splitting. */
c50ae675 7031
7032static void
75a70cf9 7033gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7034 basic_block new_head, edge e)
c50ae675 7035{
75a70cf9 7036 gimple phi1, phi2;
7037 gimple_stmt_iterator psi1, psi2;
7038 tree def;
b0551d9c 7039 edge e2 = find_edge (new_head, second);
7040
7041 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7042 edge, we should always have an edge from NEW_HEAD to SECOND. */
7043 gcc_assert (e2 != NULL);
c50ae675 7044
7045 /* Browse all 'second' basic block phi nodes and add phi args to
7046 edge 'e' for 'first' head. PHI args are always in correct order. */
7047
75a70cf9 7048 for (psi2 = gsi_start_phis (second),
7049 psi1 = gsi_start_phis (first);
7050 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7051 gsi_next (&psi2), gsi_next (&psi1))
c50ae675 7052 {
75a70cf9 7053 phi1 = gsi_stmt (psi1);
7054 phi2 = gsi_stmt (psi2);
7055 def = PHI_ARG_DEF (phi2, e2->dest_idx);
b0551d9c 7056 add_phi_arg (phi1, def, e);
c50ae675 7057 }
7058}
7059
75a70cf9 7060
13b96211 7061/* Adds an if-else statement to COND_BB with condition COND_EXPR.
 7062 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
c50ae675 7063 the destination of the ELSE part. */
75a70cf9 7064
c50ae675 7065static void
75a70cf9 7066gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7067 basic_block second_head ATTRIBUTE_UNUSED,
7068 basic_block cond_bb, void *cond_e)
c50ae675 7069{
75a70cf9 7070 gimple_stmt_iterator gsi;
7071 gimple new_cond_expr;
c50ae675 7072 tree cond_expr = (tree) cond_e;
7073 edge e0;
7074
7075 /* Build new conditional expr */
75a70cf9 7076 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7077 NULL_TREE, NULL_TREE);
c50ae675 7078
13b96211 7079 /* Add new cond in cond_bb. */
75a70cf9 7080 gsi = gsi_last_bb (cond_bb);
7081 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7082
c50ae675 7083 /* Adjust edges appropriately to connect new head with first head
7084 as well as second head. */
7085 e0 = single_succ_edge (cond_bb);
7086 e0->flags &= ~EDGE_FALLTHRU;
7087 e0->flags |= EDGE_FALSE_VALUE;
7088}
7089
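/* The hook table below is normally not called directly; the generic
   wrappers in cfghooks.c (split_edge, split_block, make_forwarder_block,
   and friends) dispatch through the currently installed cfg_hooks, so the
   same callers work on both the gimple and the RTL CFG.  */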
75a70cf9 7090struct cfg_hooks gimple_cfg_hooks = {
7091 "gimple",
7092 gimple_verify_flow_info,
7093 gimple_dump_bb, /* dump_bb */
4ee9c684 7094 create_bb, /* create_basic_block */
75a70cf9 7095 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7096 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7097 gimple_can_remove_branch_p, /* can_remove_branch_p */
4ee9c684 7098 remove_bb, /* delete_basic_block */
75a70cf9 7099 gimple_split_block, /* split_block */
7100 gimple_move_block_after, /* move_block_after */
7101 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7102 gimple_merge_blocks, /* merge_blocks */
7103 gimple_predict_edge, /* predict_edge */
7104 gimple_predicted_by_p, /* predicted_by_p */
7105 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7106 gimple_duplicate_bb, /* duplicate_block */
7107 gimple_split_edge, /* split_edge */
7108 gimple_make_forwarder_block, /* make_forward_block */
4ee9c684 7109 NULL, /* tidy_fallthru_edge */
75a70cf9 7110 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7111 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7112 gimple_flow_call_edges_add, /* flow_call_edges_add */
7113 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7114 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7115 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7116 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7117 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
c50ae675 7118 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
13b96211 7119 flush_pending_stmts /* flush_pending_stmts */
4ee9c684 7120};
7121
7122
7123/* Split all critical edges. */
7124
2a1990e9 7125static unsigned int
4ee9c684 7126split_critical_edges (void)
7127{
7128 basic_block bb;
7129 edge e;
cd665a06 7130 edge_iterator ei;
4ee9c684 7131
6ff867cc 7132  /* split_edge can redirect edges out of GIMPLE_SWITCH statements, which
         7133     can get expensive.  So we want to enable recording of edge to
         7134     CASE_LABEL_EXPR mappings around the calls to split_edge.  */
7135 start_recording_case_labels ();
4ee9c684 7136 FOR_ALL_BB (bb)
7137 {
cd665a06 7138 FOR_EACH_EDGE (e, ei, bb->succs)
3d1eacdb 7139 {
7140 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
4ee9c684 7141 split_edge (e);
3d1eacdb 7142	  /* PRE inserts statements on edges and expects that, because
         7143	     split_critical_edges was run beforehand, committing those edge
         7144	     insertions will not split any more edges.  So, in addition to
         7145	     critical edges, also split edges whose source block ends with a
         7146	     control flow statement other than a return (such as a RESX) and
         7147	     whose destination has multiple predecessors, PHI nodes, or is the
         7148	     exit block.  This matches the logic in gimple_find_edge_insert_loc.  */
7149 else if ((!single_pred_p (e->dest)
7150 || phi_nodes (e->dest)
7151 || e->dest == EXIT_BLOCK_PTR)
7152 && e->src != ENTRY_BLOCK_PTR
7153 && !(e->flags & EDGE_ABNORMAL))
7154 {
7155 gimple_stmt_iterator gsi;
7156
7157 gsi = gsi_last_bb (e->src);
7158 if (!gsi_end_p (gsi)
7159 && stmt_ends_bb_p (gsi_stmt (gsi))
7160 && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
7161 split_edge (e);
7162 }
7163 }
4ee9c684 7164 }
6ff867cc 7165 end_recording_case_labels ();
2a1990e9 7166 return 0;
4ee9c684 7167}
7168
20099e35 7169struct gimple_opt_pass pass_split_crit_edges =
4ee9c684 7170{
20099e35 7171 {
7172 GIMPLE_PASS,
b84a3da6 7173 "crited", /* name */
4ee9c684 7174 NULL, /* gate */
7175 split_critical_edges, /* execute */
7176 NULL, /* sub */
7177 NULL, /* next */
7178 0, /* static_pass_number */
7179 TV_TREE_SPLIT_EDGES, /* tv_id */
 7180    PROP_cfg,                             /* properties_required */
7181 PROP_no_crit_edges, /* properties_provided */
7182 0, /* properties_destroyed */
7183 0, /* todo_flags_start */
ccf4ab6b 7184 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
20099e35 7185 }
4ee9c684 7186};
83e2a11b 7187
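/* Editorial sketch (hypothetical pass, not an excerpt from GCC): a pass
   that inserts statements on edges, as PRE does, can document its
   dependency on pass_split_crit_edges by listing the property it provides
   among its own requirements:

     struct gimple_opt_pass pass_example_edge_insert =
     {
      {
       GIMPLE_PASS,
       "example_insert",                        (hypothetical name)
       NULL,                                    (gate)
       execute_example_insert,                  (hypothetical execute)
       NULL, NULL, 0,                           (sub, next, static_pass_number)
       TV_NONE,                                 (tv_id)
       PROP_cfg | PROP_no_crit_edges,           (properties_required)
       0, 0, 0,                                 (provided, destroyed, start)
       0                                        (todo_flags_finish)
      }
     };

   It is still the pass schedule in passes.c that guarantees
   pass_split_crit_edges runs first; requiring PROP_no_crit_edges records
   the dependency rather than enforcing the ordering.  */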
83e2a11b 7188
75a70cf9 7189/* Build a ternary operation and gimplify it. Emit code before GSI.
83e2a11b 7190 Return the gimple_val holding the result. */
7191
7192tree
75a70cf9 7193gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
83e2a11b 7194 tree type, tree a, tree b, tree c)
7195{
7196 tree ret;
7197
49d00087 7198 ret = fold_build3 (code, type, a, b, c);
83e2a11b 7199 STRIP_NOPS (ret);
7200
75a70cf9 7201 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7202 GSI_SAME_STMT);
83e2a11b 7203}
7204
75a70cf9 7205/* Build a binary operation and gimplify it. Emit code before GSI.
83e2a11b 7206 Return the gimple_val holding the result. */
7207
7208tree
75a70cf9 7209gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
83e2a11b 7210 tree type, tree a, tree b)
7211{
7212 tree ret;
7213
49d00087 7214 ret = fold_build2 (code, type, a, b);
83e2a11b 7215 STRIP_NOPS (ret);
7216
75a70cf9 7217 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7218 GSI_SAME_STMT);
83e2a11b 7219}
7220
75a70cf9 7221/* Build a unary operation and gimplify it. Emit code before GSI.
83e2a11b 7222 Return the gimple_val holding the result. */
7223
7224tree
75a70cf9 7225gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
83e2a11b 7226 tree a)
7227{
7228 tree ret;
7229
49d00087 7230 ret = fold_build1 (code, type, a);
83e2a11b 7231 STRIP_NOPS (ret);
7232
75a70cf9 7233 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7234 GSI_SAME_STMT);
83e2a11b 7235}
7236
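/* Usage example for the helpers above (editorial sketch; STMT, TYPE, A
   and B are hypothetical):

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree neg = gimplify_build1 (&gsi, NEGATE_EXPR, type, sum);

   Each call folds the requested expression and, if anything non-trivial
   remains, emits the needed statements before GSI, returning a gimple
   value that is directly usable as an operand of a new statement.  */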
7237
4ee9c684 7238\f
7239/* Emit return warnings. */
7240
2a1990e9 7241static unsigned int
4ee9c684 7242execute_warn_function_return (void)
7243{
512e0fec 7244 source_location location;
75a70cf9 7245 gimple last;
4ee9c684 7246 edge e;
cd665a06 7247 edge_iterator ei;
4ee9c684 7248
4ee9c684 7249 /* If we have a path to EXIT, then we do return. */
7250 if (TREE_THIS_VOLATILE (cfun->decl)
cd665a06 7251 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
4ee9c684 7252 {
512e0fec 7253 location = UNKNOWN_LOCATION;
cd665a06 7254 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4ee9c684 7255 {
7256 last = last_stmt (e->src);
75a70cf9 7257 if (gimple_code (last) == GIMPLE_RETURN
7258 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
4ee9c684 7259 break;
7260 }
512e0fec 7261 if (location == UNKNOWN_LOCATION)
7262 location = cfun->function_end_locus;
5fb6a912 7263 warning_at (location, 0, "%<noreturn%> function does return");
4ee9c684 7264 }
7265
7266 /* If we see "return;" in some basic block, then we do reach the end
7267 without returning a value. */
7268 else if (warn_return_type
7f0f308d 7269 && !TREE_NO_WARNING (cfun->decl)
cd665a06 7270 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
4ee9c684 7271 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7272 {
cd665a06 7273 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4ee9c684 7274 {
75a70cf9 7275 gimple last = last_stmt (e->src);
7276 if (gimple_code (last) == GIMPLE_RETURN
7277 && gimple_return_retval (last) == NULL
7278 && !gimple_no_warning_p (last))
4ee9c684 7279 {
75a70cf9 7280 location = gimple_location (last);
512e0fec 7281 if (location == UNKNOWN_LOCATION)
7282 location = cfun->function_end_locus;
bdbc474b 7283 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7f0f308d 7284 TREE_NO_WARNING (cfun->decl) = 1;
4ee9c684 7285 break;
7286 }
7287 }
7288 }
2a1990e9 7289 return 0;
4ee9c684 7290}
7291
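/* Editorial example of user code diagnosed by the pass above (both
   functions are hypothetical):

     int f (int x)
     {
       if (x > 0)
	 return x;
     }			(control reaches end of non-void function)

     void g (void) __attribute__ ((noreturn));
     void g (void)
     {
     }			('noreturn' function does return)  */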
7292
7293/* Given a basic block B which ends with a conditional and has
7294 precisely two successors, determine which of the edges is taken if
7295 the conditional is true and which is taken if the conditional is
7296 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7297
7298void
7299extract_true_false_edges_from_block (basic_block b,
7300 edge *true_edge,
7301 edge *false_edge)
7302{
cd665a06 7303 edge e = EDGE_SUCC (b, 0);
4ee9c684 7304
7305 if (e->flags & EDGE_TRUE_VALUE)
7306 {
7307 *true_edge = e;
cd665a06 7308 *false_edge = EDGE_SUCC (b, 1);
4ee9c684 7309 }
7310 else
7311 {
7312 *false_edge = e;
cd665a06 7313 *true_edge = EDGE_SUCC (b, 1);
4ee9c684 7314 }
7315}
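
/* Usage example (editorial sketch; COND_BB is a hypothetical block that
   ends in a GIMPLE_COND and therefore has exactly two successors):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
     gcc_assert (true_edge->flags & EDGE_TRUE_VALUE);  */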
7316
20099e35 7317struct gimple_opt_pass pass_warn_function_return =
4ee9c684 7318{
20099e35 7319 {
7320 GIMPLE_PASS,
4ee9c684 7321 NULL, /* name */
7322 NULL, /* gate */
7323 execute_warn_function_return, /* execute */
7324 NULL, /* sub */
7325 NULL, /* next */
7326 0, /* static_pass_number */
0b1615c1 7327 TV_NONE, /* tv_id */
c4110044 7328 PROP_cfg, /* properties_required */
4ee9c684 7329 0, /* properties_provided */
7330 0, /* properties_destroyed */
7331 0, /* todo_flags_start */
20099e35 7332 0 /* todo_flags_finish */
7333 }
4ee9c684 7334};
73d5fb10 7335
7336/* Emit noreturn warnings. */
7337
2a1990e9 7338static unsigned int
73d5fb10 7339execute_warn_function_noreturn (void)
7340{
7341 if (warn_missing_noreturn
7342 && !TREE_THIS_VOLATILE (cfun->decl)
7343 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
b75409ba 7344 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
712d2297 7345 warning_at (DECL_SOURCE_LOCATION (cfun->decl), OPT_Wmissing_noreturn,
7346 "function might be possible candidate "
7347 "for attribute %<noreturn%>");
2a1990e9 7348 return 0;
73d5fb10 7349}
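
/* Editorial example of a function flagged by the warning above (the
   function is hypothetical; it never reaches EXIT because exit is
   noreturn, yet it lacks the attribute itself):

     void die_example (const char *msg)
     {
       fprintf (stderr, "%s\n", msg);
       exit (1);
     }  */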
7350
20099e35 7351struct gimple_opt_pass pass_warn_function_noreturn =
73d5fb10 7352{
20099e35 7353 {
7354 GIMPLE_PASS,
73d5fb10 7355 NULL, /* name */
7356 NULL, /* gate */
7357 execute_warn_function_noreturn, /* execute */
7358 NULL, /* sub */
7359 NULL, /* next */
7360 0, /* static_pass_number */
0b1615c1 7361 TV_NONE, /* tv_id */
73d5fb10 7362 PROP_cfg, /* properties_required */
7363 0, /* properties_provided */
7364 0, /* properties_destroyed */
7365 0, /* todo_flags_start */
20099e35 7366 0 /* todo_flags_finish */
7367 }
73d5fb10 7368};